{ "best_metric": 0.01222781892357284, "best_model_checkpoint": "./results-cc/plbart/plbart_lora_official_0.001/checkpoint-73590", "epoch": 5.0, "eval_steps": 500, "global_step": 73590, "is_hyper_param_search": false, "is_local_process_zero": true, "is_world_process_zero": true, "log_history": [ { "epoch": 0.0003397200706617747, "grad_norm": 7.380123138427734, "learning_rate": 0.0009999745209947003, "loss": 12.166, "step": 5 }, { "epoch": 0.0006794401413235494, "grad_norm": 2.548133373260498, "learning_rate": 0.0009999320559858677, "loss": 7.0103, "step": 10 }, { "epoch": 0.0010191602119853241, "grad_norm": 3.6230733394622803, "learning_rate": 0.000999889590977035, "loss": 5.4114, "step": 15 }, { "epoch": 0.001358880282647099, "grad_norm": 1.8844735622406006, "learning_rate": 0.0009998471259682023, "loss": 4.4564, "step": 20 }, { "epoch": 0.0016986003533088735, "grad_norm": 1.3114670515060425, "learning_rate": 0.0009998046609593695, "loss": 4.1105, "step": 25 }, { "epoch": 0.0020383204239706482, "grad_norm": 1.4154554605484009, "learning_rate": 0.0009997621959505368, "loss": 4.0197, "step": 30 }, { "epoch": 0.002378040494632423, "grad_norm": 1.7792688608169556, "learning_rate": 0.0009997197309417041, "loss": 3.878, "step": 35 }, { "epoch": 0.002717760565294198, "grad_norm": 0.9824917316436768, "learning_rate": 0.0009996772659328712, "loss": 3.8156, "step": 40 }, { "epoch": 0.0030574806359559724, "grad_norm": 1.286230206489563, "learning_rate": 0.0009996348009240386, "loss": 4.0769, "step": 45 }, { "epoch": 0.003397200706617747, "grad_norm": 1.319523811340332, "learning_rate": 0.000999592335915206, "loss": 3.975, "step": 50 }, { "epoch": 0.0037369207772795215, "grad_norm": 1.173959493637085, "learning_rate": 0.0009995498709063732, "loss": 3.947, "step": 55 }, { "epoch": 0.0040766408479412965, "grad_norm": 1.5615222454071045, "learning_rate": 0.0009995074058975404, "loss": 3.9179, "step": 60 }, { "epoch": 0.0044163609186030715, "grad_norm": 0.9661747813224792, 
"learning_rate": 0.0009994649408887077, "loss": 3.8287, "step": 65 }, { "epoch": 0.004756080989264846, "grad_norm": 0.8812354207038879, "learning_rate": 0.000999422475879875, "loss": 4.0607, "step": 70 }, { "epoch": 0.005095801059926621, "grad_norm": 1.160605549812317, "learning_rate": 0.0009993800108710421, "loss": 3.674, "step": 75 }, { "epoch": 0.005435521130588396, "grad_norm": 1.1129422187805176, "learning_rate": 0.0009993375458622097, "loss": 3.6611, "step": 80 }, { "epoch": 0.00577524120125017, "grad_norm": 0.9871999025344849, "learning_rate": 0.0009992950808533768, "loss": 3.6738, "step": 85 }, { "epoch": 0.006114961271911945, "grad_norm": 1.1280853748321533, "learning_rate": 0.0009992526158445442, "loss": 3.7799, "step": 90 }, { "epoch": 0.006454681342573719, "grad_norm": 0.8848223090171814, "learning_rate": 0.0009992101508357115, "loss": 3.9432, "step": 95 }, { "epoch": 0.006794401413235494, "grad_norm": 1.0015486478805542, "learning_rate": 0.0009991676858268786, "loss": 3.889, "step": 100 }, { "epoch": 0.007134121483897269, "grad_norm": 0.8902687430381775, "learning_rate": 0.000999125220818046, "loss": 3.556, "step": 105 }, { "epoch": 0.007473841554559043, "grad_norm": 0.9607239961624146, "learning_rate": 0.0009990827558092133, "loss": 3.8275, "step": 110 }, { "epoch": 0.007813561625220818, "grad_norm": 0.880843460559845, "learning_rate": 0.0009990402908003806, "loss": 3.8183, "step": 115 }, { "epoch": 0.008153281695882593, "grad_norm": 1.2420870065689087, "learning_rate": 0.0009989978257915477, "loss": 4.0439, "step": 120 }, { "epoch": 0.008493001766544368, "grad_norm": 0.9106853008270264, "learning_rate": 0.000998955360782715, "loss": 3.8842, "step": 125 }, { "epoch": 0.008832721837206143, "grad_norm": 1.467310905456543, "learning_rate": 0.0009989128957738824, "loss": 3.7371, "step": 130 }, { "epoch": 0.009172441907867916, "grad_norm": 1.3102498054504395, "learning_rate": 0.0009988704307650495, "loss": 3.719, "step": 135 }, { "epoch": 
0.009512161978529691, "grad_norm": 1.2353490591049194, "learning_rate": 0.0009988279657562168, "loss": 3.6821, "step": 140 }, { "epoch": 0.009851882049191466, "grad_norm": 0.9712654948234558, "learning_rate": 0.0009987855007473842, "loss": 3.6396, "step": 145 }, { "epoch": 0.010191602119853241, "grad_norm": 1.044681191444397, "learning_rate": 0.0009987430357385515, "loss": 3.8699, "step": 150 }, { "epoch": 0.010531322190515016, "grad_norm": 1.2838557958602905, "learning_rate": 0.0009987005707297188, "loss": 3.7855, "step": 155 }, { "epoch": 0.010871042261176791, "grad_norm": 1.5111401081085205, "learning_rate": 0.000998658105720886, "loss": 3.8598, "step": 160 }, { "epoch": 0.011210762331838564, "grad_norm": 1.121076226234436, "learning_rate": 0.0009986156407120533, "loss": 3.7322, "step": 165 }, { "epoch": 0.01155048240250034, "grad_norm": 1.2179945707321167, "learning_rate": 0.0009985731757032206, "loss": 3.804, "step": 170 }, { "epoch": 0.011890202473162114, "grad_norm": 1.1907989978790283, "learning_rate": 0.0009985307106943877, "loss": 3.734, "step": 175 }, { "epoch": 0.01222992254382389, "grad_norm": 1.318696141242981, "learning_rate": 0.000998488245685555, "loss": 3.8409, "step": 180 }, { "epoch": 0.012569642614485664, "grad_norm": 1.1070330142974854, "learning_rate": 0.0009984457806767224, "loss": 3.7997, "step": 185 }, { "epoch": 0.012909362685147438, "grad_norm": 1.0574580430984497, "learning_rate": 0.0009984033156678898, "loss": 3.6835, "step": 190 }, { "epoch": 0.013249082755809213, "grad_norm": 1.3317101001739502, "learning_rate": 0.0009983608506590569, "loss": 3.9452, "step": 195 }, { "epoch": 0.013588802826470988, "grad_norm": 1.1395431756973267, "learning_rate": 0.0009983183856502242, "loss": 3.8166, "step": 200 }, { "epoch": 0.013928522897132763, "grad_norm": 1.075817346572876, "learning_rate": 0.0009982759206413915, "loss": 3.8198, "step": 205 }, { "epoch": 0.014268242967794538, "grad_norm": 1.1497353315353394, "learning_rate": 
0.0009982334556325587, "loss": 3.5954, "step": 210 }, { "epoch": 0.014607963038456313, "grad_norm": 0.8535149693489075, "learning_rate": 0.000998190990623726, "loss": 3.6965, "step": 215 }, { "epoch": 0.014947683109118086, "grad_norm": 0.911138117313385, "learning_rate": 0.0009981485256148933, "loss": 3.7337, "step": 220 }, { "epoch": 0.015287403179779861, "grad_norm": 0.9495150446891785, "learning_rate": 0.0009981060606060607, "loss": 3.6644, "step": 225 }, { "epoch": 0.015627123250441636, "grad_norm": 0.9166472554206848, "learning_rate": 0.000998063595597228, "loss": 3.6596, "step": 230 }, { "epoch": 0.01596684332110341, "grad_norm": 1.0570188760757446, "learning_rate": 0.0009980211305883951, "loss": 3.776, "step": 235 }, { "epoch": 0.016306563391765186, "grad_norm": 1.4095300436019897, "learning_rate": 0.0009979786655795624, "loss": 3.5764, "step": 240 }, { "epoch": 0.01664628346242696, "grad_norm": 1.0658167600631714, "learning_rate": 0.0009979362005707298, "loss": 3.6179, "step": 245 }, { "epoch": 0.016986003533088736, "grad_norm": 0.8451441526412964, "learning_rate": 0.000997893735561897, "loss": 3.802, "step": 250 }, { "epoch": 0.01732572360375051, "grad_norm": 1.2235006093978882, "learning_rate": 0.0009978512705530644, "loss": 3.6164, "step": 255 }, { "epoch": 0.017665443674412286, "grad_norm": 1.5686720609664917, "learning_rate": 0.0009978088055442316, "loss": 3.8757, "step": 260 }, { "epoch": 0.01800516374507406, "grad_norm": 1.1582263708114624, "learning_rate": 0.000997766340535399, "loss": 4.0076, "step": 265 }, { "epoch": 0.018344883815735832, "grad_norm": 1.3716546297073364, "learning_rate": 0.0009977238755265662, "loss": 3.6583, "step": 270 }, { "epoch": 0.01868460388639761, "grad_norm": 0.9730767607688904, "learning_rate": 0.0009976814105177334, "loss": 3.5778, "step": 275 }, { "epoch": 0.019024323957059382, "grad_norm": 0.9888163208961487, "learning_rate": 0.0009976389455089007, "loss": 3.7006, "step": 280 }, { "epoch": 0.01936404402772116, 
"grad_norm": 0.9144140481948853, "learning_rate": 0.000997596480500068, "loss": 3.6686, "step": 285 }, { "epoch": 0.019703764098382932, "grad_norm": 1.0418434143066406, "learning_rate": 0.0009975540154912354, "loss": 3.8204, "step": 290 }, { "epoch": 0.020043484169044706, "grad_norm": 0.9498809576034546, "learning_rate": 0.0009975115504824025, "loss": 3.6893, "step": 295 }, { "epoch": 0.020383204239706482, "grad_norm": 1.1205024719238281, "learning_rate": 0.0009974690854735698, "loss": 3.458, "step": 300 }, { "epoch": 0.020722924310368256, "grad_norm": 1.65505850315094, "learning_rate": 0.0009974266204647371, "loss": 3.7977, "step": 305 }, { "epoch": 0.021062644381030032, "grad_norm": 1.1976819038391113, "learning_rate": 0.0009973841554559043, "loss": 3.5173, "step": 310 }, { "epoch": 0.021402364451691806, "grad_norm": 1.02446448802948, "learning_rate": 0.0009973416904470716, "loss": 3.936, "step": 315 }, { "epoch": 0.021742084522353582, "grad_norm": 1.3356537818908691, "learning_rate": 0.000997299225438239, "loss": 3.6805, "step": 320 }, { "epoch": 0.022081804593015356, "grad_norm": 1.1578243970870972, "learning_rate": 0.0009972567604294063, "loss": 3.5689, "step": 325 }, { "epoch": 0.02242152466367713, "grad_norm": 1.0383760929107666, "learning_rate": 0.0009972142954205736, "loss": 3.4068, "step": 330 }, { "epoch": 0.022761244734338906, "grad_norm": 0.9186046719551086, "learning_rate": 0.0009971718304117407, "loss": 3.7996, "step": 335 }, { "epoch": 0.02310096480500068, "grad_norm": 1.3555740118026733, "learning_rate": 0.000997129365402908, "loss": 3.7637, "step": 340 }, { "epoch": 0.023440684875662456, "grad_norm": 1.447969913482666, "learning_rate": 0.0009970869003940754, "loss": 3.7702, "step": 345 }, { "epoch": 0.02378040494632423, "grad_norm": 1.366459846496582, "learning_rate": 0.0009970444353852425, "loss": 3.598, "step": 350 }, { "epoch": 0.024120125016986002, "grad_norm": 1.2812645435333252, "learning_rate": 0.0009970019703764098, "loss": 3.9335, "step": 
355 }, { "epoch": 0.02445984508764778, "grad_norm": 1.1071491241455078, "learning_rate": 0.0009969595053675772, "loss": 3.7283, "step": 360 }, { "epoch": 0.024799565158309552, "grad_norm": 1.1867613792419434, "learning_rate": 0.0009969170403587445, "loss": 3.7203, "step": 365 }, { "epoch": 0.02513928522897133, "grad_norm": 1.0471099615097046, "learning_rate": 0.0009968745753499116, "loss": 3.7861, "step": 370 }, { "epoch": 0.025479005299633102, "grad_norm": 1.1841450929641724, "learning_rate": 0.000996832110341079, "loss": 3.6114, "step": 375 }, { "epoch": 0.025818725370294875, "grad_norm": 1.0960756540298462, "learning_rate": 0.0009967896453322463, "loss": 3.831, "step": 380 }, { "epoch": 0.026158445440956652, "grad_norm": 1.2569679021835327, "learning_rate": 0.0009967471803234134, "loss": 3.867, "step": 385 }, { "epoch": 0.026498165511618425, "grad_norm": 1.0735892057418823, "learning_rate": 0.0009967047153145807, "loss": 3.565, "step": 390 }, { "epoch": 0.026837885582280202, "grad_norm": 1.145148754119873, "learning_rate": 0.000996662250305748, "loss": 3.3345, "step": 395 }, { "epoch": 0.027177605652941975, "grad_norm": 1.179948091506958, "learning_rate": 0.0009966197852969154, "loss": 3.5777, "step": 400 }, { "epoch": 0.027517325723603752, "grad_norm": 1.3917948007583618, "learning_rate": 0.0009965773202880827, "loss": 3.595, "step": 405 }, { "epoch": 0.027857045794265525, "grad_norm": 1.1740734577178955, "learning_rate": 0.0009965348552792499, "loss": 3.8909, "step": 410 }, { "epoch": 0.0281967658649273, "grad_norm": 1.4014570713043213, "learning_rate": 0.0009964923902704172, "loss": 3.8378, "step": 415 }, { "epoch": 0.028536485935589075, "grad_norm": 1.6106117963790894, "learning_rate": 0.0009964499252615845, "loss": 3.6799, "step": 420 }, { "epoch": 0.02887620600625085, "grad_norm": 1.2870150804519653, "learning_rate": 0.0009964074602527516, "loss": 3.8337, "step": 425 }, { "epoch": 0.029215926076912625, "grad_norm": 1.3392133712768555, "learning_rate": 
0.000996364995243919, "loss": 3.7986, "step": 430 }, { "epoch": 0.0295556461475744, "grad_norm": 1.2135146856307983, "learning_rate": 0.0009963225302350863, "loss": 3.6767, "step": 435 }, { "epoch": 0.029895366218236172, "grad_norm": 1.1392208337783813, "learning_rate": 0.0009962800652262536, "loss": 3.714, "step": 440 }, { "epoch": 0.03023508628889795, "grad_norm": 1.2771044969558716, "learning_rate": 0.0009962376002174208, "loss": 3.582, "step": 445 }, { "epoch": 0.030574806359559722, "grad_norm": 1.1226322650909424, "learning_rate": 0.000996195135208588, "loss": 3.4606, "step": 450 }, { "epoch": 0.0309145264302215, "grad_norm": 1.0739678144454956, "learning_rate": 0.0009961526701997554, "loss": 3.6515, "step": 455 }, { "epoch": 0.03125424650088327, "grad_norm": 1.1153268814086914, "learning_rate": 0.0009961102051909226, "loss": 4.1085, "step": 460 }, { "epoch": 0.03159396657154505, "grad_norm": 1.5488002300262451, "learning_rate": 0.00099606774018209, "loss": 3.7204, "step": 465 }, { "epoch": 0.03193368664220682, "grad_norm": 1.2403671741485596, "learning_rate": 0.0009960252751732572, "loss": 3.7575, "step": 470 }, { "epoch": 0.032273406712868595, "grad_norm": 1.3695228099822998, "learning_rate": 0.0009959828101644246, "loss": 3.872, "step": 475 }, { "epoch": 0.03261312678353037, "grad_norm": 1.7740060091018677, "learning_rate": 0.0009959403451555919, "loss": 3.4737, "step": 480 }, { "epoch": 0.03295284685419215, "grad_norm": 1.3201332092285156, "learning_rate": 0.000995897880146759, "loss": 3.544, "step": 485 }, { "epoch": 0.03329256692485392, "grad_norm": 1.241077184677124, "learning_rate": 0.0009958554151379263, "loss": 3.6582, "step": 490 }, { "epoch": 0.033632286995515695, "grad_norm": 1.1933586597442627, "learning_rate": 0.0009958129501290937, "loss": 3.6964, "step": 495 }, { "epoch": 0.03397200706617747, "grad_norm": 1.2881439924240112, "learning_rate": 0.000995770485120261, "loss": 3.5458, "step": 500 }, { "epoch": 0.03431172713683924, "grad_norm": 
1.2302244901657104, "learning_rate": 0.0009957280201114281, "loss": 3.662, "step": 505 }, { "epoch": 0.03465144720750102, "grad_norm": 1.4391858577728271, "learning_rate": 0.0009956855551025955, "loss": 3.8284, "step": 510 }, { "epoch": 0.034991167278162795, "grad_norm": 1.4019132852554321, "learning_rate": 0.0009956430900937628, "loss": 3.6246, "step": 515 }, { "epoch": 0.03533088734882457, "grad_norm": 1.5205888748168945, "learning_rate": 0.00099560062508493, "loss": 3.7115, "step": 520 }, { "epoch": 0.03567060741948634, "grad_norm": 1.0384796857833862, "learning_rate": 0.0009955581600760972, "loss": 3.6101, "step": 525 }, { "epoch": 0.03601032749014812, "grad_norm": 1.1187450885772705, "learning_rate": 0.0009955156950672646, "loss": 3.4346, "step": 530 }, { "epoch": 0.036350047560809895, "grad_norm": 1.4204978942871094, "learning_rate": 0.000995473230058432, "loss": 3.6988, "step": 535 }, { "epoch": 0.036689767631471665, "grad_norm": 1.2351288795471191, "learning_rate": 0.0009954307650495992, "loss": 3.5677, "step": 540 }, { "epoch": 0.03702948770213344, "grad_norm": 1.5657918453216553, "learning_rate": 0.0009953883000407664, "loss": 3.6555, "step": 545 }, { "epoch": 0.03736920777279522, "grad_norm": 1.2254866361618042, "learning_rate": 0.0009953458350319337, "loss": 3.8502, "step": 550 }, { "epoch": 0.03770892784345699, "grad_norm": 1.3199241161346436, "learning_rate": 0.000995303370023101, "loss": 3.6755, "step": 555 }, { "epoch": 0.038048647914118765, "grad_norm": 1.2795915603637695, "learning_rate": 0.0009952609050142682, "loss": 3.6961, "step": 560 }, { "epoch": 0.03838836798478054, "grad_norm": 1.3294647932052612, "learning_rate": 0.0009952184400054355, "loss": 3.6874, "step": 565 }, { "epoch": 0.03872808805544232, "grad_norm": 1.2587231397628784, "learning_rate": 0.0009951759749966028, "loss": 3.8312, "step": 570 }, { "epoch": 0.03906780812610409, "grad_norm": 1.173063039779663, "learning_rate": 0.0009951335099877702, "loss": 3.8307, "step": 575 }, { 
"epoch": 0.039407528196765865, "grad_norm": 1.143584132194519, "learning_rate": 0.0009950910449789373, "loss": 3.5473, "step": 580 }, { "epoch": 0.03974724826742764, "grad_norm": 1.2580666542053223, "learning_rate": 0.0009950485799701046, "loss": 3.5444, "step": 585 }, { "epoch": 0.04008696833808941, "grad_norm": 1.3526042699813843, "learning_rate": 0.000995006114961272, "loss": 3.4972, "step": 590 }, { "epoch": 0.04042668840875119, "grad_norm": 1.2848647832870483, "learning_rate": 0.0009949636499524393, "loss": 3.5678, "step": 595 }, { "epoch": 0.040766408479412965, "grad_norm": 1.2633363008499146, "learning_rate": 0.0009949211849436066, "loss": 3.595, "step": 600 }, { "epoch": 0.04110612855007474, "grad_norm": 1.5241575241088867, "learning_rate": 0.0009948787199347737, "loss": 3.6406, "step": 605 }, { "epoch": 0.04144584862073651, "grad_norm": 1.3159860372543335, "learning_rate": 0.000994836254925941, "loss": 3.7417, "step": 610 }, { "epoch": 0.04178556869139829, "grad_norm": 1.2887978553771973, "learning_rate": 0.0009947937899171084, "loss": 3.6631, "step": 615 }, { "epoch": 0.042125288762060065, "grad_norm": 1.2440882921218872, "learning_rate": 0.0009947513249082755, "loss": 3.4888, "step": 620 }, { "epoch": 0.042465008832721834, "grad_norm": 1.0129196643829346, "learning_rate": 0.0009947088598994428, "loss": 3.3319, "step": 625 }, { "epoch": 0.04280472890338361, "grad_norm": 1.2206847667694092, "learning_rate": 0.0009946663948906102, "loss": 3.5067, "step": 630 }, { "epoch": 0.04314444897404539, "grad_norm": 1.3315480947494507, "learning_rate": 0.0009946239298817775, "loss": 3.5444, "step": 635 }, { "epoch": 0.043484169044707165, "grad_norm": 1.2718719244003296, "learning_rate": 0.0009945814648729449, "loss": 3.644, "step": 640 }, { "epoch": 0.043823889115368934, "grad_norm": 1.295305848121643, "learning_rate": 0.000994538999864112, "loss": 3.8859, "step": 645 }, { "epoch": 0.04416360918603071, "grad_norm": 1.169413685798645, "learning_rate": 
0.0009944965348552793, "loss": 3.6996, "step": 650 }, { "epoch": 0.04450332925669249, "grad_norm": 1.2588095664978027, "learning_rate": 0.0009944540698464466, "loss": 3.552, "step": 655 }, { "epoch": 0.04484304932735426, "grad_norm": 1.3797271251678467, "learning_rate": 0.0009944116048376138, "loss": 3.6202, "step": 660 }, { "epoch": 0.045182769398016034, "grad_norm": 1.4372137784957886, "learning_rate": 0.000994369139828781, "loss": 3.591, "step": 665 }, { "epoch": 0.04552248946867781, "grad_norm": 1.2383842468261719, "learning_rate": 0.0009943266748199484, "loss": 3.8526, "step": 670 }, { "epoch": 0.04586220953933958, "grad_norm": 1.4823582172393799, "learning_rate": 0.0009942842098111158, "loss": 3.7005, "step": 675 }, { "epoch": 0.04620192961000136, "grad_norm": 1.3869874477386475, "learning_rate": 0.0009942417448022829, "loss": 3.642, "step": 680 }, { "epoch": 0.046541649680663134, "grad_norm": 1.2444219589233398, "learning_rate": 0.0009941992797934502, "loss": 3.7279, "step": 685 }, { "epoch": 0.04688136975132491, "grad_norm": 1.3864883184432983, "learning_rate": 0.0009941568147846175, "loss": 3.6378, "step": 690 }, { "epoch": 0.04722108982198668, "grad_norm": 1.2136378288269043, "learning_rate": 0.0009941143497757847, "loss": 3.7933, "step": 695 }, { "epoch": 0.04756080989264846, "grad_norm": 0.9840254783630371, "learning_rate": 0.000994071884766952, "loss": 3.737, "step": 700 }, { "epoch": 0.047900529963310234, "grad_norm": 1.2723393440246582, "learning_rate": 0.0009940294197581193, "loss": 3.6793, "step": 705 }, { "epoch": 0.048240250033972004, "grad_norm": 1.1218743324279785, "learning_rate": 0.0009939869547492867, "loss": 3.6251, "step": 710 }, { "epoch": 0.04857997010463378, "grad_norm": 1.1279336214065552, "learning_rate": 0.000993944489740454, "loss": 3.6145, "step": 715 }, { "epoch": 0.04891969017529556, "grad_norm": 1.227635383605957, "learning_rate": 0.0009939020247316211, "loss": 3.6421, "step": 720 }, { "epoch": 0.049259410245957334, "grad_norm": 
1.352430820465088, "learning_rate": 0.0009938595597227884, "loss": 3.4809, "step": 725 }, { "epoch": 0.049599130316619104, "grad_norm": 1.4641741514205933, "learning_rate": 0.0009938170947139558, "loss": 3.4592, "step": 730 }, { "epoch": 0.04993885038728088, "grad_norm": 2.0770106315612793, "learning_rate": 0.000993774629705123, "loss": 3.618, "step": 735 }, { "epoch": 0.05027857045794266, "grad_norm": 1.5777415037155151, "learning_rate": 0.0009937321646962902, "loss": 3.8616, "step": 740 }, { "epoch": 0.05061829052860443, "grad_norm": 1.1887397766113281, "learning_rate": 0.0009936896996874576, "loss": 3.4477, "step": 745 }, { "epoch": 0.050958010599266204, "grad_norm": 1.5631141662597656, "learning_rate": 0.000993647234678625, "loss": 3.6177, "step": 750 }, { "epoch": 0.05129773066992798, "grad_norm": 1.276875615119934, "learning_rate": 0.000993604769669792, "loss": 3.6333, "step": 755 }, { "epoch": 0.05163745074058975, "grad_norm": 1.4980111122131348, "learning_rate": 0.0009935623046609594, "loss": 3.7494, "step": 760 }, { "epoch": 0.05197717081125153, "grad_norm": 1.389067530632019, "learning_rate": 0.0009935198396521267, "loss": 3.5369, "step": 765 }, { "epoch": 0.052316890881913304, "grad_norm": 1.5423647165298462, "learning_rate": 0.0009934773746432938, "loss": 3.5512, "step": 770 }, { "epoch": 0.05265661095257508, "grad_norm": 1.302044153213501, "learning_rate": 0.0009934349096344614, "loss": 3.7269, "step": 775 }, { "epoch": 0.05299633102323685, "grad_norm": 1.2148503065109253, "learning_rate": 0.0009933924446256285, "loss": 3.7929, "step": 780 }, { "epoch": 0.05333605109389863, "grad_norm": 1.4769620895385742, "learning_rate": 0.0009933499796167958, "loss": 3.5976, "step": 785 }, { "epoch": 0.053675771164560404, "grad_norm": 1.2358802556991577, "learning_rate": 0.0009933075146079631, "loss": 3.6821, "step": 790 }, { "epoch": 0.054015491235222174, "grad_norm": 1.1979494094848633, "learning_rate": 0.0009932650495991303, "loss": 3.536, "step": 795 }, { 
"epoch": 0.05435521130588395, "grad_norm": 1.421705722808838, "learning_rate": 0.0009932225845902976, "loss": 3.8559, "step": 800 }, { "epoch": 0.05469493137654573, "grad_norm": 1.231278657913208, "learning_rate": 0.000993180119581465, "loss": 3.9031, "step": 805 }, { "epoch": 0.055034651447207504, "grad_norm": 1.295201063156128, "learning_rate": 0.0009931376545726323, "loss": 3.6431, "step": 810 }, { "epoch": 0.055374371517869274, "grad_norm": 1.652647852897644, "learning_rate": 0.0009930951895637994, "loss": 3.6835, "step": 815 }, { "epoch": 0.05571409158853105, "grad_norm": 1.679626703262329, "learning_rate": 0.0009930527245549667, "loss": 3.532, "step": 820 }, { "epoch": 0.05605381165919283, "grad_norm": 1.0830215215682983, "learning_rate": 0.000993010259546134, "loss": 3.5286, "step": 825 }, { "epoch": 0.0563935317298546, "grad_norm": 1.34091055393219, "learning_rate": 0.0009929677945373012, "loss": 3.5002, "step": 830 }, { "epoch": 0.056733251800516374, "grad_norm": 1.506014108657837, "learning_rate": 0.0009929253295284685, "loss": 3.6214, "step": 835 }, { "epoch": 0.05707297187117815, "grad_norm": 1.1148265600204468, "learning_rate": 0.0009928828645196358, "loss": 3.8489, "step": 840 }, { "epoch": 0.05741269194183993, "grad_norm": 1.4372700452804565, "learning_rate": 0.0009928403995108032, "loss": 3.7716, "step": 845 }, { "epoch": 0.0577524120125017, "grad_norm": 1.4518808126449585, "learning_rate": 0.0009927979345019705, "loss": 3.6803, "step": 850 }, { "epoch": 0.058092132083163474, "grad_norm": 1.1994529962539673, "learning_rate": 0.0009927554694931376, "loss": 3.6553, "step": 855 }, { "epoch": 0.05843185215382525, "grad_norm": 1.3031033277511597, "learning_rate": 0.000992713004484305, "loss": 3.3347, "step": 860 }, { "epoch": 0.05877157222448702, "grad_norm": 1.9486223459243774, "learning_rate": 0.0009926705394754723, "loss": 3.7248, "step": 865 }, { "epoch": 0.0591112922951488, "grad_norm": 1.3978136777877808, "learning_rate": 0.0009926280744666394, 
"loss": 3.6741, "step": 870 }, { "epoch": 0.059451012365810574, "grad_norm": 1.1675618886947632, "learning_rate": 0.0009925856094578067, "loss": 3.3729, "step": 875 }, { "epoch": 0.059790732436472344, "grad_norm": 1.423511266708374, "learning_rate": 0.000992543144448974, "loss": 3.554, "step": 880 }, { "epoch": 0.06013045250713412, "grad_norm": 1.5454918146133423, "learning_rate": 0.0009925006794401414, "loss": 3.5732, "step": 885 }, { "epoch": 0.0604701725777959, "grad_norm": 1.9197204113006592, "learning_rate": 0.0009924582144313085, "loss": 3.7401, "step": 890 }, { "epoch": 0.060809892648457674, "grad_norm": 1.2283180952072144, "learning_rate": 0.0009924157494224759, "loss": 3.7143, "step": 895 }, { "epoch": 0.061149612719119444, "grad_norm": 1.6237564086914062, "learning_rate": 0.0009923732844136432, "loss": 3.7629, "step": 900 }, { "epoch": 0.06148933278978122, "grad_norm": 1.4725292921066284, "learning_rate": 0.0009923308194048103, "loss": 3.6239, "step": 905 }, { "epoch": 0.061829052860443, "grad_norm": 1.2769253253936768, "learning_rate": 0.0009922883543959776, "loss": 3.5165, "step": 910 }, { "epoch": 0.06216877293110477, "grad_norm": 1.4218223094940186, "learning_rate": 0.000992245889387145, "loss": 3.7805, "step": 915 }, { "epoch": 0.06250849300176654, "grad_norm": 1.4126838445663452, "learning_rate": 0.0009922034243783123, "loss": 3.6625, "step": 920 }, { "epoch": 0.06284821307242831, "grad_norm": 1.4486182928085327, "learning_rate": 0.0009921609593694797, "loss": 3.6097, "step": 925 }, { "epoch": 0.0631879331430901, "grad_norm": 1.3419944047927856, "learning_rate": 0.0009921184943606468, "loss": 3.703, "step": 930 }, { "epoch": 0.06352765321375187, "grad_norm": 1.3316930532455444, "learning_rate": 0.000992076029351814, "loss": 3.8832, "step": 935 }, { "epoch": 0.06386737328441364, "grad_norm": 1.1278616189956665, "learning_rate": 0.0009920335643429814, "loss": 3.4324, "step": 940 }, { "epoch": 0.06420709335507542, "grad_norm": 1.2497245073318481, 
"learning_rate": 0.0009919910993341486, "loss": 3.5835, "step": 945 }, { "epoch": 0.06454681342573719, "grad_norm": 1.2282592058181763, "learning_rate": 0.000991948634325316, "loss": 3.5718, "step": 950 }, { "epoch": 0.06488653349639897, "grad_norm": 1.4528684616088867, "learning_rate": 0.0009919061693164832, "loss": 3.584, "step": 955 }, { "epoch": 0.06522625356706074, "grad_norm": 1.5997029542922974, "learning_rate": 0.0009918637043076506, "loss": 3.8078, "step": 960 }, { "epoch": 0.06556597363772251, "grad_norm": 1.2870607376098633, "learning_rate": 0.000991821239298818, "loss": 3.6934, "step": 965 }, { "epoch": 0.0659056937083843, "grad_norm": 1.9012479782104492, "learning_rate": 0.000991778774289985, "loss": 3.8905, "step": 970 }, { "epoch": 0.06624541377904607, "grad_norm": 1.3600386381149292, "learning_rate": 0.0009917363092811523, "loss": 3.8428, "step": 975 }, { "epoch": 0.06658513384970784, "grad_norm": 1.313541054725647, "learning_rate": 0.0009916938442723197, "loss": 3.6749, "step": 980 }, { "epoch": 0.06692485392036962, "grad_norm": 1.398751974105835, "learning_rate": 0.000991651379263487, "loss": 3.6958, "step": 985 }, { "epoch": 0.06726457399103139, "grad_norm": 1.2748618125915527, "learning_rate": 0.0009916089142546541, "loss": 3.6578, "step": 990 }, { "epoch": 0.06760429406169316, "grad_norm": 1.1404482126235962, "learning_rate": 0.0009915664492458215, "loss": 3.5086, "step": 995 }, { "epoch": 0.06794401413235494, "grad_norm": 1.580732822418213, "learning_rate": 0.0009915239842369888, "loss": 3.6695, "step": 1000 }, { "epoch": 0.06828373420301671, "grad_norm": 1.459945559501648, "learning_rate": 0.000991481519228156, "loss": 3.5856, "step": 1005 }, { "epoch": 0.06862345427367848, "grad_norm": 1.3169174194335938, "learning_rate": 0.0009914390542193233, "loss": 3.4034, "step": 1010 }, { "epoch": 0.06896317434434027, "grad_norm": 1.4648442268371582, "learning_rate": 0.0009913965892104906, "loss": 3.7299, "step": 1015 }, { "epoch": 0.06930289441500204, 
"grad_norm": 1.119935154914856, "learning_rate": 0.000991354124201658, "loss": 3.4913, "step": 1020 }, { "epoch": 0.0696426144856638, "grad_norm": 1.3011690378189087, "learning_rate": 0.0009913116591928253, "loss": 3.5204, "step": 1025 }, { "epoch": 0.06998233455632559, "grad_norm": 1.1822947263717651, "learning_rate": 0.0009912691941839924, "loss": 3.7027, "step": 1030 }, { "epoch": 0.07032205462698736, "grad_norm": 1.5324063301086426, "learning_rate": 0.0009912267291751597, "loss": 3.5566, "step": 1035 }, { "epoch": 0.07066177469764914, "grad_norm": 1.5661357641220093, "learning_rate": 0.000991184264166327, "loss": 3.6022, "step": 1040 }, { "epoch": 0.07100149476831091, "grad_norm": 1.8986066579818726, "learning_rate": 0.0009911417991574942, "loss": 3.5815, "step": 1045 }, { "epoch": 0.07134121483897268, "grad_norm": 1.6261504888534546, "learning_rate": 0.0009910993341486615, "loss": 3.9093, "step": 1050 }, { "epoch": 0.07168093490963447, "grad_norm": 1.663529396057129, "learning_rate": 0.0009910568691398288, "loss": 3.4708, "step": 1055 }, { "epoch": 0.07202065498029624, "grad_norm": 1.4680196046829224, "learning_rate": 0.0009910144041309962, "loss": 3.4974, "step": 1060 }, { "epoch": 0.072360375050958, "grad_norm": 1.210729956626892, "learning_rate": 0.0009909719391221633, "loss": 3.3582, "step": 1065 }, { "epoch": 0.07270009512161979, "grad_norm": 1.7765851020812988, "learning_rate": 0.0009909294741133306, "loss": 3.9412, "step": 1070 }, { "epoch": 0.07303981519228156, "grad_norm": 1.6754155158996582, "learning_rate": 0.000990887009104498, "loss": 3.5716, "step": 1075 }, { "epoch": 0.07337953526294333, "grad_norm": 1.6324594020843506, "learning_rate": 0.000990844544095665, "loss": 3.8601, "step": 1080 }, { "epoch": 0.07371925533360511, "grad_norm": 1.3559643030166626, "learning_rate": 0.0009908020790868326, "loss": 3.5629, "step": 1085 }, { "epoch": 0.07405897540426688, "grad_norm": 1.4925544261932373, "learning_rate": 0.0009907596140779997, "loss": 3.7117, 
"step": 1090 }, { "epoch": 0.07439869547492865, "grad_norm": 1.4625900983810425, "learning_rate": 0.000990717149069167, "loss": 3.651, "step": 1095 }, { "epoch": 0.07473841554559044, "grad_norm": 1.4238613843917847, "learning_rate": 0.0009906746840603344, "loss": 3.3139, "step": 1100 }, { "epoch": 0.0750781356162522, "grad_norm": 1.3859649896621704, "learning_rate": 0.0009906322190515015, "loss": 3.835, "step": 1105 }, { "epoch": 0.07541785568691398, "grad_norm": 1.3886597156524658, "learning_rate": 0.0009905897540426689, "loss": 3.6103, "step": 1110 }, { "epoch": 0.07575757575757576, "grad_norm": 1.2994500398635864, "learning_rate": 0.0009905472890338362, "loss": 3.7226, "step": 1115 }, { "epoch": 0.07609729582823753, "grad_norm": 1.1909898519515991, "learning_rate": 0.0009905048240250035, "loss": 3.5325, "step": 1120 }, { "epoch": 0.07643701589889931, "grad_norm": 1.311832070350647, "learning_rate": 0.0009904623590161706, "loss": 3.6582, "step": 1125 }, { "epoch": 0.07677673596956108, "grad_norm": 1.7130597829818726, "learning_rate": 0.000990419894007338, "loss": 3.5546, "step": 1130 }, { "epoch": 0.07711645604022285, "grad_norm": 1.4920973777770996, "learning_rate": 0.0009903774289985053, "loss": 3.7109, "step": 1135 }, { "epoch": 0.07745617611088464, "grad_norm": 1.6897900104522705, "learning_rate": 0.0009903349639896724, "loss": 3.3856, "step": 1140 }, { "epoch": 0.0777958961815464, "grad_norm": 1.6766341924667358, "learning_rate": 0.0009902924989808398, "loss": 3.5284, "step": 1145 }, { "epoch": 0.07813561625220818, "grad_norm": 1.622188687324524, "learning_rate": 0.000990250033972007, "loss": 3.6166, "step": 1150 }, { "epoch": 0.07847533632286996, "grad_norm": 1.4141714572906494, "learning_rate": 0.0009902075689631744, "loss": 3.6401, "step": 1155 }, { "epoch": 0.07881505639353173, "grad_norm": 1.2582435607910156, "learning_rate": 0.0009901651039543418, "loss": 3.439, "step": 1160 }, { "epoch": 0.0791547764641935, "grad_norm": 1.6595185995101929, 
"learning_rate": 0.0009901226389455089, "loss": 3.7264, "step": 1165 }, { "epoch": 0.07949449653485528, "grad_norm": 1.416305661201477, "learning_rate": 0.0009900801739366762, "loss": 3.6789, "step": 1170 }, { "epoch": 0.07983421660551705, "grad_norm": 1.4789602756500244, "learning_rate": 0.0009900377089278435, "loss": 3.4536, "step": 1175 }, { "epoch": 0.08017393667617882, "grad_norm": 1.5110453367233276, "learning_rate": 0.0009899952439190107, "loss": 3.9809, "step": 1180 }, { "epoch": 0.0805136567468406, "grad_norm": 1.1299697160720825, "learning_rate": 0.000989952778910178, "loss": 3.6269, "step": 1185 }, { "epoch": 0.08085337681750238, "grad_norm": 1.340451955795288, "learning_rate": 0.0009899103139013453, "loss": 3.715, "step": 1190 }, { "epoch": 0.08119309688816416, "grad_norm": 1.3992185592651367, "learning_rate": 0.0009898678488925127, "loss": 3.3253, "step": 1195 }, { "epoch": 0.08153281695882593, "grad_norm": 1.5825345516204834, "learning_rate": 0.0009898253838836798, "loss": 3.75, "step": 1200 }, { "epoch": 0.0818725370294877, "grad_norm": 1.9631842374801636, "learning_rate": 0.0009897829188748471, "loss": 3.8312, "step": 1205 }, { "epoch": 0.08221225710014948, "grad_norm": 1.4425073862075806, "learning_rate": 0.0009897404538660145, "loss": 3.478, "step": 1210 }, { "epoch": 0.08255197717081125, "grad_norm": 1.6919792890548706, "learning_rate": 0.0009896979888571816, "loss": 3.4799, "step": 1215 }, { "epoch": 0.08289169724147302, "grad_norm": 1.4208625555038452, "learning_rate": 0.000989655523848349, "loss": 3.4996, "step": 1220 }, { "epoch": 0.0832314173121348, "grad_norm": 1.3654673099517822, "learning_rate": 0.0009896130588395162, "loss": 3.58, "step": 1225 }, { "epoch": 0.08357113738279658, "grad_norm": 1.5514477491378784, "learning_rate": 0.0009895705938306836, "loss": 3.8466, "step": 1230 }, { "epoch": 0.08391085745345835, "grad_norm": 1.548609733581543, "learning_rate": 0.000989528128821851, "loss": 3.5666, "step": 1235 }, { "epoch": 
0.08425057752412013, "grad_norm": 1.7257474660873413, "learning_rate": 0.000989485663813018, "loss": 3.6852, "step": 1240 }, { "epoch": 0.0845902975947819, "grad_norm": 1.3918873071670532, "learning_rate": 0.0009894431988041854, "loss": 3.4254, "step": 1245 }, { "epoch": 0.08493001766544367, "grad_norm": 1.6741485595703125, "learning_rate": 0.0009894007337953527, "loss": 3.7226, "step": 1250 }, { "epoch": 0.08526973773610545, "grad_norm": 1.368111252784729, "learning_rate": 0.0009893582687865198, "loss": 3.8294, "step": 1255 }, { "epoch": 0.08560945780676722, "grad_norm": 1.4372892379760742, "learning_rate": 0.0009893158037776871, "loss": 3.4844, "step": 1260 }, { "epoch": 0.08594917787742899, "grad_norm": 1.553418755531311, "learning_rate": 0.0009892733387688545, "loss": 3.6032, "step": 1265 }, { "epoch": 0.08628889794809078, "grad_norm": 1.2568933963775635, "learning_rate": 0.0009892308737600218, "loss": 3.664, "step": 1270 }, { "epoch": 0.08662861801875255, "grad_norm": 1.6155232191085815, "learning_rate": 0.0009891884087511891, "loss": 3.6298, "step": 1275 }, { "epoch": 0.08696833808941433, "grad_norm": 1.25331449508667, "learning_rate": 0.0009891459437423563, "loss": 3.7289, "step": 1280 }, { "epoch": 0.0873080581600761, "grad_norm": 1.425113320350647, "learning_rate": 0.0009891034787335236, "loss": 4.0093, "step": 1285 }, { "epoch": 0.08764777823073787, "grad_norm": 1.246638298034668, "learning_rate": 0.000989061013724691, "loss": 3.5345, "step": 1290 }, { "epoch": 0.08798749830139965, "grad_norm": 1.3937548398971558, "learning_rate": 0.0009890185487158583, "loss": 3.621, "step": 1295 }, { "epoch": 0.08832721837206142, "grad_norm": 1.4195553064346313, "learning_rate": 0.0009889760837070254, "loss": 3.8028, "step": 1300 }, { "epoch": 0.08866693844272319, "grad_norm": 1.6123952865600586, "learning_rate": 0.0009889336186981927, "loss": 3.6028, "step": 1305 }, { "epoch": 0.08900665851338498, "grad_norm": 1.1660856008529663, "learning_rate": 0.00098889115368936, 
"loss": 3.7334, "step": 1310 }, { "epoch": 0.08934637858404675, "grad_norm": 1.776261329650879, "learning_rate": 0.0009888486886805272, "loss": 3.5574, "step": 1315 }, { "epoch": 0.08968609865470852, "grad_norm": 1.347998857498169, "learning_rate": 0.0009888062236716945, "loss": 3.3267, "step": 1320 }, { "epoch": 0.0900258187253703, "grad_norm": 1.5156413316726685, "learning_rate": 0.0009887637586628618, "loss": 3.3519, "step": 1325 }, { "epoch": 0.09036553879603207, "grad_norm": 1.7387906312942505, "learning_rate": 0.0009887212936540292, "loss": 3.7161, "step": 1330 }, { "epoch": 0.09070525886669384, "grad_norm": 1.9375509023666382, "learning_rate": 0.0009886788286451965, "loss": 3.6976, "step": 1335 }, { "epoch": 0.09104497893735562, "grad_norm": 1.4051861763000488, "learning_rate": 0.0009886363636363636, "loss": 3.7862, "step": 1340 }, { "epoch": 0.09138469900801739, "grad_norm": 1.4070665836334229, "learning_rate": 0.000988593898627531, "loss": 3.3223, "step": 1345 }, { "epoch": 0.09172441907867916, "grad_norm": 1.618043065071106, "learning_rate": 0.0009885514336186983, "loss": 3.4402, "step": 1350 }, { "epoch": 0.09206413914934095, "grad_norm": 1.1309304237365723, "learning_rate": 0.0009885089686098654, "loss": 3.678, "step": 1355 }, { "epoch": 0.09240385922000272, "grad_norm": 1.7721068859100342, "learning_rate": 0.0009884665036010327, "loss": 3.574, "step": 1360 }, { "epoch": 0.0927435792906645, "grad_norm": 1.438754677772522, "learning_rate": 0.0009884240385922, "loss": 3.605, "step": 1365 }, { "epoch": 0.09308329936132627, "grad_norm": 1.2320460081100464, "learning_rate": 0.0009883815735833674, "loss": 3.7119, "step": 1370 }, { "epoch": 0.09342301943198804, "grad_norm": 1.4146790504455566, "learning_rate": 0.0009883391085745345, "loss": 3.795, "step": 1375 }, { "epoch": 0.09376273950264982, "grad_norm": 1.6451289653778076, "learning_rate": 0.0009882966435657019, "loss": 3.5301, "step": 1380 }, { "epoch": 0.09410245957331159, "grad_norm": 
1.7030775547027588, "learning_rate": 0.0009882541785568692, "loss": 3.6517, "step": 1385 }, { "epoch": 0.09444217964397336, "grad_norm": 1.3709867000579834, "learning_rate": 0.0009882117135480363, "loss": 3.6157, "step": 1390 }, { "epoch": 0.09478189971463515, "grad_norm": 1.399248719215393, "learning_rate": 0.0009881692485392037, "loss": 3.6352, "step": 1395 }, { "epoch": 0.09512161978529692, "grad_norm": 1.4439579248428345, "learning_rate": 0.000988126783530371, "loss": 3.6226, "step": 1400 }, { "epoch": 0.09546133985595869, "grad_norm": 1.199981927871704, "learning_rate": 0.0009880843185215383, "loss": 3.6088, "step": 1405 }, { "epoch": 0.09580105992662047, "grad_norm": 1.306014895439148, "learning_rate": 0.0009880418535127057, "loss": 3.6647, "step": 1410 }, { "epoch": 0.09614077999728224, "grad_norm": 1.3821322917938232, "learning_rate": 0.0009879993885038728, "loss": 3.6863, "step": 1415 }, { "epoch": 0.09648050006794401, "grad_norm": 1.3198941946029663, "learning_rate": 0.0009879569234950401, "loss": 3.775, "step": 1420 }, { "epoch": 0.09682022013860579, "grad_norm": 1.7156963348388672, "learning_rate": 0.0009879144584862074, "loss": 3.6923, "step": 1425 }, { "epoch": 0.09715994020926756, "grad_norm": 1.2551462650299072, "learning_rate": 0.0009878719934773746, "loss": 3.519, "step": 1430 }, { "epoch": 0.09749966027992933, "grad_norm": 1.541696548461914, "learning_rate": 0.000987829528468542, "loss": 3.5644, "step": 1435 }, { "epoch": 0.09783938035059112, "grad_norm": 1.6970123052597046, "learning_rate": 0.0009877870634597092, "loss": 3.5933, "step": 1440 }, { "epoch": 0.09817910042125289, "grad_norm": 1.274652361869812, "learning_rate": 0.0009877445984508766, "loss": 3.4039, "step": 1445 }, { "epoch": 0.09851882049191467, "grad_norm": 1.377434492111206, "learning_rate": 0.0009877021334420437, "loss": 3.6108, "step": 1450 }, { "epoch": 0.09885854056257644, "grad_norm": 1.3028285503387451, "learning_rate": 0.000987659668433211, "loss": 3.6447, "step": 1455 }, 
{ "epoch": 0.09919826063323821, "grad_norm": 1.284833550453186, "learning_rate": 0.0009876172034243783, "loss": 3.6828, "step": 1460 }, { "epoch": 0.09953798070389999, "grad_norm": 2.180715560913086, "learning_rate": 0.0009875747384155455, "loss": 3.6913, "step": 1465 }, { "epoch": 0.09987770077456176, "grad_norm": 1.5001219511032104, "learning_rate": 0.000987532273406713, "loss": 3.7564, "step": 1470 }, { "epoch": 0.10021742084522353, "grad_norm": 1.3779727220535278, "learning_rate": 0.0009874898083978801, "loss": 3.4246, "step": 1475 }, { "epoch": 0.10055714091588532, "grad_norm": 1.7945916652679443, "learning_rate": 0.0009874473433890475, "loss": 3.6703, "step": 1480 }, { "epoch": 0.10089686098654709, "grad_norm": 1.44007408618927, "learning_rate": 0.0009874048783802148, "loss": 3.5834, "step": 1485 }, { "epoch": 0.10123658105720885, "grad_norm": 1.454373836517334, "learning_rate": 0.000987362413371382, "loss": 3.6041, "step": 1490 }, { "epoch": 0.10157630112787064, "grad_norm": 1.2343708276748657, "learning_rate": 0.0009873199483625493, "loss": 3.5909, "step": 1495 }, { "epoch": 0.10191602119853241, "grad_norm": 1.4894696474075317, "learning_rate": 0.0009872774833537166, "loss": 3.8197, "step": 1500 }, { "epoch": 0.10225574126919418, "grad_norm": 1.581842064857483, "learning_rate": 0.000987235018344884, "loss": 3.7142, "step": 1505 }, { "epoch": 0.10259546133985596, "grad_norm": 1.4353221654891968, "learning_rate": 0.000987192553336051, "loss": 3.511, "step": 1510 }, { "epoch": 0.10293518141051773, "grad_norm": 1.3302139043807983, "learning_rate": 0.0009871500883272184, "loss": 3.5372, "step": 1515 }, { "epoch": 0.1032749014811795, "grad_norm": 1.3703844547271729, "learning_rate": 0.0009871076233183857, "loss": 3.5101, "step": 1520 }, { "epoch": 0.10361462155184128, "grad_norm": 1.5488308668136597, "learning_rate": 0.0009870651583095528, "loss": 3.7636, "step": 1525 }, { "epoch": 0.10395434162250305, "grad_norm": 1.4070602655410767, "learning_rate": 
0.0009870226933007202, "loss": 3.6355, "step": 1530 }, { "epoch": 0.10429406169316484, "grad_norm": 1.3398648500442505, "learning_rate": 0.0009869802282918875, "loss": 3.8088, "step": 1535 }, { "epoch": 0.10463378176382661, "grad_norm": 1.5523875951766968, "learning_rate": 0.0009869377632830548, "loss": 3.7478, "step": 1540 }, { "epoch": 0.10497350183448838, "grad_norm": 1.177014946937561, "learning_rate": 0.0009868952982742222, "loss": 3.6128, "step": 1545 }, { "epoch": 0.10531322190515016, "grad_norm": 1.6369737386703491, "learning_rate": 0.0009868528332653893, "loss": 3.6919, "step": 1550 }, { "epoch": 0.10565294197581193, "grad_norm": 1.450143575668335, "learning_rate": 0.0009868103682565566, "loss": 3.5896, "step": 1555 }, { "epoch": 0.1059926620464737, "grad_norm": 1.60457181930542, "learning_rate": 0.000986767903247724, "loss": 3.6932, "step": 1560 }, { "epoch": 0.10633238211713548, "grad_norm": 1.875244379043579, "learning_rate": 0.000986725438238891, "loss": 3.7041, "step": 1565 }, { "epoch": 0.10667210218779725, "grad_norm": 1.5752156972885132, "learning_rate": 0.0009866829732300584, "loss": 3.5056, "step": 1570 }, { "epoch": 0.10701182225845902, "grad_norm": 1.529388666152954, "learning_rate": 0.0009866405082212257, "loss": 3.7209, "step": 1575 }, { "epoch": 0.10735154232912081, "grad_norm": 1.3317933082580566, "learning_rate": 0.000986598043212393, "loss": 3.6511, "step": 1580 }, { "epoch": 0.10769126239978258, "grad_norm": 1.3510174751281738, "learning_rate": 0.0009865555782035602, "loss": 3.6768, "step": 1585 }, { "epoch": 0.10803098247044435, "grad_norm": 2.065546751022339, "learning_rate": 0.0009865131131947275, "loss": 3.4777, "step": 1590 }, { "epoch": 0.10837070254110613, "grad_norm": 1.2832310199737549, "learning_rate": 0.0009864706481858949, "loss": 3.7352, "step": 1595 }, { "epoch": 0.1087104226117679, "grad_norm": 1.0940709114074707, "learning_rate": 0.000986428183177062, "loss": 3.554, "step": 1600 }, { "epoch": 0.10905014268242967, 
"grad_norm": 1.3805588483810425, "learning_rate": 0.0009863857181682295, "loss": 3.6533, "step": 1605 }, { "epoch": 0.10938986275309145, "grad_norm": 1.7584441900253296, "learning_rate": 0.0009863432531593966, "loss": 3.7884, "step": 1610 }, { "epoch": 0.10972958282375322, "grad_norm": 1.3596646785736084, "learning_rate": 0.000986300788150564, "loss": 3.5469, "step": 1615 }, { "epoch": 0.11006930289441501, "grad_norm": 1.5852384567260742, "learning_rate": 0.0009862583231417313, "loss": 3.4777, "step": 1620 }, { "epoch": 0.11040902296507678, "grad_norm": 1.396229863166809, "learning_rate": 0.0009862158581328984, "loss": 3.5064, "step": 1625 }, { "epoch": 0.11074874303573855, "grad_norm": 1.679211974143982, "learning_rate": 0.0009861733931240658, "loss": 3.6843, "step": 1630 }, { "epoch": 0.11108846310640033, "grad_norm": 1.603892207145691, "learning_rate": 0.000986130928115233, "loss": 3.6466, "step": 1635 }, { "epoch": 0.1114281831770621, "grad_norm": 1.5274556875228882, "learning_rate": 0.0009860884631064004, "loss": 3.7456, "step": 1640 }, { "epoch": 0.11176790324772387, "grad_norm": 1.3056048154830933, "learning_rate": 0.0009860459980975678, "loss": 3.5955, "step": 1645 }, { "epoch": 0.11210762331838565, "grad_norm": 1.2977254390716553, "learning_rate": 0.0009860035330887349, "loss": 3.4829, "step": 1650 }, { "epoch": 0.11244734338904742, "grad_norm": 1.6938495635986328, "learning_rate": 0.0009859610680799022, "loss": 3.6734, "step": 1655 }, { "epoch": 0.1127870634597092, "grad_norm": 1.4662179946899414, "learning_rate": 0.0009859186030710696, "loss": 3.6567, "step": 1660 }, { "epoch": 0.11312678353037098, "grad_norm": 1.765249490737915, "learning_rate": 0.0009858761380622367, "loss": 3.7111, "step": 1665 }, { "epoch": 0.11346650360103275, "grad_norm": 1.6638092994689941, "learning_rate": 0.000985833673053404, "loss": 3.684, "step": 1670 }, { "epoch": 0.11380622367169452, "grad_norm": 1.5604588985443115, "learning_rate": 0.0009857912080445713, "loss": 3.6605, 
"step": 1675 }, { "epoch": 0.1141459437423563, "grad_norm": 1.53115713596344, "learning_rate": 0.0009857487430357387, "loss": 3.5717, "step": 1680 }, { "epoch": 0.11448566381301807, "grad_norm": 1.3386611938476562, "learning_rate": 0.0009857062780269058, "loss": 3.5396, "step": 1685 }, { "epoch": 0.11482538388367985, "grad_norm": 1.4295798540115356, "learning_rate": 0.0009856638130180731, "loss": 3.4297, "step": 1690 }, { "epoch": 0.11516510395434162, "grad_norm": 1.3638650178909302, "learning_rate": 0.0009856213480092405, "loss": 3.5681, "step": 1695 }, { "epoch": 0.1155048240250034, "grad_norm": 1.731117606163025, "learning_rate": 0.0009855788830004076, "loss": 3.7121, "step": 1700 }, { "epoch": 0.11584454409566518, "grad_norm": 1.298640489578247, "learning_rate": 0.000985536417991575, "loss": 3.434, "step": 1705 }, { "epoch": 0.11618426416632695, "grad_norm": 1.2832605838775635, "learning_rate": 0.0009854939529827422, "loss": 3.7066, "step": 1710 }, { "epoch": 0.11652398423698872, "grad_norm": 1.6455832719802856, "learning_rate": 0.0009854514879739096, "loss": 3.29, "step": 1715 }, { "epoch": 0.1168637043076505, "grad_norm": 1.5087333917617798, "learning_rate": 0.000985409022965077, "loss": 3.7762, "step": 1720 }, { "epoch": 0.11720342437831227, "grad_norm": 1.6693843603134155, "learning_rate": 0.000985366557956244, "loss": 3.4438, "step": 1725 }, { "epoch": 0.11754314444897404, "grad_norm": 1.596191167831421, "learning_rate": 0.0009853240929474114, "loss": 3.475, "step": 1730 }, { "epoch": 0.11788286451963582, "grad_norm": 2.212299346923828, "learning_rate": 0.0009852816279385787, "loss": 3.5831, "step": 1735 }, { "epoch": 0.1182225845902976, "grad_norm": 1.6261988878250122, "learning_rate": 0.0009852391629297458, "loss": 3.6697, "step": 1740 }, { "epoch": 0.11856230466095936, "grad_norm": 1.4875317811965942, "learning_rate": 0.0009851966979209132, "loss": 3.8372, "step": 1745 }, { "epoch": 0.11890202473162115, "grad_norm": 1.4666495323181152, "learning_rate": 
0.0009851542329120805, "loss": 3.6312, "step": 1750 }, { "epoch": 0.11924174480228292, "grad_norm": 1.576835036277771, "learning_rate": 0.0009851117679032478, "loss": 3.7334, "step": 1755 }, { "epoch": 0.11958146487294469, "grad_norm": 1.4243022203445435, "learning_rate": 0.000985069302894415, "loss": 3.7569, "step": 1760 }, { "epoch": 0.11992118494360647, "grad_norm": 1.6233004331588745, "learning_rate": 0.0009850268378855823, "loss": 3.6194, "step": 1765 }, { "epoch": 0.12026090501426824, "grad_norm": 1.5484246015548706, "learning_rate": 0.0009849843728767496, "loss": 3.6621, "step": 1770 }, { "epoch": 0.12060062508493002, "grad_norm": 1.596327781677246, "learning_rate": 0.0009849419078679167, "loss": 3.5639, "step": 1775 }, { "epoch": 0.1209403451555918, "grad_norm": 1.4914765357971191, "learning_rate": 0.0009848994428590843, "loss": 3.6827, "step": 1780 }, { "epoch": 0.12128006522625356, "grad_norm": 1.5215301513671875, "learning_rate": 0.0009848569778502514, "loss": 3.8319, "step": 1785 }, { "epoch": 0.12161978529691535, "grad_norm": 1.8367743492126465, "learning_rate": 0.0009848145128414187, "loss": 3.8227, "step": 1790 }, { "epoch": 0.12195950536757712, "grad_norm": 1.5875188112258911, "learning_rate": 0.000984772047832586, "loss": 3.4961, "step": 1795 }, { "epoch": 0.12229922543823889, "grad_norm": 1.4967621564865112, "learning_rate": 0.0009847295828237532, "loss": 3.536, "step": 1800 }, { "epoch": 0.12263894550890067, "grad_norm": 1.6719038486480713, "learning_rate": 0.0009846871178149205, "loss": 3.6564, "step": 1805 }, { "epoch": 0.12297866557956244, "grad_norm": 1.381834626197815, "learning_rate": 0.0009846446528060878, "loss": 3.5881, "step": 1810 }, { "epoch": 0.12331838565022421, "grad_norm": 1.60707688331604, "learning_rate": 0.0009846021877972552, "loss": 3.4669, "step": 1815 }, { "epoch": 0.123658105720886, "grad_norm": 1.5398857593536377, "learning_rate": 0.0009845597227884223, "loss": 3.5731, "step": 1820 }, { "epoch": 0.12399782579154776, 
"grad_norm": 1.5795985460281372, "learning_rate": 0.0009845172577795896, "loss": 3.6247, "step": 1825 }, { "epoch": 0.12433754586220953, "grad_norm": 1.272557020187378, "learning_rate": 0.000984474792770757, "loss": 3.6432, "step": 1830 }, { "epoch": 0.12467726593287132, "grad_norm": 1.7734211683273315, "learning_rate": 0.000984432327761924, "loss": 3.6241, "step": 1835 }, { "epoch": 0.1250169860035331, "grad_norm": 1.742016077041626, "learning_rate": 0.0009843898627530914, "loss": 3.6881, "step": 1840 }, { "epoch": 0.12535670607419486, "grad_norm": 1.783451795578003, "learning_rate": 0.0009843473977442588, "loss": 3.5275, "step": 1845 }, { "epoch": 0.12569642614485663, "grad_norm": 1.7526910305023193, "learning_rate": 0.000984304932735426, "loss": 3.6957, "step": 1850 }, { "epoch": 0.12603614621551842, "grad_norm": 1.5722473859786987, "learning_rate": 0.0009842624677265934, "loss": 3.5564, "step": 1855 }, { "epoch": 0.1263758662861802, "grad_norm": 1.6460167169570923, "learning_rate": 0.0009842200027177605, "loss": 3.6688, "step": 1860 }, { "epoch": 0.12671558635684196, "grad_norm": 1.5810211896896362, "learning_rate": 0.0009841775377089279, "loss": 3.5373, "step": 1865 }, { "epoch": 0.12705530642750373, "grad_norm": 1.3832409381866455, "learning_rate": 0.0009841350727000952, "loss": 3.7114, "step": 1870 }, { "epoch": 0.1273950264981655, "grad_norm": 1.363203525543213, "learning_rate": 0.0009840926076912623, "loss": 3.5152, "step": 1875 }, { "epoch": 0.12773474656882727, "grad_norm": 1.695926308631897, "learning_rate": 0.0009840501426824297, "loss": 3.6585, "step": 1880 }, { "epoch": 0.12807446663948907, "grad_norm": 1.3788173198699951, "learning_rate": 0.000984007677673597, "loss": 3.7423, "step": 1885 }, { "epoch": 0.12841418671015084, "grad_norm": 1.6451996564865112, "learning_rate": 0.0009839652126647643, "loss": 3.1701, "step": 1890 }, { "epoch": 0.1287539067808126, "grad_norm": 1.5548279285430908, "learning_rate": 0.0009839227476559314, "loss": 3.6702, 
"step": 1895 }, { "epoch": 0.12909362685147438, "grad_norm": 1.4653241634368896, "learning_rate": 0.0009838802826470988, "loss": 3.5132, "step": 1900 }, { "epoch": 0.12943334692213615, "grad_norm": 1.4085623025894165, "learning_rate": 0.0009838378176382661, "loss": 3.3914, "step": 1905 }, { "epoch": 0.12977306699279795, "grad_norm": 1.4944549798965454, "learning_rate": 0.0009837953526294332, "loss": 3.6733, "step": 1910 }, { "epoch": 0.13011278706345972, "grad_norm": 1.2528958320617676, "learning_rate": 0.0009837528876206006, "loss": 3.6938, "step": 1915 }, { "epoch": 0.1304525071341215, "grad_norm": 1.7908958196640015, "learning_rate": 0.000983710422611768, "loss": 3.4692, "step": 1920 }, { "epoch": 0.13079222720478326, "grad_norm": 1.563193917274475, "learning_rate": 0.0009836679576029352, "loss": 3.6949, "step": 1925 }, { "epoch": 0.13113194727544503, "grad_norm": 1.8266597986221313, "learning_rate": 0.0009836254925941026, "loss": 3.7263, "step": 1930 }, { "epoch": 0.1314716673461068, "grad_norm": 1.884680986404419, "learning_rate": 0.0009835830275852697, "loss": 3.5584, "step": 1935 }, { "epoch": 0.1318113874167686, "grad_norm": 1.3397033214569092, "learning_rate": 0.000983540562576437, "loss": 3.176, "step": 1940 }, { "epoch": 0.13215110748743036, "grad_norm": 2.0837697982788086, "learning_rate": 0.0009834980975676044, "loss": 3.5171, "step": 1945 }, { "epoch": 0.13249082755809213, "grad_norm": 3.1914007663726807, "learning_rate": 0.0009834556325587715, "loss": 3.6832, "step": 1950 }, { "epoch": 0.1328305476287539, "grad_norm": 1.4502021074295044, "learning_rate": 0.000983413167549939, "loss": 3.5734, "step": 1955 }, { "epoch": 0.13317026769941567, "grad_norm": 1.6925923824310303, "learning_rate": 0.0009833707025411061, "loss": 3.6637, "step": 1960 }, { "epoch": 0.13350998777007744, "grad_norm": 1.6796473264694214, "learning_rate": 0.0009833282375322735, "loss": 3.3099, "step": 1965 }, { "epoch": 0.13384970784073924, "grad_norm": 1.9272830486297607, 
"learning_rate": 0.0009832857725234408, "loss": 3.5751, "step": 1970 }, { "epoch": 0.134189427911401, "grad_norm": 1.4302752017974854, "learning_rate": 0.000983243307514608, "loss": 3.7596, "step": 1975 }, { "epoch": 0.13452914798206278, "grad_norm": 1.6137275695800781, "learning_rate": 0.0009832008425057753, "loss": 3.5431, "step": 1980 }, { "epoch": 0.13486886805272455, "grad_norm": 1.4306612014770508, "learning_rate": 0.0009831583774969426, "loss": 3.703, "step": 1985 }, { "epoch": 0.13520858812338632, "grad_norm": 1.7709165811538696, "learning_rate": 0.00098311591248811, "loss": 3.6164, "step": 1990 }, { "epoch": 0.13554830819404812, "grad_norm": 1.4961203336715698, "learning_rate": 0.000983073447479277, "loss": 3.6668, "step": 1995 }, { "epoch": 0.1358880282647099, "grad_norm": 1.7368316650390625, "learning_rate": 0.0009830309824704444, "loss": 3.5135, "step": 2000 }, { "epoch": 0.13622774833537166, "grad_norm": 1.8079270124435425, "learning_rate": 0.0009829885174616117, "loss": 3.5255, "step": 2005 }, { "epoch": 0.13656746840603343, "grad_norm": 1.4980137348175049, "learning_rate": 0.0009829460524527788, "loss": 3.8637, "step": 2010 }, { "epoch": 0.1369071884766952, "grad_norm": 1.3247063159942627, "learning_rate": 0.0009829035874439462, "loss": 3.6331, "step": 2015 }, { "epoch": 0.13724690854735697, "grad_norm": 1.8562628030776978, "learning_rate": 0.0009828611224351135, "loss": 3.8494, "step": 2020 }, { "epoch": 0.13758662861801876, "grad_norm": 1.4243454933166504, "learning_rate": 0.0009828186574262808, "loss": 3.5358, "step": 2025 }, { "epoch": 0.13792634868868053, "grad_norm": 2.3523330688476562, "learning_rate": 0.0009827761924174482, "loss": 3.4978, "step": 2030 }, { "epoch": 0.1382660687593423, "grad_norm": 1.5877039432525635, "learning_rate": 0.0009827337274086153, "loss": 3.8544, "step": 2035 }, { "epoch": 0.13860578883000407, "grad_norm": 1.2510793209075928, "learning_rate": 0.0009826912623997826, "loss": 3.6947, "step": 2040 }, { "epoch": 
0.13894550890066584, "grad_norm": 1.249427318572998, "learning_rate": 0.00098264879739095, "loss": 3.6374, "step": 2045 }, { "epoch": 0.1392852289713276, "grad_norm": 1.485474705696106, "learning_rate": 0.000982606332382117, "loss": 3.6277, "step": 2050 }, { "epoch": 0.1396249490419894, "grad_norm": 1.5889174938201904, "learning_rate": 0.0009825638673732844, "loss": 3.7389, "step": 2055 }, { "epoch": 0.13996466911265118, "grad_norm": 1.4678095579147339, "learning_rate": 0.0009825214023644517, "loss": 3.7397, "step": 2060 }, { "epoch": 0.14030438918331295, "grad_norm": 1.7524747848510742, "learning_rate": 0.000982478937355619, "loss": 3.7461, "step": 2065 }, { "epoch": 0.14064410925397472, "grad_norm": 1.6985596418380737, "learning_rate": 0.0009824364723467862, "loss": 3.5837, "step": 2070 }, { "epoch": 0.1409838293246365, "grad_norm": 1.6223387718200684, "learning_rate": 0.0009823940073379535, "loss": 3.5057, "step": 2075 }, { "epoch": 0.1413235493952983, "grad_norm": 1.1698862314224243, "learning_rate": 0.0009823515423291209, "loss": 3.6639, "step": 2080 }, { "epoch": 0.14166326946596006, "grad_norm": 1.496017336845398, "learning_rate": 0.000982309077320288, "loss": 3.7473, "step": 2085 }, { "epoch": 0.14200298953662183, "grad_norm": 1.5143980979919434, "learning_rate": 0.0009822666123114553, "loss": 3.5946, "step": 2090 }, { "epoch": 0.1423427096072836, "grad_norm": 1.366355299949646, "learning_rate": 0.0009822241473026226, "loss": 3.6816, "step": 2095 }, { "epoch": 0.14268242967794537, "grad_norm": 1.3716347217559814, "learning_rate": 0.00098218168229379, "loss": 3.5324, "step": 2100 }, { "epoch": 0.14302214974860714, "grad_norm": 1.6487678289413452, "learning_rate": 0.0009821392172849573, "loss": 3.7119, "step": 2105 }, { "epoch": 0.14336186981926893, "grad_norm": 1.4769998788833618, "learning_rate": 0.0009820967522761244, "loss": 3.651, "step": 2110 }, { "epoch": 0.1437015898899307, "grad_norm": 1.6071019172668457, "learning_rate": 0.0009820542872672918, 
"loss": 3.2595, "step": 2115 }, { "epoch": 0.14404130996059247, "grad_norm": 1.5638588666915894, "learning_rate": 0.000982011822258459, "loss": 3.8102, "step": 2120 }, { "epoch": 0.14438103003125424, "grad_norm": 1.6208633184432983, "learning_rate": 0.0009819693572496262, "loss": 3.6031, "step": 2125 }, { "epoch": 0.144720750101916, "grad_norm": 1.7414592504501343, "learning_rate": 0.0009819268922407936, "loss": 3.6251, "step": 2130 }, { "epoch": 0.14506047017257778, "grad_norm": 1.9641668796539307, "learning_rate": 0.0009818844272319609, "loss": 3.6077, "step": 2135 }, { "epoch": 0.14540019024323958, "grad_norm": 1.5200024843215942, "learning_rate": 0.0009818419622231282, "loss": 3.5732, "step": 2140 }, { "epoch": 0.14573991031390135, "grad_norm": 1.3843169212341309, "learning_rate": 0.0009817994972142953, "loss": 3.6454, "step": 2145 }, { "epoch": 0.14607963038456312, "grad_norm": 1.6818506717681885, "learning_rate": 0.0009817570322054627, "loss": 3.4587, "step": 2150 }, { "epoch": 0.1464193504552249, "grad_norm": 1.4759942293167114, "learning_rate": 0.00098171456719663, "loss": 4.0735, "step": 2155 }, { "epoch": 0.14675907052588666, "grad_norm": 1.4700796604156494, "learning_rate": 0.0009816721021877971, "loss": 3.6081, "step": 2160 }, { "epoch": 0.14709879059654846, "grad_norm": 2.3422741889953613, "learning_rate": 0.0009816296371789647, "loss": 3.5907, "step": 2165 }, { "epoch": 0.14743851066721023, "grad_norm": 1.6670564413070679, "learning_rate": 0.0009815871721701318, "loss": 3.5018, "step": 2170 }, { "epoch": 0.147778230737872, "grad_norm": 1.5539835691452026, "learning_rate": 0.0009815447071612991, "loss": 3.823, "step": 2175 }, { "epoch": 0.14811795080853377, "grad_norm": 1.4906344413757324, "learning_rate": 0.0009815022421524665, "loss": 3.5977, "step": 2180 }, { "epoch": 0.14845767087919554, "grad_norm": 1.5581475496292114, "learning_rate": 0.0009814597771436336, "loss": 3.6575, "step": 2185 }, { "epoch": 0.1487973909498573, "grad_norm": 
1.5716432332992554, "learning_rate": 0.000981417312134801, "loss": 3.678, "step": 2190 }, { "epoch": 0.1491371110205191, "grad_norm": 1.514636754989624, "learning_rate": 0.0009813748471259682, "loss": 3.6173, "step": 2195 }, { "epoch": 0.14947683109118087, "grad_norm": 1.6809401512145996, "learning_rate": 0.0009813323821171356, "loss": 3.6701, "step": 2200 }, { "epoch": 0.14981655116184264, "grad_norm": 1.434201717376709, "learning_rate": 0.0009812899171083027, "loss": 3.6482, "step": 2205 }, { "epoch": 0.1501562712325044, "grad_norm": 1.415789246559143, "learning_rate": 0.00098124745209947, "loss": 3.8108, "step": 2210 }, { "epoch": 0.15049599130316618, "grad_norm": 1.4784072637557983, "learning_rate": 0.0009812049870906374, "loss": 3.6888, "step": 2215 }, { "epoch": 0.15083571137382795, "grad_norm": 1.5312238931655884, "learning_rate": 0.0009811625220818045, "loss": 3.7092, "step": 2220 }, { "epoch": 0.15117543144448975, "grad_norm": 1.4530200958251953, "learning_rate": 0.0009811200570729718, "loss": 3.9502, "step": 2225 }, { "epoch": 0.15151515151515152, "grad_norm": 1.6811603307724, "learning_rate": 0.0009810775920641392, "loss": 3.489, "step": 2230 }, { "epoch": 0.1518548715858133, "grad_norm": 1.4092496633529663, "learning_rate": 0.0009810351270553065, "loss": 3.7487, "step": 2235 }, { "epoch": 0.15219459165647506, "grad_norm": 1.3281621932983398, "learning_rate": 0.0009809926620464738, "loss": 3.7677, "step": 2240 }, { "epoch": 0.15253431172713683, "grad_norm": 1.184536099433899, "learning_rate": 0.000980950197037641, "loss": 3.7379, "step": 2245 }, { "epoch": 0.15287403179779863, "grad_norm": 1.6326584815979004, "learning_rate": 0.0009809077320288083, "loss": 3.4478, "step": 2250 }, { "epoch": 0.1532137518684604, "grad_norm": 1.5946354866027832, "learning_rate": 0.0009808652670199756, "loss": 3.7436, "step": 2255 }, { "epoch": 0.15355347193912217, "grad_norm": 1.90010404586792, "learning_rate": 0.0009808228020111427, "loss": 3.8395, "step": 2260 }, { 
"epoch": 0.15389319200978394, "grad_norm": 1.3810259103775024, "learning_rate": 0.00098078033700231, "loss": 3.6795, "step": 2265 }, { "epoch": 0.1542329120804457, "grad_norm": 1.4807720184326172, "learning_rate": 0.0009807378719934774, "loss": 3.7352, "step": 2270 }, { "epoch": 0.15457263215110748, "grad_norm": 1.760892391204834, "learning_rate": 0.0009806954069846447, "loss": 3.5944, "step": 2275 }, { "epoch": 0.15491235222176927, "grad_norm": 1.82829749584198, "learning_rate": 0.0009806529419758118, "loss": 3.4511, "step": 2280 }, { "epoch": 0.15525207229243104, "grad_norm": 1.5986131429672241, "learning_rate": 0.0009806104769669792, "loss": 3.5287, "step": 2285 }, { "epoch": 0.1555917923630928, "grad_norm": 1.57267427444458, "learning_rate": 0.0009805680119581465, "loss": 3.5084, "step": 2290 }, { "epoch": 0.15593151243375458, "grad_norm": 1.7204653024673462, "learning_rate": 0.0009805255469493139, "loss": 3.593, "step": 2295 }, { "epoch": 0.15627123250441635, "grad_norm": 1.772196650505066, "learning_rate": 0.0009804830819404812, "loss": 3.5722, "step": 2300 }, { "epoch": 0.15661095257507815, "grad_norm": 1.689474105834961, "learning_rate": 0.0009804406169316483, "loss": 3.7607, "step": 2305 }, { "epoch": 0.15695067264573992, "grad_norm": 1.6052523851394653, "learning_rate": 0.0009803981519228156, "loss": 3.4318, "step": 2310 }, { "epoch": 0.1572903927164017, "grad_norm": 1.4640703201293945, "learning_rate": 0.000980355686913983, "loss": 3.6623, "step": 2315 }, { "epoch": 0.15763011278706346, "grad_norm": 1.6628209352493286, "learning_rate": 0.00098031322190515, "loss": 3.6323, "step": 2320 }, { "epoch": 0.15796983285772523, "grad_norm": 1.6328890323638916, "learning_rate": 0.0009802707568963174, "loss": 3.7948, "step": 2325 }, { "epoch": 0.158309552928387, "grad_norm": 1.3697395324707031, "learning_rate": 0.0009802282918874848, "loss": 3.49, "step": 2330 }, { "epoch": 0.1586492729990488, "grad_norm": 1.4242842197418213, "learning_rate": 0.000980185826878652, 
"loss": 3.5727, "step": 2335 }, { "epoch": 0.15898899306971057, "grad_norm": 1.350804090499878, "learning_rate": 0.0009801433618698194, "loss": 3.7307, "step": 2340 }, { "epoch": 0.15932871314037234, "grad_norm": 1.4764645099639893, "learning_rate": 0.0009801008968609865, "loss": 3.5755, "step": 2345 }, { "epoch": 0.1596684332110341, "grad_norm": 1.7613804340362549, "learning_rate": 0.0009800584318521539, "loss": 3.8836, "step": 2350 }, { "epoch": 0.16000815328169588, "grad_norm": 1.3071836233139038, "learning_rate": 0.0009800159668433212, "loss": 3.6969, "step": 2355 }, { "epoch": 0.16034787335235764, "grad_norm": 1.4613136053085327, "learning_rate": 0.0009799735018344883, "loss": 3.6523, "step": 2360 }, { "epoch": 0.16068759342301944, "grad_norm": 1.4233888387680054, "learning_rate": 0.0009799310368256557, "loss": 3.3493, "step": 2365 }, { "epoch": 0.1610273134936812, "grad_norm": 1.8347879648208618, "learning_rate": 0.000979888571816823, "loss": 3.5762, "step": 2370 }, { "epoch": 0.16136703356434298, "grad_norm": 1.3644919395446777, "learning_rate": 0.0009798461068079903, "loss": 3.6458, "step": 2375 }, { "epoch": 0.16170675363500475, "grad_norm": 1.7376693487167358, "learning_rate": 0.0009798036417991574, "loss": 3.5453, "step": 2380 }, { "epoch": 0.16204647370566652, "grad_norm": 1.5231566429138184, "learning_rate": 0.0009797611767903248, "loss": 3.5549, "step": 2385 }, { "epoch": 0.16238619377632832, "grad_norm": 1.5512444972991943, "learning_rate": 0.0009797187117814921, "loss": 3.7112, "step": 2390 }, { "epoch": 0.1627259138469901, "grad_norm": 2.1508119106292725, "learning_rate": 0.0009796762467726592, "loss": 3.5026, "step": 2395 }, { "epoch": 0.16306563391765186, "grad_norm": 1.413411259651184, "learning_rate": 0.0009796337817638266, "loss": 3.7132, "step": 2400 }, { "epoch": 0.16340535398831363, "grad_norm": 1.2609660625457764, "learning_rate": 0.000979591316754994, "loss": 3.448, "step": 2405 }, { "epoch": 0.1637450740589754, "grad_norm": 
1.2715903520584106, "learning_rate": 0.0009795488517461612, "loss": 3.8045, "step": 2410 }, { "epoch": 0.16408479412963717, "grad_norm": 1.5833157300949097, "learning_rate": 0.0009795063867373286, "loss": 3.7088, "step": 2415 }, { "epoch": 0.16442451420029897, "grad_norm": 2.219149351119995, "learning_rate": 0.0009794639217284957, "loss": 3.6086, "step": 2420 }, { "epoch": 0.16476423427096074, "grad_norm": 1.5064111948013306, "learning_rate": 0.000979421456719663, "loss": 3.5336, "step": 2425 }, { "epoch": 0.1651039543416225, "grad_norm": 1.7809966802597046, "learning_rate": 0.0009793789917108304, "loss": 3.7051, "step": 2430 }, { "epoch": 0.16544367441228428, "grad_norm": 1.5655831098556519, "learning_rate": 0.0009793365267019975, "loss": 3.8463, "step": 2435 }, { "epoch": 0.16578339448294604, "grad_norm": 1.5614906549453735, "learning_rate": 0.0009792940616931648, "loss": 3.642, "step": 2440 }, { "epoch": 0.16612311455360781, "grad_norm": 2.362762928009033, "learning_rate": 0.0009792515966843321, "loss": 3.6891, "step": 2445 }, { "epoch": 0.1664628346242696, "grad_norm": 1.501413106918335, "learning_rate": 0.0009792091316754995, "loss": 3.6171, "step": 2450 }, { "epoch": 0.16680255469493138, "grad_norm": 2.256915807723999, "learning_rate": 0.0009791666666666666, "loss": 3.6768, "step": 2455 }, { "epoch": 0.16714227476559315, "grad_norm": 1.7794028520584106, "learning_rate": 0.000979124201657834, "loss": 3.6435, "step": 2460 }, { "epoch": 0.16748199483625492, "grad_norm": 1.6963998079299927, "learning_rate": 0.0009790817366490013, "loss": 3.6189, "step": 2465 }, { "epoch": 0.1678217149069167, "grad_norm": 1.8519346714019775, "learning_rate": 0.0009790392716401684, "loss": 3.7108, "step": 2470 }, { "epoch": 0.1681614349775785, "grad_norm": 2.1102397441864014, "learning_rate": 0.000978996806631336, "loss": 3.7544, "step": 2475 }, { "epoch": 0.16850115504824026, "grad_norm": 1.5748518705368042, "learning_rate": 0.000978954341622503, "loss": 3.674, "step": 2480 }, { 
"epoch": 0.16884087511890203, "grad_norm": 1.7097774744033813, "learning_rate": 0.0009789118766136704, "loss": 3.6847, "step": 2485 }, { "epoch": 0.1691805951895638, "grad_norm": 1.2228971719741821, "learning_rate": 0.0009788694116048377, "loss": 3.5979, "step": 2490 }, { "epoch": 0.16952031526022557, "grad_norm": 1.4365841150283813, "learning_rate": 0.0009788269465960048, "loss": 3.6751, "step": 2495 }, { "epoch": 0.16986003533088734, "grad_norm": 1.4034360647201538, "learning_rate": 0.0009787844815871722, "loss": 3.8977, "step": 2500 }, { "epoch": 0.17019975540154914, "grad_norm": 1.4915817975997925, "learning_rate": 0.0009787420165783395, "loss": 3.7726, "step": 2505 }, { "epoch": 0.1705394754722109, "grad_norm": 1.4016443490982056, "learning_rate": 0.0009786995515695068, "loss": 3.8148, "step": 2510 }, { "epoch": 0.17087919554287267, "grad_norm": 1.1967729330062866, "learning_rate": 0.000978657086560674, "loss": 3.7107, "step": 2515 }, { "epoch": 0.17121891561353444, "grad_norm": 1.2432351112365723, "learning_rate": 0.0009786146215518413, "loss": 3.7886, "step": 2520 }, { "epoch": 0.17155863568419621, "grad_norm": 1.4306658506393433, "learning_rate": 0.0009785721565430086, "loss": 3.8005, "step": 2525 }, { "epoch": 0.17189835575485798, "grad_norm": 1.6265136003494263, "learning_rate": 0.0009785296915341757, "loss": 3.5877, "step": 2530 }, { "epoch": 0.17223807582551978, "grad_norm": 1.7646020650863647, "learning_rate": 0.000978487226525343, "loss": 3.8095, "step": 2535 }, { "epoch": 0.17257779589618155, "grad_norm": 1.6824792623519897, "learning_rate": 0.0009784447615165104, "loss": 3.4917, "step": 2540 }, { "epoch": 0.17291751596684332, "grad_norm": 1.4893559217453003, "learning_rate": 0.0009784022965076777, "loss": 3.5358, "step": 2545 }, { "epoch": 0.1732572360375051, "grad_norm": 1.3174116611480713, "learning_rate": 0.000978359831498845, "loss": 3.7545, "step": 2550 }, { "epoch": 0.17359695610816686, "grad_norm": 1.5271480083465576, "learning_rate": 
0.0009783173664900122, "loss": 3.466, "step": 2555 }, { "epoch": 0.17393667617882866, "grad_norm": 1.5200154781341553, "learning_rate": 0.0009782749014811795, "loss": 3.4105, "step": 2560 }, { "epoch": 0.17427639624949043, "grad_norm": 2.1685256958007812, "learning_rate": 0.0009782324364723469, "loss": 3.6261, "step": 2565 }, { "epoch": 0.1746161163201522, "grad_norm": 1.7946327924728394, "learning_rate": 0.000978189971463514, "loss": 3.6576, "step": 2570 }, { "epoch": 0.17495583639081397, "grad_norm": 2.0578863620758057, "learning_rate": 0.0009781475064546813, "loss": 3.6348, "step": 2575 }, { "epoch": 0.17529555646147574, "grad_norm": 1.889652132987976, "learning_rate": 0.0009781050414458487, "loss": 3.7684, "step": 2580 }, { "epoch": 0.1756352765321375, "grad_norm": 1.3674612045288086, "learning_rate": 0.000978062576437016, "loss": 3.4492, "step": 2585 }, { "epoch": 0.1759749966027993, "grad_norm": 1.433403730392456, "learning_rate": 0.000978020111428183, "loss": 3.5226, "step": 2590 }, { "epoch": 0.17631471667346107, "grad_norm": 1.8125085830688477, "learning_rate": 0.0009779776464193504, "loss": 3.624, "step": 2595 }, { "epoch": 0.17665443674412284, "grad_norm": 1.6072306632995605, "learning_rate": 0.0009779351814105178, "loss": 3.46, "step": 2600 }, { "epoch": 0.17699415681478461, "grad_norm": 1.8555312156677246, "learning_rate": 0.0009778927164016849, "loss": 3.949, "step": 2605 }, { "epoch": 0.17733387688544638, "grad_norm": 1.6817352771759033, "learning_rate": 0.0009778502513928522, "loss": 3.6648, "step": 2610 }, { "epoch": 0.17767359695610815, "grad_norm": 1.3218599557876587, "learning_rate": 0.0009778077863840196, "loss": 3.2859, "step": 2615 }, { "epoch": 0.17801331702676995, "grad_norm": 1.7759932279586792, "learning_rate": 0.000977765321375187, "loss": 3.638, "step": 2620 }, { "epoch": 0.17835303709743172, "grad_norm": 1.903882622718811, "learning_rate": 0.0009777228563663542, "loss": 3.5548, "step": 2625 }, { "epoch": 0.1786927571680935, 
"grad_norm": 1.5970239639282227, "learning_rate": 0.0009776803913575213, "loss": 3.4743, "step": 2630 }, { "epoch": 0.17903247723875526, "grad_norm": 1.4655603170394897, "learning_rate": 0.0009776379263486887, "loss": 3.5351, "step": 2635 }, { "epoch": 0.17937219730941703, "grad_norm": 1.5063499212265015, "learning_rate": 0.000977595461339856, "loss": 3.6803, "step": 2640 }, { "epoch": 0.17971191738007883, "grad_norm": 1.2608065605163574, "learning_rate": 0.0009775529963310231, "loss": 3.6894, "step": 2645 }, { "epoch": 0.1800516374507406, "grad_norm": 1.802455186843872, "learning_rate": 0.0009775105313221907, "loss": 3.8698, "step": 2650 }, { "epoch": 0.18039135752140237, "grad_norm": 1.72782564163208, "learning_rate": 0.0009774680663133578, "loss": 3.6442, "step": 2655 }, { "epoch": 0.18073107759206414, "grad_norm": 1.453452467918396, "learning_rate": 0.0009774256013045251, "loss": 3.5482, "step": 2660 }, { "epoch": 0.1810707976627259, "grad_norm": 1.700177550315857, "learning_rate": 0.0009773831362956925, "loss": 3.751, "step": 2665 }, { "epoch": 0.18141051773338768, "grad_norm": 1.9176737070083618, "learning_rate": 0.0009773406712868596, "loss": 3.5671, "step": 2670 }, { "epoch": 0.18175023780404947, "grad_norm": 2.1612565517425537, "learning_rate": 0.000977298206278027, "loss": 3.5475, "step": 2675 }, { "epoch": 0.18208995787471124, "grad_norm": 1.8925360441207886, "learning_rate": 0.0009772557412691943, "loss": 3.7452, "step": 2680 }, { "epoch": 0.18242967794537301, "grad_norm": 1.1991963386535645, "learning_rate": 0.0009772132762603616, "loss": 3.4967, "step": 2685 }, { "epoch": 0.18276939801603478, "grad_norm": 1.4775481224060059, "learning_rate": 0.0009771708112515287, "loss": 3.5963, "step": 2690 }, { "epoch": 0.18310911808669655, "grad_norm": 1.4831944704055786, "learning_rate": 0.000977128346242696, "loss": 3.2257, "step": 2695 }, { "epoch": 0.18344883815735832, "grad_norm": 1.6668437719345093, "learning_rate": 0.0009770858812338634, "loss": 3.7864, 
"step": 2700 }, { "epoch": 0.18378855822802012, "grad_norm": 1.7211939096450806, "learning_rate": 0.0009770434162250305, "loss": 3.6545, "step": 2705 }, { "epoch": 0.1841282782986819, "grad_norm": 1.717040777206421, "learning_rate": 0.0009770009512161978, "loss": 3.5201, "step": 2710 }, { "epoch": 0.18446799836934366, "grad_norm": 1.6626328229904175, "learning_rate": 0.0009769584862073652, "loss": 3.7098, "step": 2715 }, { "epoch": 0.18480771844000543, "grad_norm": 2.0395946502685547, "learning_rate": 0.0009769160211985325, "loss": 3.6739, "step": 2720 }, { "epoch": 0.1851474385106672, "grad_norm": 1.64044988155365, "learning_rate": 0.0009768735561896998, "loss": 3.7125, "step": 2725 }, { "epoch": 0.185487158581329, "grad_norm": 1.8709701299667358, "learning_rate": 0.000976831091180867, "loss": 3.864, "step": 2730 }, { "epoch": 0.18582687865199077, "grad_norm": 1.5390570163726807, "learning_rate": 0.0009767886261720343, "loss": 3.7712, "step": 2735 }, { "epoch": 0.18616659872265254, "grad_norm": 1.3606011867523193, "learning_rate": 0.0009767461611632016, "loss": 3.4496, "step": 2740 }, { "epoch": 0.1865063187933143, "grad_norm": 1.3847099542617798, "learning_rate": 0.0009767036961543687, "loss": 3.5368, "step": 2745 }, { "epoch": 0.18684603886397608, "grad_norm": 1.908473014831543, "learning_rate": 0.000976661231145536, "loss": 3.6071, "step": 2750 }, { "epoch": 0.18718575893463785, "grad_norm": 1.7166317701339722, "learning_rate": 0.0009766187661367034, "loss": 3.8897, "step": 2755 }, { "epoch": 0.18752547900529964, "grad_norm": 1.6361627578735352, "learning_rate": 0.0009765763011278707, "loss": 3.7046, "step": 2760 }, { "epoch": 0.18786519907596141, "grad_norm": 1.6097424030303955, "learning_rate": 0.0009765338361190379, "loss": 3.7124, "step": 2765 }, { "epoch": 0.18820491914662318, "grad_norm": 1.2336642742156982, "learning_rate": 0.0009764913711102053, "loss": 3.6199, "step": 2770 }, { "epoch": 0.18854463921728495, "grad_norm": 1.6966456174850464, 
"learning_rate": 0.0009764489061013725, "loss": 3.7765, "step": 2775 }, { "epoch": 0.18888435928794672, "grad_norm": 1.3463594913482666, "learning_rate": 0.0009764064410925397, "loss": 3.5501, "step": 2780 }, { "epoch": 0.1892240793586085, "grad_norm": 1.522887110710144, "learning_rate": 0.0009763639760837071, "loss": 3.3813, "step": 2785 }, { "epoch": 0.1895637994292703, "grad_norm": 1.6350263357162476, "learning_rate": 0.0009763215110748743, "loss": 3.6281, "step": 2790 }, { "epoch": 0.18990351949993206, "grad_norm": 1.727765679359436, "learning_rate": 0.0009762790460660415, "loss": 3.5397, "step": 2795 }, { "epoch": 0.19024323957059383, "grad_norm": 1.7535374164581299, "learning_rate": 0.0009762365810572089, "loss": 3.5596, "step": 2800 }, { "epoch": 0.1905829596412556, "grad_norm": 1.9534871578216553, "learning_rate": 0.0009761941160483762, "loss": 3.6847, "step": 2805 }, { "epoch": 0.19092267971191737, "grad_norm": 1.4299077987670898, "learning_rate": 0.0009761516510395434, "loss": 3.26, "step": 2810 }, { "epoch": 0.19126239978257917, "grad_norm": 1.9872045516967773, "learning_rate": 0.0009761091860307108, "loss": 3.6732, "step": 2815 }, { "epoch": 0.19160211985324094, "grad_norm": 1.6322970390319824, "learning_rate": 0.000976066721021878, "loss": 3.8256, "step": 2820 }, { "epoch": 0.1919418399239027, "grad_norm": 1.3001114130020142, "learning_rate": 0.0009760242560130452, "loss": 3.5386, "step": 2825 }, { "epoch": 0.19228155999456448, "grad_norm": 1.3839998245239258, "learning_rate": 0.0009759817910042125, "loss": 3.8535, "step": 2830 }, { "epoch": 0.19262128006522625, "grad_norm": 1.8503060340881348, "learning_rate": 0.0009759393259953798, "loss": 3.5642, "step": 2835 }, { "epoch": 0.19296100013588802, "grad_norm": 1.7502989768981934, "learning_rate": 0.0009758968609865471, "loss": 3.6501, "step": 2840 }, { "epoch": 0.19330072020654981, "grad_norm": 1.2248778343200684, "learning_rate": 0.0009758543959777144, "loss": 3.7317, "step": 2845 }, { "epoch": 
0.19364044027721158, "grad_norm": 1.273302674293518, "learning_rate": 0.0009758119309688817, "loss": 3.6715, "step": 2850 }, { "epoch": 0.19398016034787335, "grad_norm": 1.6813809871673584, "learning_rate": 0.0009757694659600489, "loss": 3.7491, "step": 2855 }, { "epoch": 0.19431988041853512, "grad_norm": 1.5744856595993042, "learning_rate": 0.0009757270009512162, "loss": 3.5999, "step": 2860 }, { "epoch": 0.1946596004891969, "grad_norm": 2.306447982788086, "learning_rate": 0.0009756845359423835, "loss": 3.6704, "step": 2865 }, { "epoch": 0.19499932055985866, "grad_norm": 1.8247658014297485, "learning_rate": 0.0009756420709335507, "loss": 3.5323, "step": 2870 }, { "epoch": 0.19533904063052046, "grad_norm": 1.8864924907684326, "learning_rate": 0.0009755996059247181, "loss": 3.5689, "step": 2875 }, { "epoch": 0.19567876070118223, "grad_norm": 1.6240260601043701, "learning_rate": 0.0009755571409158853, "loss": 3.7785, "step": 2880 }, { "epoch": 0.196018480771844, "grad_norm": 1.6396430730819702, "learning_rate": 0.0009755146759070526, "loss": 3.6095, "step": 2885 }, { "epoch": 0.19635820084250577, "grad_norm": 1.3351521492004395, "learning_rate": 0.0009754722108982199, "loss": 3.045, "step": 2890 }, { "epoch": 0.19669792091316754, "grad_norm": 2.159721851348877, "learning_rate": 0.0009754297458893871, "loss": 3.3302, "step": 2895 }, { "epoch": 0.19703764098382934, "grad_norm": 1.9599130153656006, "learning_rate": 0.0009753872808805544, "loss": 3.6946, "step": 2900 }, { "epoch": 0.1973773610544911, "grad_norm": 2.012549877166748, "learning_rate": 0.0009753448158717217, "loss": 3.4619, "step": 2905 }, { "epoch": 0.19771708112515288, "grad_norm": 2.1888575553894043, "learning_rate": 0.000975302350862889, "loss": 3.6465, "step": 2910 }, { "epoch": 0.19805680119581465, "grad_norm": 1.7850854396820068, "learning_rate": 0.0009752598858540563, "loss": 3.6091, "step": 2915 }, { "epoch": 0.19839652126647642, "grad_norm": 1.5179373025894165, "learning_rate": 
0.0009752174208452236, "loss": 3.637, "step": 2920 }, { "epoch": 0.1987362413371382, "grad_norm": 1.4729807376861572, "learning_rate": 0.0009751749558363908, "loss": 3.5974, "step": 2925 }, { "epoch": 0.19907596140779998, "grad_norm": 1.926791787147522, "learning_rate": 0.000975132490827558, "loss": 3.4231, "step": 2930 }, { "epoch": 0.19941568147846175, "grad_norm": 1.7324408292770386, "learning_rate": 0.0009750900258187254, "loss": 3.7765, "step": 2935 }, { "epoch": 0.19975540154912352, "grad_norm": 1.963240623474121, "learning_rate": 0.0009750475608098926, "loss": 3.3694, "step": 2940 }, { "epoch": 0.2000951216197853, "grad_norm": 1.822563648223877, "learning_rate": 0.0009750050958010599, "loss": 3.7174, "step": 2945 }, { "epoch": 0.20043484169044706, "grad_norm": 1.7617613077163696, "learning_rate": 0.0009749626307922273, "loss": 3.7488, "step": 2950 }, { "epoch": 0.20077456176110883, "grad_norm": 1.4578311443328857, "learning_rate": 0.0009749201657833945, "loss": 3.942, "step": 2955 }, { "epoch": 0.20111428183177063, "grad_norm": 1.9655464887619019, "learning_rate": 0.0009748777007745617, "loss": 3.7811, "step": 2960 }, { "epoch": 0.2014540019024324, "grad_norm": 1.3440335988998413, "learning_rate": 0.0009748352357657291, "loss": 3.6012, "step": 2965 }, { "epoch": 0.20179372197309417, "grad_norm": 1.5009187459945679, "learning_rate": 0.0009747927707568963, "loss": 3.6189, "step": 2970 }, { "epoch": 0.20213344204375594, "grad_norm": 1.7840576171875, "learning_rate": 0.0009747503057480636, "loss": 3.5828, "step": 2975 }, { "epoch": 0.2024731621144177, "grad_norm": 1.6678260564804077, "learning_rate": 0.000974707840739231, "loss": 3.5884, "step": 2980 }, { "epoch": 0.2028128821850795, "grad_norm": 1.855973720550537, "learning_rate": 0.0009746653757303982, "loss": 3.4608, "step": 2985 }, { "epoch": 0.20315260225574128, "grad_norm": 1.2886548042297363, "learning_rate": 0.0009746229107215655, "loss": 3.5846, "step": 2990 }, { "epoch": 0.20349232232640305, 
"grad_norm": 2.264889717102051, "learning_rate": 0.0009745804457127327, "loss": 3.7039, "step": 2995 }, { "epoch": 0.20383204239706482, "grad_norm": 1.2245405912399292, "learning_rate": 0.0009745379807039, "loss": 3.569, "step": 3000 }, { "epoch": 0.2041717624677266, "grad_norm": 2.050222635269165, "learning_rate": 0.0009744955156950673, "loss": 3.6731, "step": 3005 }, { "epoch": 0.20451148253838836, "grad_norm": 1.6480886936187744, "learning_rate": 0.0009744530506862345, "loss": 3.7254, "step": 3010 }, { "epoch": 0.20485120260905015, "grad_norm": 1.6805343627929688, "learning_rate": 0.0009744105856774019, "loss": 3.729, "step": 3015 }, { "epoch": 0.20519092267971192, "grad_norm": 1.5769882202148438, "learning_rate": 0.0009743681206685692, "loss": 3.6959, "step": 3020 }, { "epoch": 0.2055306427503737, "grad_norm": 1.5858368873596191, "learning_rate": 0.0009743256556597364, "loss": 3.6977, "step": 3025 }, { "epoch": 0.20587036282103546, "grad_norm": 1.8833190202713013, "learning_rate": 0.0009742831906509036, "loss": 3.6785, "step": 3030 }, { "epoch": 0.20621008289169723, "grad_norm": 1.3727554082870483, "learning_rate": 0.000974240725642071, "loss": 3.573, "step": 3035 }, { "epoch": 0.206549802962359, "grad_norm": 1.8456344604492188, "learning_rate": 0.0009741982606332382, "loss": 3.6588, "step": 3040 }, { "epoch": 0.2068895230330208, "grad_norm": 1.7069212198257446, "learning_rate": 0.0009741557956244054, "loss": 3.5512, "step": 3045 }, { "epoch": 0.20722924310368257, "grad_norm": 1.581652045249939, "learning_rate": 0.0009741133306155729, "loss": 3.6082, "step": 3050 }, { "epoch": 0.20756896317434434, "grad_norm": 1.7028703689575195, "learning_rate": 0.0009740708656067401, "loss": 3.4206, "step": 3055 }, { "epoch": 0.2079086832450061, "grad_norm": 1.990170955657959, "learning_rate": 0.0009740284005979073, "loss": 3.643, "step": 3060 }, { "epoch": 0.20824840331566788, "grad_norm": 1.3313696384429932, "learning_rate": 0.0009739859355890747, "loss": 3.4865, "step": 
3065 }, { "epoch": 0.20858812338632968, "grad_norm": 1.5822806358337402, "learning_rate": 0.0009739434705802419, "loss": 3.7569, "step": 3070 }, { "epoch": 0.20892784345699145, "grad_norm": 1.3438655138015747, "learning_rate": 0.0009739010055714091, "loss": 3.4517, "step": 3075 }, { "epoch": 0.20926756352765322, "grad_norm": 1.9917546510696411, "learning_rate": 0.0009738585405625766, "loss": 3.503, "step": 3080 }, { "epoch": 0.209607283598315, "grad_norm": 1.930161476135254, "learning_rate": 0.0009738160755537438, "loss": 3.8194, "step": 3085 }, { "epoch": 0.20994700366897676, "grad_norm": 2.0597267150878906, "learning_rate": 0.000973773610544911, "loss": 3.6311, "step": 3090 }, { "epoch": 0.21028672373963853, "grad_norm": 1.6020134687423706, "learning_rate": 0.0009737311455360783, "loss": 3.7384, "step": 3095 }, { "epoch": 0.21062644381030032, "grad_norm": 1.7552719116210938, "learning_rate": 0.0009736886805272456, "loss": 3.8468, "step": 3100 }, { "epoch": 0.2109661638809621, "grad_norm": 2.1212337017059326, "learning_rate": 0.0009736462155184128, "loss": 3.624, "step": 3105 }, { "epoch": 0.21130588395162386, "grad_norm": 1.7892045974731445, "learning_rate": 0.0009736037505095801, "loss": 3.8513, "step": 3110 }, { "epoch": 0.21164560402228563, "grad_norm": 1.4878493547439575, "learning_rate": 0.0009735612855007475, "loss": 3.6569, "step": 3115 }, { "epoch": 0.2119853240929474, "grad_norm": 2.761162281036377, "learning_rate": 0.0009735188204919147, "loss": 3.7429, "step": 3120 }, { "epoch": 0.21232504416360917, "grad_norm": 1.8270341157913208, "learning_rate": 0.000973476355483082, "loss": 3.3809, "step": 3125 }, { "epoch": 0.21266476423427097, "grad_norm": 1.8215960264205933, "learning_rate": 0.0009734338904742492, "loss": 3.8046, "step": 3130 }, { "epoch": 0.21300448430493274, "grad_norm": 1.78193199634552, "learning_rate": 0.0009733914254654165, "loss": 3.6578, "step": 3135 }, { "epoch": 0.2133442043755945, "grad_norm": 1.4396066665649414, "learning_rate": 
0.0009733489604565838, "loss": 3.5976, "step": 3140 }, { "epoch": 0.21368392444625628, "grad_norm": 1.778668761253357, "learning_rate": 0.000973306495447751, "loss": 3.707, "step": 3145 }, { "epoch": 0.21402364451691805, "grad_norm": 1.567152500152588, "learning_rate": 0.0009732640304389184, "loss": 3.3303, "step": 3150 }, { "epoch": 0.21436336458757985, "grad_norm": 1.894097089767456, "learning_rate": 0.0009732215654300857, "loss": 3.6451, "step": 3155 }, { "epoch": 0.21470308465824162, "grad_norm": 1.6708694696426392, "learning_rate": 0.0009731791004212529, "loss": 3.6178, "step": 3160 }, { "epoch": 0.2150428047289034, "grad_norm": 1.2481448650360107, "learning_rate": 0.0009731366354124202, "loss": 3.6078, "step": 3165 }, { "epoch": 0.21538252479956516, "grad_norm": 2.525425910949707, "learning_rate": 0.0009730941704035875, "loss": 3.5082, "step": 3170 }, { "epoch": 0.21572224487022693, "grad_norm": 1.5190715789794922, "learning_rate": 0.0009730517053947547, "loss": 3.6035, "step": 3175 }, { "epoch": 0.2160619649408887, "grad_norm": 1.4746878147125244, "learning_rate": 0.0009730092403859219, "loss": 3.5659, "step": 3180 }, { "epoch": 0.2164016850115505, "grad_norm": 1.73853600025177, "learning_rate": 0.0009729667753770894, "loss": 3.6335, "step": 3185 }, { "epoch": 0.21674140508221226, "grad_norm": 2.0687599182128906, "learning_rate": 0.0009729243103682566, "loss": 3.8954, "step": 3190 }, { "epoch": 0.21708112515287403, "grad_norm": 1.5243850946426392, "learning_rate": 0.0009728818453594238, "loss": 3.5053, "step": 3195 }, { "epoch": 0.2174208452235358, "grad_norm": 1.7339084148406982, "learning_rate": 0.0009728393803505912, "loss": 3.6778, "step": 3200 }, { "epoch": 0.21776056529419757, "grad_norm": 1.9418963193893433, "learning_rate": 0.0009727969153417584, "loss": 3.9515, "step": 3205 }, { "epoch": 0.21810028536485934, "grad_norm": 1.7466559410095215, "learning_rate": 0.0009727544503329256, "loss": 3.7576, "step": 3210 }, { "epoch": 0.21844000543552114, 
"grad_norm": 1.608738899230957, "learning_rate": 0.000972711985324093, "loss": 3.7112, "step": 3215 }, { "epoch": 0.2187797255061829, "grad_norm": 1.7004413604736328, "learning_rate": 0.0009726695203152603, "loss": 3.6095, "step": 3220 }, { "epoch": 0.21911944557684468, "grad_norm": 1.7089710235595703, "learning_rate": 0.0009726270553064275, "loss": 3.499, "step": 3225 }, { "epoch": 0.21945916564750645, "grad_norm": 1.5873345136642456, "learning_rate": 0.0009725845902975948, "loss": 3.681, "step": 3230 }, { "epoch": 0.21979888571816822, "grad_norm": 1.5240744352340698, "learning_rate": 0.0009725421252887621, "loss": 3.5551, "step": 3235 }, { "epoch": 0.22013860578883002, "grad_norm": 1.682077407836914, "learning_rate": 0.0009724996602799293, "loss": 3.5778, "step": 3240 }, { "epoch": 0.22047832585949179, "grad_norm": 1.5575213432312012, "learning_rate": 0.0009724571952710966, "loss": 3.5494, "step": 3245 }, { "epoch": 0.22081804593015356, "grad_norm": 1.7041542530059814, "learning_rate": 0.0009724147302622639, "loss": 3.6803, "step": 3250 }, { "epoch": 0.22115776600081533, "grad_norm": 1.4244800806045532, "learning_rate": 0.0009723722652534312, "loss": 3.5464, "step": 3255 }, { "epoch": 0.2214974860714771, "grad_norm": 1.8236292600631714, "learning_rate": 0.0009723298002445985, "loss": 3.6312, "step": 3260 }, { "epoch": 0.22183720614213887, "grad_norm": 1.419805645942688, "learning_rate": 0.0009722873352357658, "loss": 3.6268, "step": 3265 }, { "epoch": 0.22217692621280066, "grad_norm": 2.0565903186798096, "learning_rate": 0.000972244870226933, "loss": 3.7162, "step": 3270 }, { "epoch": 0.22251664628346243, "grad_norm": 1.7434332370758057, "learning_rate": 0.0009722024052181003, "loss": 3.7761, "step": 3275 }, { "epoch": 0.2228563663541242, "grad_norm": 1.362023949623108, "learning_rate": 0.0009721599402092675, "loss": 3.438, "step": 3280 }, { "epoch": 0.22319608642478597, "grad_norm": 2.182844877243042, "learning_rate": 0.0009721174752004348, "loss": 3.879, 
"step": 3285 }, { "epoch": 0.22353580649544774, "grad_norm": 2.4993979930877686, "learning_rate": 0.0009720750101916022, "loss": 3.5805, "step": 3290 }, { "epoch": 0.22387552656610954, "grad_norm": 1.8630638122558594, "learning_rate": 0.0009720325451827694, "loss": 3.373, "step": 3295 }, { "epoch": 0.2242152466367713, "grad_norm": 1.6448603868484497, "learning_rate": 0.0009719900801739367, "loss": 3.5075, "step": 3300 }, { "epoch": 0.22455496670743308, "grad_norm": 1.551476001739502, "learning_rate": 0.000971947615165104, "loss": 3.8181, "step": 3305 }, { "epoch": 0.22489468677809485, "grad_norm": 1.7270959615707397, "learning_rate": 0.0009719051501562712, "loss": 3.2645, "step": 3310 }, { "epoch": 0.22523440684875662, "grad_norm": 1.867315411567688, "learning_rate": 0.0009718626851474386, "loss": 3.5005, "step": 3315 }, { "epoch": 0.2255741269194184, "grad_norm": 1.6810345649719238, "learning_rate": 0.0009718202201386058, "loss": 3.5567, "step": 3320 }, { "epoch": 0.22591384699008019, "grad_norm": 1.692305564880371, "learning_rate": 0.0009717777551297731, "loss": 3.493, "step": 3325 }, { "epoch": 0.22625356706074196, "grad_norm": 1.4942119121551514, "learning_rate": 0.0009717352901209404, "loss": 3.717, "step": 3330 }, { "epoch": 0.22659328713140373, "grad_norm": 1.3117021322250366, "learning_rate": 0.0009716928251121077, "loss": 3.4913, "step": 3335 }, { "epoch": 0.2269330072020655, "grad_norm": 3.075709819793701, "learning_rate": 0.0009716503601032749, "loss": 3.4442, "step": 3340 }, { "epoch": 0.22727272727272727, "grad_norm": 1.6416103839874268, "learning_rate": 0.0009716078950944422, "loss": 3.6626, "step": 3345 }, { "epoch": 0.22761244734338903, "grad_norm": 1.5191508531570435, "learning_rate": 0.0009715654300856095, "loss": 3.7117, "step": 3350 }, { "epoch": 0.22795216741405083, "grad_norm": 1.558527946472168, "learning_rate": 0.0009715229650767767, "loss": 3.5951, "step": 3355 }, { "epoch": 0.2282918874847126, "grad_norm": 1.3513563871383667, 
"learning_rate": 0.0009714805000679441, "loss": 3.6298, "step": 3360 }, { "epoch": 0.22863160755537437, "grad_norm": 1.572306752204895, "learning_rate": 0.0009714380350591114, "loss": 3.7857, "step": 3365 }, { "epoch": 0.22897132762603614, "grad_norm": 2.4153125286102295, "learning_rate": 0.0009713955700502786, "loss": 3.6556, "step": 3370 }, { "epoch": 0.2293110476966979, "grad_norm": 1.5385267734527588, "learning_rate": 0.0009713531050414459, "loss": 3.7016, "step": 3375 }, { "epoch": 0.2296507677673597, "grad_norm": 2.1543445587158203, "learning_rate": 0.0009713106400326131, "loss": 3.6415, "step": 3380 }, { "epoch": 0.22999048783802148, "grad_norm": 1.5918980836868286, "learning_rate": 0.0009712681750237804, "loss": 3.6128, "step": 3385 }, { "epoch": 0.23033020790868325, "grad_norm": 1.4948588609695435, "learning_rate": 0.0009712257100149477, "loss": 3.5352, "step": 3390 }, { "epoch": 0.23066992797934502, "grad_norm": 1.594613790512085, "learning_rate": 0.000971183245006115, "loss": 3.6577, "step": 3395 }, { "epoch": 0.2310096480500068, "grad_norm": 2.06317138671875, "learning_rate": 0.0009711407799972823, "loss": 3.5601, "step": 3400 }, { "epoch": 0.23134936812066856, "grad_norm": 1.711429238319397, "learning_rate": 0.0009710983149884496, "loss": 3.7124, "step": 3405 }, { "epoch": 0.23168908819133036, "grad_norm": 1.6341688632965088, "learning_rate": 0.0009710558499796168, "loss": 3.6318, "step": 3410 }, { "epoch": 0.23202880826199213, "grad_norm": 1.93048095703125, "learning_rate": 0.000971013384970784, "loss": 3.7056, "step": 3415 }, { "epoch": 0.2323685283326539, "grad_norm": 1.3146165609359741, "learning_rate": 0.0009709709199619514, "loss": 3.6826, "step": 3420 }, { "epoch": 0.23270824840331567, "grad_norm": 1.7832953929901123, "learning_rate": 0.0009709284549531186, "loss": 3.7418, "step": 3425 }, { "epoch": 0.23304796847397743, "grad_norm": 1.7903310060501099, "learning_rate": 0.0009708859899442859, "loss": 3.5686, "step": 3430 }, { "epoch": 
0.2333876885446392, "grad_norm": 1.2633881568908691, "learning_rate": 0.0009708435249354533, "loss": 3.711, "step": 3435 }, { "epoch": 0.233727408615301, "grad_norm": 1.9596319198608398, "learning_rate": 0.0009708010599266205, "loss": 3.4723, "step": 3440 }, { "epoch": 0.23406712868596277, "grad_norm": 2.3950085639953613, "learning_rate": 0.0009707585949177877, "loss": 3.5341, "step": 3445 }, { "epoch": 0.23440684875662454, "grad_norm": 1.6751304864883423, "learning_rate": 0.0009707161299089551, "loss": 3.5013, "step": 3450 }, { "epoch": 0.2347465688272863, "grad_norm": 1.9356188774108887, "learning_rate": 0.0009706736649001223, "loss": 3.688, "step": 3455 }, { "epoch": 0.23508628889794808, "grad_norm": 1.8382556438446045, "learning_rate": 0.0009706311998912895, "loss": 3.6246, "step": 3460 }, { "epoch": 0.23542600896860988, "grad_norm": 1.4776370525360107, "learning_rate": 0.000970588734882457, "loss": 3.7805, "step": 3465 }, { "epoch": 0.23576572903927165, "grad_norm": 1.7259351015090942, "learning_rate": 0.0009705462698736242, "loss": 3.5107, "step": 3470 }, { "epoch": 0.23610544910993342, "grad_norm": 1.8737868070602417, "learning_rate": 0.0009705038048647914, "loss": 3.5869, "step": 3475 }, { "epoch": 0.2364451691805952, "grad_norm": 2.066587448120117, "learning_rate": 0.0009704613398559587, "loss": 3.7153, "step": 3480 }, { "epoch": 0.23678488925125696, "grad_norm": 1.7350093126296997, "learning_rate": 0.000970418874847126, "loss": 3.7077, "step": 3485 }, { "epoch": 0.23712460932191873, "grad_norm": 1.527101755142212, "learning_rate": 0.0009703764098382932, "loss": 3.4359, "step": 3490 }, { "epoch": 0.23746432939258053, "grad_norm": 1.4248772859573364, "learning_rate": 0.0009703339448294605, "loss": 3.5128, "step": 3495 }, { "epoch": 0.2378040494632423, "grad_norm": 1.8192975521087646, "learning_rate": 0.0009702914798206279, "loss": 3.6664, "step": 3500 }, { "epoch": 0.23814376953390406, "grad_norm": 1.910526990890503, "learning_rate": 0.0009702490148117951, 
"loss": 3.7242, "step": 3505 }, { "epoch": 0.23848348960456583, "grad_norm": 2.245896816253662, "learning_rate": 0.0009702065498029624, "loss": 3.553, "step": 3510 }, { "epoch": 0.2388232096752276, "grad_norm": 1.6178159713745117, "learning_rate": 0.0009701640847941296, "loss": 3.6349, "step": 3515 }, { "epoch": 0.23916292974588937, "grad_norm": 1.7230315208435059, "learning_rate": 0.0009701216197852969, "loss": 3.6099, "step": 3520 }, { "epoch": 0.23950264981655117, "grad_norm": 1.4561729431152344, "learning_rate": 0.0009700791547764642, "loss": 3.6646, "step": 3525 }, { "epoch": 0.23984236988721294, "grad_norm": 1.7546006441116333, "learning_rate": 0.0009700366897676314, "loss": 3.6292, "step": 3530 }, { "epoch": 0.2401820899578747, "grad_norm": 1.970445990562439, "learning_rate": 0.0009699942247587988, "loss": 3.427, "step": 3535 }, { "epoch": 0.24052181002853648, "grad_norm": 1.726250171661377, "learning_rate": 0.0009699517597499661, "loss": 3.6239, "step": 3540 }, { "epoch": 0.24086153009919825, "grad_norm": 1.9119539260864258, "learning_rate": 0.0009699092947411333, "loss": 3.731, "step": 3545 }, { "epoch": 0.24120125016986005, "grad_norm": 2.2245447635650635, "learning_rate": 0.0009698668297323006, "loss": 3.8043, "step": 3550 }, { "epoch": 0.24154097024052182, "grad_norm": 1.6345648765563965, "learning_rate": 0.0009698243647234679, "loss": 3.7873, "step": 3555 }, { "epoch": 0.2418806903111836, "grad_norm": 2.1006290912628174, "learning_rate": 0.0009697818997146351, "loss": 3.5648, "step": 3560 }, { "epoch": 0.24222041038184536, "grad_norm": 1.5427998304367065, "learning_rate": 0.0009697394347058023, "loss": 3.5496, "step": 3565 }, { "epoch": 0.24256013045250713, "grad_norm": 2.069324254989624, "learning_rate": 0.0009696969696969698, "loss": 3.458, "step": 3570 }, { "epoch": 0.2428998505231689, "grad_norm": 1.586151361465454, "learning_rate": 0.000969654504688137, "loss": 3.7314, "step": 3575 }, { "epoch": 0.2432395705938307, "grad_norm": 1.575217843055725, 
"learning_rate": 0.0009696120396793042, "loss": 3.7173, "step": 3580 }, { "epoch": 0.24357929066449246, "grad_norm": 1.4543596506118774, "learning_rate": 0.0009695695746704716, "loss": 3.7505, "step": 3585 }, { "epoch": 0.24391901073515423, "grad_norm": 1.7749935388565063, "learning_rate": 0.0009695271096616388, "loss": 3.3979, "step": 3590 }, { "epoch": 0.244258730805816, "grad_norm": 1.8667887449264526, "learning_rate": 0.000969484644652806, "loss": 3.5816, "step": 3595 }, { "epoch": 0.24459845087647777, "grad_norm": 1.6694843769073486, "learning_rate": 0.0009694421796439735, "loss": 3.5191, "step": 3600 }, { "epoch": 0.24493817094713954, "grad_norm": 1.828382968902588, "learning_rate": 0.0009693997146351407, "loss": 3.6015, "step": 3605 }, { "epoch": 0.24527789101780134, "grad_norm": 2.1065683364868164, "learning_rate": 0.0009693572496263079, "loss": 3.6825, "step": 3610 }, { "epoch": 0.2456176110884631, "grad_norm": 1.4302717447280884, "learning_rate": 0.0009693147846174752, "loss": 3.7965, "step": 3615 }, { "epoch": 0.24595733115912488, "grad_norm": 1.5248844623565674, "learning_rate": 0.0009692723196086425, "loss": 3.4003, "step": 3620 }, { "epoch": 0.24629705122978665, "grad_norm": 2.3446950912475586, "learning_rate": 0.0009692298545998097, "loss": 3.6161, "step": 3625 }, { "epoch": 0.24663677130044842, "grad_norm": 1.678837776184082, "learning_rate": 0.000969187389590977, "loss": 3.5703, "step": 3630 }, { "epoch": 0.24697649137111022, "grad_norm": 1.9363974332809448, "learning_rate": 0.0009691449245821444, "loss": 3.7458, "step": 3635 }, { "epoch": 0.247316211441772, "grad_norm": 1.5760730504989624, "learning_rate": 0.0009691024595733116, "loss": 3.7657, "step": 3640 }, { "epoch": 0.24765593151243376, "grad_norm": 1.7256523370742798, "learning_rate": 0.0009690599945644789, "loss": 3.7871, "step": 3645 }, { "epoch": 0.24799565158309553, "grad_norm": 1.6078699827194214, "learning_rate": 0.0009690175295556462, "loss": 3.6317, "step": 3650 }, { "epoch": 
0.2483353716537573, "grad_norm": 1.8344146013259888, "learning_rate": 0.0009689750645468135, "loss": 3.4139, "step": 3655 }, { "epoch": 0.24867509172441907, "grad_norm": 1.7905937433242798, "learning_rate": 0.0009689325995379807, "loss": 3.5409, "step": 3660 }, { "epoch": 0.24901481179508086, "grad_norm": 1.5891785621643066, "learning_rate": 0.0009688901345291479, "loss": 3.4363, "step": 3665 }, { "epoch": 0.24935453186574263, "grad_norm": 1.8572617769241333, "learning_rate": 0.0009688476695203154, "loss": 3.7486, "step": 3670 }, { "epoch": 0.2496942519364044, "grad_norm": 2.162363052368164, "learning_rate": 0.0009688052045114826, "loss": 3.9067, "step": 3675 }, { "epoch": 0.2500339720070662, "grad_norm": 1.6457287073135376, "learning_rate": 0.0009687627395026498, "loss": 3.6501, "step": 3680 }, { "epoch": 0.25037369207772797, "grad_norm": 1.724884271621704, "learning_rate": 0.0009687202744938172, "loss": 3.6146, "step": 3685 }, { "epoch": 0.2507134121483897, "grad_norm": 1.5351073741912842, "learning_rate": 0.0009686778094849844, "loss": 3.5878, "step": 3690 }, { "epoch": 0.2510531322190515, "grad_norm": 1.5459001064300537, "learning_rate": 0.0009686353444761516, "loss": 3.7147, "step": 3695 }, { "epoch": 0.25139285228971325, "grad_norm": 2.0692145824432373, "learning_rate": 0.000968592879467319, "loss": 3.7617, "step": 3700 }, { "epoch": 0.25173257236037505, "grad_norm": 1.5834602117538452, "learning_rate": 0.0009685504144584863, "loss": 3.6386, "step": 3705 }, { "epoch": 0.25207229243103685, "grad_norm": 1.8701739311218262, "learning_rate": 0.0009685079494496535, "loss": 3.6181, "step": 3710 }, { "epoch": 0.2524120125016986, "grad_norm": 2.139824867248535, "learning_rate": 0.0009684654844408209, "loss": 3.5964, "step": 3715 }, { "epoch": 0.2527517325723604, "grad_norm": 1.3451875448226929, "learning_rate": 0.0009684230194319881, "loss": 3.6033, "step": 3720 }, { "epoch": 0.25309145264302213, "grad_norm": 1.3682301044464111, "learning_rate": 
0.0009683805544231553, "loss": 3.5994, "step": 3725 }, { "epoch": 0.2534311727136839, "grad_norm": 2.1373167037963867, "learning_rate": 0.0009683380894143226, "loss": 3.8574, "step": 3730 }, { "epoch": 0.2537708927843457, "grad_norm": 1.8511145114898682, "learning_rate": 0.0009682956244054899, "loss": 3.5804, "step": 3735 }, { "epoch": 0.25411061285500747, "grad_norm": 1.7723479270935059, "learning_rate": 0.0009682531593966572, "loss": 3.6533, "step": 3740 }, { "epoch": 0.25445033292566926, "grad_norm": 2.0140340328216553, "learning_rate": 0.0009682106943878245, "loss": 3.5062, "step": 3745 }, { "epoch": 0.254790052996331, "grad_norm": 1.5292110443115234, "learning_rate": 0.0009681682293789918, "loss": 3.3989, "step": 3750 }, { "epoch": 0.2551297730669928, "grad_norm": 1.9412097930908203, "learning_rate": 0.000968125764370159, "loss": 3.79, "step": 3755 }, { "epoch": 0.25546949313765455, "grad_norm": 1.786798357963562, "learning_rate": 0.0009680832993613263, "loss": 3.5993, "step": 3760 }, { "epoch": 0.25580921320831634, "grad_norm": 1.4246013164520264, "learning_rate": 0.0009680408343524935, "loss": 3.5159, "step": 3765 }, { "epoch": 0.25614893327897814, "grad_norm": 1.8703291416168213, "learning_rate": 0.0009679983693436608, "loss": 3.5863, "step": 3770 }, { "epoch": 0.2564886533496399, "grad_norm": 1.4903159141540527, "learning_rate": 0.0009679559043348282, "loss": 3.5354, "step": 3775 }, { "epoch": 0.2568283734203017, "grad_norm": 1.869640588760376, "learning_rate": 0.0009679134393259954, "loss": 3.7511, "step": 3780 }, { "epoch": 0.2571680934909634, "grad_norm": 1.5485059022903442, "learning_rate": 0.0009678709743171627, "loss": 3.6186, "step": 3785 }, { "epoch": 0.2575078135616252, "grad_norm": 1.626159429550171, "learning_rate": 0.00096782850930833, "loss": 3.7843, "step": 3790 }, { "epoch": 0.257847533632287, "grad_norm": 1.839573860168457, "learning_rate": 0.0009677860442994972, "loss": 3.5739, "step": 3795 }, { "epoch": 0.25818725370294876, "grad_norm": 
1.5869240760803223, "learning_rate": 0.0009677435792906644, "loss": 3.522, "step": 3800 }, { "epoch": 0.25852697377361056, "grad_norm": 2.1464288234710693, "learning_rate": 0.0009677011142818318, "loss": 3.5965, "step": 3805 }, { "epoch": 0.2588666938442723, "grad_norm": 2.306097984313965, "learning_rate": 0.0009676586492729991, "loss": 3.5387, "step": 3810 }, { "epoch": 0.2592064139149341, "grad_norm": 1.5503933429718018, "learning_rate": 0.0009676161842641663, "loss": 3.5844, "step": 3815 }, { "epoch": 0.2595461339855959, "grad_norm": 2.036761522293091, "learning_rate": 0.0009675737192553337, "loss": 3.4397, "step": 3820 }, { "epoch": 0.25988585405625764, "grad_norm": 1.7309776544570923, "learning_rate": 0.0009675312542465009, "loss": 3.7255, "step": 3825 }, { "epoch": 0.26022557412691943, "grad_norm": 2.1113598346710205, "learning_rate": 0.0009674887892376681, "loss": 3.4569, "step": 3830 }, { "epoch": 0.2605652941975812, "grad_norm": 1.7755528688430786, "learning_rate": 0.0009674463242288355, "loss": 3.5644, "step": 3835 }, { "epoch": 0.260905014268243, "grad_norm": 1.6741244792938232, "learning_rate": 0.0009674038592200027, "loss": 3.6285, "step": 3840 }, { "epoch": 0.2612447343389047, "grad_norm": 1.5721482038497925, "learning_rate": 0.00096736139421117, "loss": 3.8229, "step": 3845 }, { "epoch": 0.2615844544095665, "grad_norm": 1.9812613725662231, "learning_rate": 0.0009673189292023374, "loss": 3.7272, "step": 3850 }, { "epoch": 0.2619241744802283, "grad_norm": 1.9409090280532837, "learning_rate": 0.0009672764641935046, "loss": 3.482, "step": 3855 }, { "epoch": 0.26226389455089005, "grad_norm": 2.186992645263672, "learning_rate": 0.0009672339991846718, "loss": 3.6958, "step": 3860 }, { "epoch": 0.26260361462155185, "grad_norm": 1.9167529344558716, "learning_rate": 0.0009671915341758391, "loss": 3.5717, "step": 3865 }, { "epoch": 0.2629433346922136, "grad_norm": 1.4346433877944946, "learning_rate": 0.0009671490691670064, "loss": 3.4325, "step": 3870 }, { 
"epoch": 0.2632830547628754, "grad_norm": 1.8077160120010376, "learning_rate": 0.0009671066041581736, "loss": 3.7566, "step": 3875 }, { "epoch": 0.2636227748335372, "grad_norm": 1.5360581874847412, "learning_rate": 0.000967064139149341, "loss": 3.716, "step": 3880 }, { "epoch": 0.26396249490419893, "grad_norm": 1.7265392541885376, "learning_rate": 0.0009670216741405083, "loss": 3.6937, "step": 3885 }, { "epoch": 0.2643022149748607, "grad_norm": 1.432693362236023, "learning_rate": 0.0009669792091316755, "loss": 3.6952, "step": 3890 }, { "epoch": 0.26464193504552247, "grad_norm": 2.05922269821167, "learning_rate": 0.0009669367441228428, "loss": 3.7013, "step": 3895 }, { "epoch": 0.26498165511618427, "grad_norm": 1.7383133172988892, "learning_rate": 0.00096689427911401, "loss": 3.4831, "step": 3900 }, { "epoch": 0.26532137518684606, "grad_norm": 2.1428592205047607, "learning_rate": 0.0009668518141051773, "loss": 3.9576, "step": 3905 }, { "epoch": 0.2656610952575078, "grad_norm": 1.999027967453003, "learning_rate": 0.0009668093490963446, "loss": 3.4684, "step": 3910 }, { "epoch": 0.2660008153281696, "grad_norm": 1.9469701051712036, "learning_rate": 0.0009667668840875119, "loss": 3.8283, "step": 3915 }, { "epoch": 0.26634053539883135, "grad_norm": 1.9492524862289429, "learning_rate": 0.0009667244190786792, "loss": 3.7456, "step": 3920 }, { "epoch": 0.26668025546949314, "grad_norm": 1.8076868057250977, "learning_rate": 0.0009666819540698465, "loss": 3.6567, "step": 3925 }, { "epoch": 0.2670199755401549, "grad_norm": 1.6729258298873901, "learning_rate": 0.0009666394890610137, "loss": 3.4418, "step": 3930 }, { "epoch": 0.2673596956108167, "grad_norm": 1.61036217212677, "learning_rate": 0.000966597024052181, "loss": 3.8201, "step": 3935 }, { "epoch": 0.2676994156814785, "grad_norm": 1.403557538986206, "learning_rate": 0.0009665545590433483, "loss": 3.7321, "step": 3940 }, { "epoch": 0.2680391357521402, "grad_norm": 1.9819769859313965, "learning_rate": 0.0009665120940345155, 
"loss": 3.6179, "step": 3945 }, { "epoch": 0.268378855822802, "grad_norm": 1.6981037855148315, "learning_rate": 0.0009664696290256829, "loss": 3.8366, "step": 3950 }, { "epoch": 0.26871857589346376, "grad_norm": 1.8278923034667969, "learning_rate": 0.0009664271640168502, "loss": 3.4319, "step": 3955 }, { "epoch": 0.26905829596412556, "grad_norm": 1.5373165607452393, "learning_rate": 0.0009663846990080174, "loss": 3.701, "step": 3960 }, { "epoch": 0.26939801603478736, "grad_norm": 1.8037666082382202, "learning_rate": 0.0009663422339991846, "loss": 3.8431, "step": 3965 }, { "epoch": 0.2697377361054491, "grad_norm": 1.696169137954712, "learning_rate": 0.000966299768990352, "loss": 3.4934, "step": 3970 }, { "epoch": 0.2700774561761109, "grad_norm": 1.84587824344635, "learning_rate": 0.0009662573039815192, "loss": 3.8646, "step": 3975 }, { "epoch": 0.27041717624677264, "grad_norm": 1.8249062299728394, "learning_rate": 0.0009662148389726864, "loss": 3.5966, "step": 3980 }, { "epoch": 0.27075689631743444, "grad_norm": 1.5445632934570312, "learning_rate": 0.0009661723739638539, "loss": 3.8253, "step": 3985 }, { "epoch": 0.27109661638809623, "grad_norm": 1.4405039548873901, "learning_rate": 0.0009661299089550211, "loss": 3.6326, "step": 3990 }, { "epoch": 0.271436336458758, "grad_norm": 1.6463878154754639, "learning_rate": 0.0009660874439461884, "loss": 3.5708, "step": 3995 }, { "epoch": 0.2717760565294198, "grad_norm": 1.9302623271942139, "learning_rate": 0.0009660449789373557, "loss": 3.9446, "step": 4000 }, { "epoch": 0.2721157766000815, "grad_norm": 1.7145631313323975, "learning_rate": 0.0009660025139285229, "loss": 3.6468, "step": 4005 }, { "epoch": 0.2724554966707433, "grad_norm": 1.4514961242675781, "learning_rate": 0.0009659600489196902, "loss": 3.4958, "step": 4010 }, { "epoch": 0.27279521674140506, "grad_norm": 1.7700506448745728, "learning_rate": 0.0009659175839108574, "loss": 3.8218, "step": 4015 }, { "epoch": 0.27313493681206685, "grad_norm": 
1.8147835731506348, "learning_rate": 0.0009658751189020248, "loss": 3.5446, "step": 4020 }, { "epoch": 0.27347465688272865, "grad_norm": 1.6446049213409424, "learning_rate": 0.0009658326538931921, "loss": 3.4946, "step": 4025 }, { "epoch": 0.2738143769533904, "grad_norm": 1.769670844078064, "learning_rate": 0.0009657901888843593, "loss": 3.4033, "step": 4030 }, { "epoch": 0.2741540970240522, "grad_norm": 1.5639005899429321, "learning_rate": 0.0009657477238755266, "loss": 3.6366, "step": 4035 }, { "epoch": 0.27449381709471393, "grad_norm": 1.5217583179473877, "learning_rate": 0.0009657052588666939, "loss": 3.5, "step": 4040 }, { "epoch": 0.27483353716537573, "grad_norm": 2.4929685592651367, "learning_rate": 0.0009656627938578611, "loss": 3.8172, "step": 4045 }, { "epoch": 0.2751732572360375, "grad_norm": 2.0449206829071045, "learning_rate": 0.0009656203288490283, "loss": 3.6627, "step": 4050 }, { "epoch": 0.27551297730669927, "grad_norm": 1.4060708284378052, "learning_rate": 0.0009655778638401958, "loss": 3.6906, "step": 4055 }, { "epoch": 0.27585269737736107, "grad_norm": 2.017481803894043, "learning_rate": 0.000965535398831363, "loss": 3.6342, "step": 4060 }, { "epoch": 0.2761924174480228, "grad_norm": 1.8777480125427246, "learning_rate": 0.0009654929338225302, "loss": 3.564, "step": 4065 }, { "epoch": 0.2765321375186846, "grad_norm": 1.7424068450927734, "learning_rate": 0.0009654504688136976, "loss": 3.5091, "step": 4070 }, { "epoch": 0.2768718575893464, "grad_norm": 1.7502319812774658, "learning_rate": 0.0009654080038048648, "loss": 3.7366, "step": 4075 }, { "epoch": 0.27721157766000815, "grad_norm": 1.6185507774353027, "learning_rate": 0.000965365538796032, "loss": 3.6344, "step": 4080 }, { "epoch": 0.27755129773066994, "grad_norm": 1.7748173475265503, "learning_rate": 0.0009653230737871994, "loss": 3.3857, "step": 4085 }, { "epoch": 0.2778910178013317, "grad_norm": 1.5433307886123657, "learning_rate": 0.0009652806087783667, "loss": 3.6057, "step": 4090 }, { 
"epoch": 0.2782307378719935, "grad_norm": 1.730972170829773, "learning_rate": 0.0009652381437695339, "loss": 3.6057, "step": 4095 }, { "epoch": 0.2785704579426552, "grad_norm": 1.5919970273971558, "learning_rate": 0.0009651956787607013, "loss": 3.6549, "step": 4100 }, { "epoch": 0.278910178013317, "grad_norm": 1.5249508619308472, "learning_rate": 0.0009651532137518685, "loss": 3.4902, "step": 4105 }, { "epoch": 0.2792498980839788, "grad_norm": 1.5137068033218384, "learning_rate": 0.0009651107487430357, "loss": 3.6551, "step": 4110 }, { "epoch": 0.27958961815464056, "grad_norm": 1.5921529531478882, "learning_rate": 0.000965068283734203, "loss": 3.7007, "step": 4115 }, { "epoch": 0.27992933822530236, "grad_norm": 1.2765605449676514, "learning_rate": 0.0009650258187253703, "loss": 3.6642, "step": 4120 }, { "epoch": 0.2802690582959641, "grad_norm": 1.8880406618118286, "learning_rate": 0.0009649833537165376, "loss": 3.537, "step": 4125 }, { "epoch": 0.2806087783666259, "grad_norm": 2.0080697536468506, "learning_rate": 0.0009649408887077049, "loss": 3.5881, "step": 4130 }, { "epoch": 0.2809484984372877, "grad_norm": 1.4283020496368408, "learning_rate": 0.0009648984236988722, "loss": 3.5138, "step": 4135 }, { "epoch": 0.28128821850794944, "grad_norm": 1.5652830600738525, "learning_rate": 0.0009648559586900394, "loss": 3.5431, "step": 4140 }, { "epoch": 0.28162793857861124, "grad_norm": 1.693231225013733, "learning_rate": 0.0009648134936812067, "loss": 3.5907, "step": 4145 }, { "epoch": 0.281967658649273, "grad_norm": 1.868565320968628, "learning_rate": 0.000964771028672374, "loss": 3.8873, "step": 4150 }, { "epoch": 0.2823073787199348, "grad_norm": 1.5878775119781494, "learning_rate": 0.0009647285636635412, "loss": 3.7668, "step": 4155 }, { "epoch": 0.2826470987905966, "grad_norm": 1.7374308109283447, "learning_rate": 0.0009646860986547086, "loss": 3.6262, "step": 4160 }, { "epoch": 0.2829868188612583, "grad_norm": 1.7633397579193115, "learning_rate": 
0.0009646436336458758, "loss": 3.3699, "step": 4165 }, { "epoch": 0.2833265389319201, "grad_norm": 1.4700124263763428, "learning_rate": 0.0009646011686370431, "loss": 3.6163, "step": 4170 }, { "epoch": 0.28366625900258186, "grad_norm": 1.7117177248001099, "learning_rate": 0.0009645587036282104, "loss": 3.6568, "step": 4175 }, { "epoch": 0.28400597907324365, "grad_norm": 1.9400917291641235, "learning_rate": 0.0009645162386193776, "loss": 3.2294, "step": 4180 }, { "epoch": 0.2843456991439054, "grad_norm": 1.7652840614318848, "learning_rate": 0.0009644737736105449, "loss": 3.6948, "step": 4185 }, { "epoch": 0.2846854192145672, "grad_norm": 2.0903868675231934, "learning_rate": 0.0009644313086017123, "loss": 3.4629, "step": 4190 }, { "epoch": 0.285025139285229, "grad_norm": 1.9681673049926758, "learning_rate": 0.0009643888435928795, "loss": 3.6617, "step": 4195 }, { "epoch": 0.28536485935589073, "grad_norm": 1.6500506401062012, "learning_rate": 0.0009643463785840467, "loss": 3.6201, "step": 4200 }, { "epoch": 0.28570457942655253, "grad_norm": 1.4277184009552002, "learning_rate": 0.0009643039135752141, "loss": 3.7806, "step": 4205 }, { "epoch": 0.28604429949721427, "grad_norm": 1.8762825727462769, "learning_rate": 0.0009642614485663813, "loss": 3.6302, "step": 4210 }, { "epoch": 0.28638401956787607, "grad_norm": 1.847631573677063, "learning_rate": 0.0009642189835575485, "loss": 3.7213, "step": 4215 }, { "epoch": 0.28672373963853787, "grad_norm": 1.8317886590957642, "learning_rate": 0.0009641765185487159, "loss": 3.8915, "step": 4220 }, { "epoch": 0.2870634597091996, "grad_norm": 1.966576337814331, "learning_rate": 0.0009641340535398832, "loss": 3.7474, "step": 4225 }, { "epoch": 0.2874031797798614, "grad_norm": 1.6569159030914307, "learning_rate": 0.0009640915885310504, "loss": 3.6776, "step": 4230 }, { "epoch": 0.28774289985052315, "grad_norm": 1.3935604095458984, "learning_rate": 0.0009640491235222178, "loss": 3.4517, "step": 4235 }, { "epoch": 0.28808261992118495, 
"grad_norm": 1.7122223377227783, "learning_rate": 0.000964006658513385, "loss": 3.8475, "step": 4240 }, { "epoch": 0.28842233999184674, "grad_norm": 1.476290225982666, "learning_rate": 0.0009639641935045522, "loss": 3.7044, "step": 4245 }, { "epoch": 0.2887620600625085, "grad_norm": 2.0352444648742676, "learning_rate": 0.0009639217284957195, "loss": 3.5102, "step": 4250 }, { "epoch": 0.2891017801331703, "grad_norm": 1.5057474374771118, "learning_rate": 0.0009638792634868868, "loss": 3.4824, "step": 4255 }, { "epoch": 0.289441500203832, "grad_norm": 1.9914487600326538, "learning_rate": 0.0009638367984780541, "loss": 3.882, "step": 4260 }, { "epoch": 0.2897812202744938, "grad_norm": 1.7217937707901, "learning_rate": 0.0009637943334692214, "loss": 3.5113, "step": 4265 }, { "epoch": 0.29012094034515556, "grad_norm": 1.8722842931747437, "learning_rate": 0.0009637518684603887, "loss": 3.6011, "step": 4270 }, { "epoch": 0.29046066041581736, "grad_norm": 1.8265751600265503, "learning_rate": 0.0009637094034515559, "loss": 3.7969, "step": 4275 }, { "epoch": 0.29080038048647916, "grad_norm": 2.2584569454193115, "learning_rate": 0.0009636669384427232, "loss": 3.5652, "step": 4280 }, { "epoch": 0.2911401005571409, "grad_norm": 1.4789308309555054, "learning_rate": 0.0009636244734338905, "loss": 3.7359, "step": 4285 }, { "epoch": 0.2914798206278027, "grad_norm": 1.9247461557388306, "learning_rate": 0.0009635820084250577, "loss": 3.5496, "step": 4290 }, { "epoch": 0.29181954069846444, "grad_norm": 1.6868678331375122, "learning_rate": 0.0009635395434162251, "loss": 3.8345, "step": 4295 }, { "epoch": 0.29215926076912624, "grad_norm": 1.692295789718628, "learning_rate": 0.0009634970784073923, "loss": 3.429, "step": 4300 }, { "epoch": 0.29249898083978804, "grad_norm": 2.065774917602539, "learning_rate": 0.0009634546133985596, "loss": 3.4131, "step": 4305 }, { "epoch": 0.2928387009104498, "grad_norm": 1.8265520334243774, "learning_rate": 0.0009634121483897269, "loss": 3.7385, "step": 
4310 }, { "epoch": 0.2931784209811116, "grad_norm": 2.14809513092041, "learning_rate": 0.0009633696833808941, "loss": 3.5045, "step": 4315 }, { "epoch": 0.2935181410517733, "grad_norm": 1.8787513971328735, "learning_rate": 0.0009633272183720614, "loss": 3.6499, "step": 4320 }, { "epoch": 0.2938578611224351, "grad_norm": 1.9599782228469849, "learning_rate": 0.0009632847533632287, "loss": 3.5595, "step": 4325 }, { "epoch": 0.2941975811930969, "grad_norm": 1.800569772720337, "learning_rate": 0.000963242288354396, "loss": 3.5415, "step": 4330 }, { "epoch": 0.29453730126375866, "grad_norm": 1.8433942794799805, "learning_rate": 0.0009631998233455634, "loss": 3.829, "step": 4335 }, { "epoch": 0.29487702133442045, "grad_norm": 1.8461289405822754, "learning_rate": 0.0009631573583367306, "loss": 3.6265, "step": 4340 }, { "epoch": 0.2952167414050822, "grad_norm": 1.6573734283447266, "learning_rate": 0.0009631148933278978, "loss": 3.1554, "step": 4345 }, { "epoch": 0.295556461475744, "grad_norm": 1.726784110069275, "learning_rate": 0.0009630724283190651, "loss": 3.4415, "step": 4350 }, { "epoch": 0.29589618154640573, "grad_norm": 1.8154672384262085, "learning_rate": 0.0009630299633102324, "loss": 3.6968, "step": 4355 }, { "epoch": 0.29623590161706753, "grad_norm": 1.3928115367889404, "learning_rate": 0.0009629874983013996, "loss": 3.4535, "step": 4360 }, { "epoch": 0.29657562168772933, "grad_norm": 1.6339256763458252, "learning_rate": 0.000962945033292567, "loss": 3.7666, "step": 4365 }, { "epoch": 0.29691534175839107, "grad_norm": 1.5277798175811768, "learning_rate": 0.0009629025682837343, "loss": 3.7725, "step": 4370 }, { "epoch": 0.29725506182905287, "grad_norm": 1.9884668588638306, "learning_rate": 0.0009628601032749015, "loss": 3.6011, "step": 4375 }, { "epoch": 0.2975947818997146, "grad_norm": 1.9573463201522827, "learning_rate": 0.0009628176382660688, "loss": 3.6411, "step": 4380 }, { "epoch": 0.2979345019703764, "grad_norm": 1.4585015773773193, "learning_rate": 
0.0009627751732572361, "loss": 3.6714, "step": 4385 }, { "epoch": 0.2982742220410382, "grad_norm": 1.7396798133850098, "learning_rate": 0.0009627327082484033, "loss": 3.791, "step": 4390 }, { "epoch": 0.29861394211169995, "grad_norm": 1.717230200767517, "learning_rate": 0.0009626902432395706, "loss": 3.7842, "step": 4395 }, { "epoch": 0.29895366218236175, "grad_norm": 1.7708522081375122, "learning_rate": 0.000962647778230738, "loss": 3.6534, "step": 4400 }, { "epoch": 0.2992933822530235, "grad_norm": 2.0133562088012695, "learning_rate": 0.0009626053132219052, "loss": 3.6198, "step": 4405 }, { "epoch": 0.2996331023236853, "grad_norm": 2.118305206298828, "learning_rate": 0.0009625628482130725, "loss": 3.8663, "step": 4410 }, { "epoch": 0.2999728223943471, "grad_norm": 2.027229070663452, "learning_rate": 0.0009625203832042397, "loss": 3.785, "step": 4415 }, { "epoch": 0.3003125424650088, "grad_norm": 2.260509729385376, "learning_rate": 0.000962477918195407, "loss": 3.6043, "step": 4420 }, { "epoch": 0.3006522625356706, "grad_norm": 1.3884100914001465, "learning_rate": 0.0009624354531865743, "loss": 3.8195, "step": 4425 }, { "epoch": 0.30099198260633236, "grad_norm": 1.9947293996810913, "learning_rate": 0.0009623929881777415, "loss": 3.8195, "step": 4430 }, { "epoch": 0.30133170267699416, "grad_norm": 6.82991886138916, "learning_rate": 0.0009623505231689089, "loss": 3.8864, "step": 4435 }, { "epoch": 0.3016714227476559, "grad_norm": 1.7218948602676392, "learning_rate": 0.0009623080581600762, "loss": 3.4885, "step": 4440 }, { "epoch": 0.3020111428183177, "grad_norm": 1.827734112739563, "learning_rate": 0.0009622655931512434, "loss": 3.6275, "step": 4445 }, { "epoch": 0.3023508628889795, "grad_norm": 1.633695125579834, "learning_rate": 0.0009622231281424106, "loss": 3.7592, "step": 4450 }, { "epoch": 0.30269058295964124, "grad_norm": 1.4888650178909302, "learning_rate": 0.000962180663133578, "loss": 3.3469, "step": 4455 }, { "epoch": 0.30303030303030304, "grad_norm": 
1.937571406364441, "learning_rate": 0.0009621381981247452, "loss": 3.695, "step": 4460 }, { "epoch": 0.3033700231009648, "grad_norm": 1.6187472343444824, "learning_rate": 0.0009620957331159124, "loss": 3.5907, "step": 4465 }, { "epoch": 0.3037097431716266, "grad_norm": 1.762279987335205, "learning_rate": 0.0009620532681070799, "loss": 3.489, "step": 4470 }, { "epoch": 0.3040494632422884, "grad_norm": 1.7699568271636963, "learning_rate": 0.0009620108030982471, "loss": 3.7165, "step": 4475 }, { "epoch": 0.3043891833129501, "grad_norm": 1.8643450736999512, "learning_rate": 0.0009619683380894143, "loss": 3.7593, "step": 4480 }, { "epoch": 0.3047289033836119, "grad_norm": 1.5991002321243286, "learning_rate": 0.0009619258730805817, "loss": 3.5584, "step": 4485 }, { "epoch": 0.30506862345427366, "grad_norm": 1.5436351299285889, "learning_rate": 0.0009618834080717489, "loss": 3.888, "step": 4490 }, { "epoch": 0.30540834352493546, "grad_norm": 2.4400722980499268, "learning_rate": 0.0009618409430629161, "loss": 3.6704, "step": 4495 }, { "epoch": 0.30574806359559725, "grad_norm": 1.7560056447982788, "learning_rate": 0.0009617984780540834, "loss": 3.6812, "step": 4500 }, { "epoch": 0.306087783666259, "grad_norm": 1.5960654020309448, "learning_rate": 0.0009617560130452508, "loss": 3.5877, "step": 4505 }, { "epoch": 0.3064275037369208, "grad_norm": 1.7182023525238037, "learning_rate": 0.000961713548036418, "loss": 3.8574, "step": 4510 }, { "epoch": 0.30676722380758253, "grad_norm": 1.3595679998397827, "learning_rate": 0.0009616710830275853, "loss": 3.632, "step": 4515 }, { "epoch": 0.30710694387824433, "grad_norm": 1.8546289205551147, "learning_rate": 0.0009616286180187526, "loss": 3.7505, "step": 4520 }, { "epoch": 0.3074466639489061, "grad_norm": 1.821366310119629, "learning_rate": 0.0009615861530099198, "loss": 3.4981, "step": 4525 }, { "epoch": 0.30778638401956787, "grad_norm": 2.3496289253234863, "learning_rate": 0.0009615436880010871, "loss": 3.7395, "step": 4530 }, { 
"epoch": 0.30812610409022967, "grad_norm": 1.7546576261520386, "learning_rate": 0.0009615012229922543, "loss": 3.4429, "step": 4535 }, { "epoch": 0.3084658241608914, "grad_norm": 1.709243893623352, "learning_rate": 0.0009614587579834217, "loss": 3.5741, "step": 4540 }, { "epoch": 0.3088055442315532, "grad_norm": 2.1002631187438965, "learning_rate": 0.000961416292974589, "loss": 3.4268, "step": 4545 }, { "epoch": 0.30914526430221495, "grad_norm": 1.7308261394500732, "learning_rate": 0.0009613738279657562, "loss": 3.5153, "step": 4550 }, { "epoch": 0.30948498437287675, "grad_norm": 1.3629120588302612, "learning_rate": 0.0009613313629569235, "loss": 3.6491, "step": 4555 }, { "epoch": 0.30982470444353855, "grad_norm": 1.5385903120040894, "learning_rate": 0.0009612888979480908, "loss": 3.6766, "step": 4560 }, { "epoch": 0.3101644245142003, "grad_norm": 1.7650549411773682, "learning_rate": 0.000961246432939258, "loss": 3.6895, "step": 4565 }, { "epoch": 0.3105041445848621, "grad_norm": 1.726330041885376, "learning_rate": 0.0009612039679304253, "loss": 3.8093, "step": 4570 }, { "epoch": 0.3108438646555238, "grad_norm": 1.8897746801376343, "learning_rate": 0.0009611615029215927, "loss": 3.4061, "step": 4575 }, { "epoch": 0.3111835847261856, "grad_norm": 2.175757646560669, "learning_rate": 0.0009611190379127599, "loss": 3.9189, "step": 4580 }, { "epoch": 0.3115233047968474, "grad_norm": 1.5923641920089722, "learning_rate": 0.0009610765729039272, "loss": 3.5199, "step": 4585 }, { "epoch": 0.31186302486750916, "grad_norm": 2.130966901779175, "learning_rate": 0.0009610341078950945, "loss": 3.6632, "step": 4590 }, { "epoch": 0.31220274493817096, "grad_norm": 1.5993167161941528, "learning_rate": 0.0009609916428862617, "loss": 3.6269, "step": 4595 }, { "epoch": 0.3125424650088327, "grad_norm": 1.7660778760910034, "learning_rate": 0.0009609491778774289, "loss": 3.704, "step": 4600 }, { "epoch": 0.3128821850794945, "grad_norm": 1.5169384479522705, "learning_rate": 
0.0009609067128685963, "loss": 3.6609, "step": 4605 }, { "epoch": 0.3132219051501563, "grad_norm": 2.044022798538208, "learning_rate": 0.0009608642478597636, "loss": 3.6195, "step": 4610 }, { "epoch": 0.31356162522081804, "grad_norm": 1.4076106548309326, "learning_rate": 0.0009608217828509308, "loss": 3.5338, "step": 4615 }, { "epoch": 0.31390134529147984, "grad_norm": 1.789122223854065, "learning_rate": 0.0009607793178420982, "loss": 3.7976, "step": 4620 }, { "epoch": 0.3142410653621416, "grad_norm": 1.5733217000961304, "learning_rate": 0.0009607368528332654, "loss": 3.7263, "step": 4625 }, { "epoch": 0.3145807854328034, "grad_norm": 1.5583852529525757, "learning_rate": 0.0009606943878244326, "loss": 3.4168, "step": 4630 }, { "epoch": 0.3149205055034651, "grad_norm": 2.0423219203948975, "learning_rate": 0.0009606519228156, "loss": 3.6516, "step": 4635 }, { "epoch": 0.3152602255741269, "grad_norm": 1.640925645828247, "learning_rate": 0.0009606094578067672, "loss": 3.75, "step": 4640 }, { "epoch": 0.3155999456447887, "grad_norm": 1.7036598920822144, "learning_rate": 0.0009605669927979345, "loss": 3.6335, "step": 4645 }, { "epoch": 0.31593966571545046, "grad_norm": 1.7674905061721802, "learning_rate": 0.0009605245277891018, "loss": 3.5798, "step": 4650 }, { "epoch": 0.31627938578611225, "grad_norm": 2.0204219818115234, "learning_rate": 0.0009604820627802691, "loss": 3.5546, "step": 4655 }, { "epoch": 0.316619105856774, "grad_norm": 1.7841860055923462, "learning_rate": 0.0009604395977714363, "loss": 3.8959, "step": 4660 }, { "epoch": 0.3169588259274358, "grad_norm": 2.125293731689453, "learning_rate": 0.0009603971327626036, "loss": 3.6278, "step": 4665 }, { "epoch": 0.3172985459980976, "grad_norm": 1.8986178636550903, "learning_rate": 0.0009603546677537709, "loss": 3.7869, "step": 4670 }, { "epoch": 0.31763826606875933, "grad_norm": 1.6017260551452637, "learning_rate": 0.0009603122027449382, "loss": 3.652, "step": 4675 }, { "epoch": 0.31797798613942113, "grad_norm": 
1.5089112520217896, "learning_rate": 0.0009602697377361055, "loss": 3.6376, "step": 4680 }, { "epoch": 0.3183177062100829, "grad_norm": 1.2352797985076904, "learning_rate": 0.0009602272727272728, "loss": 3.7568, "step": 4685 }, { "epoch": 0.31865742628074467, "grad_norm": 1.5885947942733765, "learning_rate": 0.0009601848077184401, "loss": 3.7009, "step": 4690 }, { "epoch": 0.31899714635140647, "grad_norm": 1.5579224824905396, "learning_rate": 0.0009601423427096073, "loss": 3.7814, "step": 4695 }, { "epoch": 0.3193368664220682, "grad_norm": 1.3489257097244263, "learning_rate": 0.0009600998777007745, "loss": 3.6089, "step": 4700 }, { "epoch": 0.31967658649273, "grad_norm": 1.5003325939178467, "learning_rate": 0.0009600574126919419, "loss": 3.6292, "step": 4705 }, { "epoch": 0.32001630656339175, "grad_norm": 2.188148260116577, "learning_rate": 0.0009600149476831091, "loss": 3.4559, "step": 4710 }, { "epoch": 0.32035602663405355, "grad_norm": 1.592934250831604, "learning_rate": 0.0009599724826742764, "loss": 3.6685, "step": 4715 }, { "epoch": 0.3206957467047153, "grad_norm": 1.4508335590362549, "learning_rate": 0.0009599300176654438, "loss": 3.7543, "step": 4720 }, { "epoch": 0.3210354667753771, "grad_norm": 1.4529234170913696, "learning_rate": 0.000959887552656611, "loss": 3.7188, "step": 4725 }, { "epoch": 0.3213751868460389, "grad_norm": 1.5931645631790161, "learning_rate": 0.0009598450876477782, "loss": 3.4817, "step": 4730 }, { "epoch": 0.3217149069167006, "grad_norm": 1.6924811601638794, "learning_rate": 0.0009598026226389456, "loss": 3.3628, "step": 4735 }, { "epoch": 0.3220546269873624, "grad_norm": 1.829940676689148, "learning_rate": 0.0009597601576301128, "loss": 3.4514, "step": 4740 }, { "epoch": 0.32239434705802417, "grad_norm": 1.7842330932617188, "learning_rate": 0.00095971769262128, "loss": 3.4524, "step": 4745 }, { "epoch": 0.32273406712868596, "grad_norm": 1.7125890254974365, "learning_rate": 0.0009596752276124474, "loss": 3.5906, "step": 4750 }, { 
"epoch": 0.32307378719934776, "grad_norm": 1.9754678010940552, "learning_rate": 0.0009596327626036147, "loss": 3.7282, "step": 4755 }, { "epoch": 0.3234135072700095, "grad_norm": 1.5882364511489868, "learning_rate": 0.0009595902975947819, "loss": 3.5175, "step": 4760 }, { "epoch": 0.3237532273406713, "grad_norm": 1.9508289098739624, "learning_rate": 0.0009595478325859492, "loss": 3.7888, "step": 4765 }, { "epoch": 0.32409294741133304, "grad_norm": 2.1181235313415527, "learning_rate": 0.0009595053675771165, "loss": 3.6724, "step": 4770 }, { "epoch": 0.32443266748199484, "grad_norm": 1.664820671081543, "learning_rate": 0.0009594629025682837, "loss": 3.4688, "step": 4775 }, { "epoch": 0.32477238755265664, "grad_norm": 1.5311378240585327, "learning_rate": 0.0009594204375594511, "loss": 3.7072, "step": 4780 }, { "epoch": 0.3251121076233184, "grad_norm": 1.5501508712768555, "learning_rate": 0.0009593779725506184, "loss": 3.5718, "step": 4785 }, { "epoch": 0.3254518276939802, "grad_norm": 1.7016221284866333, "learning_rate": 0.0009593355075417856, "loss": 3.4284, "step": 4790 }, { "epoch": 0.3257915477646419, "grad_norm": 1.593835473060608, "learning_rate": 0.0009592930425329529, "loss": 3.6192, "step": 4795 }, { "epoch": 0.3261312678353037, "grad_norm": 2.1881868839263916, "learning_rate": 0.0009592505775241201, "loss": 3.4746, "step": 4800 }, { "epoch": 0.32647098790596546, "grad_norm": 2.2252373695373535, "learning_rate": 0.0009592081125152874, "loss": 3.5938, "step": 4805 }, { "epoch": 0.32681070797662726, "grad_norm": 1.6178091764450073, "learning_rate": 0.0009591656475064547, "loss": 3.8377, "step": 4810 }, { "epoch": 0.32715042804728905, "grad_norm": 1.9008928537368774, "learning_rate": 0.000959123182497622, "loss": 3.3432, "step": 4815 }, { "epoch": 0.3274901481179508, "grad_norm": 2.2918407917022705, "learning_rate": 0.0009590807174887893, "loss": 3.5853, "step": 4820 }, { "epoch": 0.3278298681886126, "grad_norm": 1.4001307487487793, "learning_rate": 
0.0009590382524799566, "loss": 3.6767, "step": 4825 }, { "epoch": 0.32816958825927434, "grad_norm": 2.0307059288024902, "learning_rate": 0.0009589957874711238, "loss": 3.7137, "step": 4830 }, { "epoch": 0.32850930832993613, "grad_norm": 1.951454758644104, "learning_rate": 0.000958953322462291, "loss": 3.626, "step": 4835 }, { "epoch": 0.32884902840059793, "grad_norm": 1.6488828659057617, "learning_rate": 0.0009589108574534584, "loss": 3.8751, "step": 4840 }, { "epoch": 0.3291887484712597, "grad_norm": 1.8698573112487793, "learning_rate": 0.0009588683924446256, "loss": 3.663, "step": 4845 }, { "epoch": 0.32952846854192147, "grad_norm": 2.4414093494415283, "learning_rate": 0.0009588259274357929, "loss": 3.6994, "step": 4850 }, { "epoch": 0.3298681886125832, "grad_norm": 2.1786701679229736, "learning_rate": 0.0009587834624269603, "loss": 3.7527, "step": 4855 }, { "epoch": 0.330207908683245, "grad_norm": 1.9059362411499023, "learning_rate": 0.0009587409974181275, "loss": 3.354, "step": 4860 }, { "epoch": 0.3305476287539068, "grad_norm": 1.9830327033996582, "learning_rate": 0.0009586985324092947, "loss": 3.7783, "step": 4865 }, { "epoch": 0.33088734882456855, "grad_norm": 1.6345735788345337, "learning_rate": 0.0009586560674004621, "loss": 3.9087, "step": 4870 }, { "epoch": 0.33122706889523035, "grad_norm": 1.7530065774917603, "learning_rate": 0.0009586136023916293, "loss": 3.3989, "step": 4875 }, { "epoch": 0.3315667889658921, "grad_norm": 2.088876247406006, "learning_rate": 0.0009585711373827965, "loss": 3.3487, "step": 4880 }, { "epoch": 0.3319065090365539, "grad_norm": 1.4738149642944336, "learning_rate": 0.000958528672373964, "loss": 3.7101, "step": 4885 }, { "epoch": 0.33224622910721563, "grad_norm": 2.1633594036102295, "learning_rate": 0.0009584862073651312, "loss": 3.6162, "step": 4890 }, { "epoch": 0.3325859491778774, "grad_norm": 1.6331466436386108, "learning_rate": 0.0009584437423562984, "loss": 3.8887, "step": 4895 }, { "epoch": 0.3329256692485392, 
"grad_norm": 1.99294912815094, "learning_rate": 0.0009584012773474657, "loss": 3.7171, "step": 4900 }, { "epoch": 0.33326538931920097, "grad_norm": 2.144409656524658, "learning_rate": 0.000958358812338633, "loss": 3.6097, "step": 4905 }, { "epoch": 0.33360510938986276, "grad_norm": 1.6122113466262817, "learning_rate": 0.0009583163473298002, "loss": 3.7955, "step": 4910 }, { "epoch": 0.3339448294605245, "grad_norm": 2.0634355545043945, "learning_rate": 0.0009582738823209675, "loss": 3.7492, "step": 4915 }, { "epoch": 0.3342845495311863, "grad_norm": 1.804072618484497, "learning_rate": 0.0009582314173121349, "loss": 3.5493, "step": 4920 }, { "epoch": 0.3346242696018481, "grad_norm": 1.5780940055847168, "learning_rate": 0.0009581889523033021, "loss": 3.8242, "step": 4925 }, { "epoch": 0.33496398967250984, "grad_norm": 1.8066766262054443, "learning_rate": 0.0009581464872944694, "loss": 4.1298, "step": 4930 }, { "epoch": 0.33530370974317164, "grad_norm": 1.9861552715301514, "learning_rate": 0.0009581040222856366, "loss": 3.7171, "step": 4935 }, { "epoch": 0.3356434298138334, "grad_norm": 1.6573377847671509, "learning_rate": 0.0009580615572768039, "loss": 3.4722, "step": 4940 }, { "epoch": 0.3359831498844952, "grad_norm": 1.3645802736282349, "learning_rate": 0.0009580190922679712, "loss": 3.7165, "step": 4945 }, { "epoch": 0.336322869955157, "grad_norm": 1.961704134941101, "learning_rate": 0.0009579766272591384, "loss": 3.6198, "step": 4950 }, { "epoch": 0.3366625900258187, "grad_norm": 1.354878544807434, "learning_rate": 0.0009579341622503058, "loss": 3.7677, "step": 4955 }, { "epoch": 0.3370023100964805, "grad_norm": 1.5021557807922363, "learning_rate": 0.0009578916972414731, "loss": 3.6077, "step": 4960 }, { "epoch": 0.33734203016714226, "grad_norm": 1.323206901550293, "learning_rate": 0.0009578492322326403, "loss": 3.6129, "step": 4965 }, { "epoch": 0.33768175023780406, "grad_norm": 2.577991485595703, "learning_rate": 0.0009578067672238076, "loss": 3.4535, "step": 
4970 }, { "epoch": 0.3380214703084658, "grad_norm": 2.0060956478118896, "learning_rate": 0.0009577643022149749, "loss": 3.5626, "step": 4975 }, { "epoch": 0.3383611903791276, "grad_norm": 1.8886144161224365, "learning_rate": 0.0009577218372061421, "loss": 3.6185, "step": 4980 }, { "epoch": 0.3387009104497894, "grad_norm": 1.4815874099731445, "learning_rate": 0.0009576793721973093, "loss": 3.5193, "step": 4985 }, { "epoch": 0.33904063052045114, "grad_norm": 2.4101786613464355, "learning_rate": 0.0009576369071884768, "loss": 3.6364, "step": 4990 }, { "epoch": 0.33938035059111293, "grad_norm": 1.4750378131866455, "learning_rate": 0.000957594442179644, "loss": 3.7048, "step": 4995 }, { "epoch": 0.3397200706617747, "grad_norm": 1.743328332901001, "learning_rate": 0.0009575519771708112, "loss": 3.6421, "step": 5000 }, { "epoch": 0.3400597907324365, "grad_norm": 1.4775193929672241, "learning_rate": 0.0009575095121619786, "loss": 3.724, "step": 5005 }, { "epoch": 0.34039951080309827, "grad_norm": 1.9036693572998047, "learning_rate": 0.0009574670471531458, "loss": 3.6973, "step": 5010 }, { "epoch": 0.34073923087376, "grad_norm": 1.520323395729065, "learning_rate": 0.0009574245821443131, "loss": 3.3913, "step": 5015 }, { "epoch": 0.3410789509444218, "grad_norm": 1.6961185932159424, "learning_rate": 0.0009573821171354804, "loss": 3.3857, "step": 5020 }, { "epoch": 0.34141867101508355, "grad_norm": 1.978361964225769, "learning_rate": 0.0009573396521266477, "loss": 3.4154, "step": 5025 }, { "epoch": 0.34175839108574535, "grad_norm": 2.0239884853363037, "learning_rate": 0.000957297187117815, "loss": 3.577, "step": 5030 }, { "epoch": 0.34209811115640715, "grad_norm": 2.0914976596832275, "learning_rate": 0.0009572547221089822, "loss": 3.4256, "step": 5035 }, { "epoch": 0.3424378312270689, "grad_norm": 2.0736749172210693, "learning_rate": 0.0009572122571001495, "loss": 3.4548, "step": 5040 }, { "epoch": 0.3427775512977307, "grad_norm": 1.8459951877593994, "learning_rate": 
0.0009571697920913168, "loss": 3.6163, "step": 5045 }, { "epoch": 0.34311727136839243, "grad_norm": 2.1826579570770264, "learning_rate": 0.000957127327082484, "loss": 3.6993, "step": 5050 }, { "epoch": 0.3434569914390542, "grad_norm": 1.2753912210464478, "learning_rate": 0.0009570848620736513, "loss": 3.823, "step": 5055 }, { "epoch": 0.34379671150971597, "grad_norm": 2.2393875122070312, "learning_rate": 0.0009570423970648187, "loss": 3.3987, "step": 5060 }, { "epoch": 0.34413643158037777, "grad_norm": 1.5779478549957275, "learning_rate": 0.0009569999320559859, "loss": 3.2583, "step": 5065 }, { "epoch": 0.34447615165103956, "grad_norm": 1.695564866065979, "learning_rate": 0.0009569574670471532, "loss": 3.5827, "step": 5070 }, { "epoch": 0.3448158717217013, "grad_norm": 1.7117983102798462, "learning_rate": 0.0009569150020383205, "loss": 3.496, "step": 5075 }, { "epoch": 0.3451555917923631, "grad_norm": 3.520897626876831, "learning_rate": 0.0009568725370294877, "loss": 3.5715, "step": 5080 }, { "epoch": 0.34549531186302485, "grad_norm": 2.1806907653808594, "learning_rate": 0.0009568300720206549, "loss": 3.5519, "step": 5085 }, { "epoch": 0.34583503193368664, "grad_norm": 1.9565421342849731, "learning_rate": 0.0009567876070118223, "loss": 3.5096, "step": 5090 }, { "epoch": 0.34617475200434844, "grad_norm": 1.902738332748413, "learning_rate": 0.0009567451420029896, "loss": 3.6879, "step": 5095 }, { "epoch": 0.3465144720750102, "grad_norm": 1.7809371948242188, "learning_rate": 0.0009567026769941568, "loss": 3.3172, "step": 5100 }, { "epoch": 0.346854192145672, "grad_norm": 1.4868241548538208, "learning_rate": 0.0009566602119853242, "loss": 3.555, "step": 5105 }, { "epoch": 0.3471939122163337, "grad_norm": 1.4154189825057983, "learning_rate": 0.0009566177469764914, "loss": 3.6878, "step": 5110 }, { "epoch": 0.3475336322869955, "grad_norm": 1.7660696506500244, "learning_rate": 0.0009565752819676586, "loss": 3.6656, "step": 5115 }, { "epoch": 0.3478733523576573, 
"grad_norm": 1.7691024541854858, "learning_rate": 0.000956532816958826, "loss": 3.6549, "step": 5120 }, { "epoch": 0.34821307242831906, "grad_norm": 1.5541496276855469, "learning_rate": 0.0009564903519499932, "loss": 3.6924, "step": 5125 }, { "epoch": 0.34855279249898086, "grad_norm": 1.421266794204712, "learning_rate": 0.0009564478869411605, "loss": 3.4113, "step": 5130 }, { "epoch": 0.3488925125696426, "grad_norm": 1.6152160167694092, "learning_rate": 0.0009564054219323279, "loss": 3.4554, "step": 5135 }, { "epoch": 0.3492322326403044, "grad_norm": 1.2821738719940186, "learning_rate": 0.0009563629569234951, "loss": 3.5225, "step": 5140 }, { "epoch": 0.34957195271096614, "grad_norm": 1.6337780952453613, "learning_rate": 0.0009563204919146623, "loss": 3.4394, "step": 5145 }, { "epoch": 0.34991167278162794, "grad_norm": 1.7031718492507935, "learning_rate": 0.0009562780269058296, "loss": 3.8029, "step": 5150 }, { "epoch": 0.35025139285228973, "grad_norm": 1.6353247165679932, "learning_rate": 0.0009562355618969969, "loss": 3.541, "step": 5155 }, { "epoch": 0.3505911129229515, "grad_norm": 1.7339218854904175, "learning_rate": 0.0009561930968881641, "loss": 3.6803, "step": 5160 }, { "epoch": 0.3509308329936133, "grad_norm": 1.6476813554763794, "learning_rate": 0.0009561506318793315, "loss": 3.593, "step": 5165 }, { "epoch": 0.351270553064275, "grad_norm": 1.4442944526672363, "learning_rate": 0.0009561081668704988, "loss": 3.5451, "step": 5170 }, { "epoch": 0.3516102731349368, "grad_norm": 1.89138662815094, "learning_rate": 0.000956065701861666, "loss": 3.3623, "step": 5175 }, { "epoch": 0.3519499932055986, "grad_norm": 2.7446646690368652, "learning_rate": 0.0009560232368528333, "loss": 3.7952, "step": 5180 }, { "epoch": 0.35228971327626035, "grad_norm": 2.1763226985931396, "learning_rate": 0.0009559807718440005, "loss": 3.6291, "step": 5185 }, { "epoch": 0.35262943334692215, "grad_norm": 1.6760846376419067, "learning_rate": 0.0009559383068351678, "loss": 3.7011, "step": 
5190 }, { "epoch": 0.3529691534175839, "grad_norm": 1.7570613622665405, "learning_rate": 0.0009558958418263351, "loss": 3.5231, "step": 5195 }, { "epoch": 0.3533088734882457, "grad_norm": 2.1079940795898438, "learning_rate": 0.0009558533768175024, "loss": 3.7729, "step": 5200 }, { "epoch": 0.3536485935589075, "grad_norm": 1.8115911483764648, "learning_rate": 0.0009558109118086697, "loss": 3.5624, "step": 5205 }, { "epoch": 0.35398831362956923, "grad_norm": 1.8732284307479858, "learning_rate": 0.000955768446799837, "loss": 3.655, "step": 5210 }, { "epoch": 0.354328033700231, "grad_norm": 1.5626914501190186, "learning_rate": 0.0009557259817910042, "loss": 3.7778, "step": 5215 }, { "epoch": 0.35466775377089277, "grad_norm": 2.107003927230835, "learning_rate": 0.0009556835167821714, "loss": 3.7979, "step": 5220 }, { "epoch": 0.35500747384155457, "grad_norm": 1.6422622203826904, "learning_rate": 0.0009556410517733388, "loss": 3.5614, "step": 5225 }, { "epoch": 0.3553471939122163, "grad_norm": 1.791274070739746, "learning_rate": 0.000955598586764506, "loss": 3.6431, "step": 5230 }, { "epoch": 0.3556869139828781, "grad_norm": 1.7012755870819092, "learning_rate": 0.0009555561217556733, "loss": 3.3442, "step": 5235 }, { "epoch": 0.3560266340535399, "grad_norm": 1.3982805013656616, "learning_rate": 0.0009555136567468407, "loss": 3.6659, "step": 5240 }, { "epoch": 0.35636635412420165, "grad_norm": 1.8701908588409424, "learning_rate": 0.0009554711917380079, "loss": 3.5321, "step": 5245 }, { "epoch": 0.35670607419486344, "grad_norm": 1.650595784187317, "learning_rate": 0.0009554287267291751, "loss": 3.9245, "step": 5250 }, { "epoch": 0.3570457942655252, "grad_norm": 1.459466814994812, "learning_rate": 0.0009553862617203425, "loss": 3.4784, "step": 5255 }, { "epoch": 0.357385514336187, "grad_norm": 1.5409417152404785, "learning_rate": 0.0009553437967115097, "loss": 3.4976, "step": 5260 }, { "epoch": 0.3577252344068488, "grad_norm": 1.7502360343933105, "learning_rate": 
0.0009553013317026769, "loss": 3.2851, "step": 5265 }, { "epoch": 0.3580649544775105, "grad_norm": 1.6417348384857178, "learning_rate": 0.0009552588666938444, "loss": 3.5837, "step": 5270 }, { "epoch": 0.3584046745481723, "grad_norm": 1.6994739770889282, "learning_rate": 0.0009552164016850116, "loss": 3.8143, "step": 5275 }, { "epoch": 0.35874439461883406, "grad_norm": 1.501734972000122, "learning_rate": 0.0009551739366761788, "loss": 3.6152, "step": 5280 }, { "epoch": 0.35908411468949586, "grad_norm": 2.0963146686553955, "learning_rate": 0.0009551314716673461, "loss": 3.6446, "step": 5285 }, { "epoch": 0.35942383476015766, "grad_norm": 2.1708428859710693, "learning_rate": 0.0009550890066585134, "loss": 3.7662, "step": 5290 }, { "epoch": 0.3597635548308194, "grad_norm": 2.102781057357788, "learning_rate": 0.0009550465416496806, "loss": 3.7437, "step": 5295 }, { "epoch": 0.3601032749014812, "grad_norm": 1.5938975811004639, "learning_rate": 0.000955004076640848, "loss": 3.7962, "step": 5300 }, { "epoch": 0.36044299497214294, "grad_norm": 1.4903939962387085, "learning_rate": 0.0009549616116320153, "loss": 3.7498, "step": 5305 }, { "epoch": 0.36078271504280474, "grad_norm": 1.4701544046401978, "learning_rate": 0.0009549191466231825, "loss": 3.7485, "step": 5310 }, { "epoch": 0.3611224351134665, "grad_norm": 1.5980491638183594, "learning_rate": 0.0009548766816143498, "loss": 3.8614, "step": 5315 }, { "epoch": 0.3614621551841283, "grad_norm": 1.8773425817489624, "learning_rate": 0.000954834216605517, "loss": 3.52, "step": 5320 }, { "epoch": 0.3618018752547901, "grad_norm": 1.8470879793167114, "learning_rate": 0.0009547917515966843, "loss": 3.8344, "step": 5325 }, { "epoch": 0.3621415953254518, "grad_norm": 1.5870519876480103, "learning_rate": 0.0009547492865878516, "loss": 3.665, "step": 5330 }, { "epoch": 0.3624813153961136, "grad_norm": 2.3206946849823, "learning_rate": 0.0009547068215790189, "loss": 3.9554, "step": 5335 }, { "epoch": 0.36282103546677535, "grad_norm": 
1.9054632186889648, "learning_rate": 0.0009546643565701862, "loss": 3.5466, "step": 5340 }, { "epoch": 0.36316075553743715, "grad_norm": 1.9206182956695557, "learning_rate": 0.0009546218915613535, "loss": 3.4292, "step": 5345 }, { "epoch": 0.36350047560809895, "grad_norm": 1.8389546871185303, "learning_rate": 0.0009545794265525207, "loss": 3.701, "step": 5350 }, { "epoch": 0.3638401956787607, "grad_norm": 1.4730939865112305, "learning_rate": 0.0009545369615436881, "loss": 3.3086, "step": 5355 }, { "epoch": 0.3641799157494225, "grad_norm": 1.7451221942901611, "learning_rate": 0.0009544944965348553, "loss": 3.4786, "step": 5360 }, { "epoch": 0.36451963582008423, "grad_norm": 2.5829529762268066, "learning_rate": 0.0009544520315260225, "loss": 3.7204, "step": 5365 }, { "epoch": 0.36485935589074603, "grad_norm": 2.111727476119995, "learning_rate": 0.00095440956651719, "loss": 3.4162, "step": 5370 }, { "epoch": 0.3651990759614078, "grad_norm": 1.6573864221572876, "learning_rate": 0.0009543671015083572, "loss": 3.5311, "step": 5375 }, { "epoch": 0.36553879603206957, "grad_norm": 1.9111045598983765, "learning_rate": 0.0009543246364995244, "loss": 3.4589, "step": 5380 }, { "epoch": 0.36587851610273137, "grad_norm": 1.4844567775726318, "learning_rate": 0.0009542821714906917, "loss": 3.6542, "step": 5385 }, { "epoch": 0.3662182361733931, "grad_norm": 1.7163769006729126, "learning_rate": 0.000954239706481859, "loss": 3.6406, "step": 5390 }, { "epoch": 0.3665579562440549, "grad_norm": 1.5493077039718628, "learning_rate": 0.0009541972414730262, "loss": 3.4182, "step": 5395 }, { "epoch": 0.36689767631471665, "grad_norm": 1.7546207904815674, "learning_rate": 0.0009541547764641935, "loss": 3.579, "step": 5400 }, { "epoch": 0.36723739638537845, "grad_norm": 2.211155414581299, "learning_rate": 0.0009541123114553609, "loss": 3.4929, "step": 5405 }, { "epoch": 0.36757711645604024, "grad_norm": 1.781893014907837, "learning_rate": 0.0009540698464465281, "loss": 3.5479, "step": 5410 }, { 
"epoch": 0.367916836526702, "grad_norm": 1.5182545185089111, "learning_rate": 0.0009540273814376954, "loss": 3.6776, "step": 5415 }, { "epoch": 0.3682565565973638, "grad_norm": 1.5315370559692383, "learning_rate": 0.0009539849164288627, "loss": 3.5446, "step": 5420 }, { "epoch": 0.3685962766680255, "grad_norm": 2.3500819206237793, "learning_rate": 0.0009539424514200299, "loss": 3.3865, "step": 5425 }, { "epoch": 0.3689359967386873, "grad_norm": 1.5453088283538818, "learning_rate": 0.0009538999864111972, "loss": 3.661, "step": 5430 }, { "epoch": 0.3692757168093491, "grad_norm": 1.8448878526687622, "learning_rate": 0.0009538575214023644, "loss": 3.5939, "step": 5435 }, { "epoch": 0.36961543688001086, "grad_norm": 1.8659400939941406, "learning_rate": 0.0009538150563935318, "loss": 3.4455, "step": 5440 }, { "epoch": 0.36995515695067266, "grad_norm": 1.8785974979400635, "learning_rate": 0.0009537725913846991, "loss": 3.5626, "step": 5445 }, { "epoch": 0.3702948770213344, "grad_norm": 2.109265089035034, "learning_rate": 0.0009537301263758663, "loss": 3.5729, "step": 5450 }, { "epoch": 0.3706345970919962, "grad_norm": 1.784554123878479, "learning_rate": 0.0009536876613670336, "loss": 3.8291, "step": 5455 }, { "epoch": 0.370974317162658, "grad_norm": 1.472251296043396, "learning_rate": 0.0009536451963582009, "loss": 3.6299, "step": 5460 }, { "epoch": 0.37131403723331974, "grad_norm": 1.9224287271499634, "learning_rate": 0.0009536027313493681, "loss": 3.433, "step": 5465 }, { "epoch": 0.37165375730398154, "grad_norm": 3.618095874786377, "learning_rate": 0.0009535602663405353, "loss": 3.6785, "step": 5470 }, { "epoch": 0.3719934773746433, "grad_norm": 1.9638206958770752, "learning_rate": 0.0009535178013317028, "loss": 3.8624, "step": 5475 }, { "epoch": 0.3723331974453051, "grad_norm": 1.6765285730361938, "learning_rate": 0.00095347533632287, "loss": 3.7255, "step": 5480 }, { "epoch": 0.3726729175159668, "grad_norm": 1.9442791938781738, "learning_rate": 0.0009534328713140372, 
"loss": 3.8208, "step": 5485 }, { "epoch": 0.3730126375866286, "grad_norm": 2.2047834396362305, "learning_rate": 0.0009533904063052046, "loss": 3.6753, "step": 5490 }, { "epoch": 0.3733523576572904, "grad_norm": 1.9261034727096558, "learning_rate": 0.0009533479412963718, "loss": 3.6524, "step": 5495 }, { "epoch": 0.37369207772795215, "grad_norm": 1.7920902967453003, "learning_rate": 0.000953305476287539, "loss": 3.5575, "step": 5500 }, { "epoch": 0.37403179779861395, "grad_norm": 1.8486226797103882, "learning_rate": 0.0009532630112787064, "loss": 3.9325, "step": 5505 }, { "epoch": 0.3743715178692757, "grad_norm": 1.6368223428726196, "learning_rate": 0.0009532205462698737, "loss": 3.539, "step": 5510 }, { "epoch": 0.3747112379399375, "grad_norm": 1.8158345222473145, "learning_rate": 0.0009531780812610409, "loss": 3.7471, "step": 5515 }, { "epoch": 0.3750509580105993, "grad_norm": 1.7801252603530884, "learning_rate": 0.0009531356162522083, "loss": 3.3818, "step": 5520 }, { "epoch": 0.37539067808126103, "grad_norm": 2.0484113693237305, "learning_rate": 0.0009530931512433755, "loss": 3.4228, "step": 5525 }, { "epoch": 0.37573039815192283, "grad_norm": 1.9376975297927856, "learning_rate": 0.0009530506862345427, "loss": 3.5189, "step": 5530 }, { "epoch": 0.37607011822258457, "grad_norm": 1.501808524131775, "learning_rate": 0.00095300822122571, "loss": 3.8839, "step": 5535 }, { "epoch": 0.37640983829324637, "grad_norm": 2.0116028785705566, "learning_rate": 0.0009529657562168773, "loss": 3.6685, "step": 5540 }, { "epoch": 0.37674955836390817, "grad_norm": 1.53324294090271, "learning_rate": 0.0009529232912080446, "loss": 3.8735, "step": 5545 }, { "epoch": 0.3770892784345699, "grad_norm": 1.4431952238082886, "learning_rate": 0.0009528808261992119, "loss": 4.006, "step": 5550 }, { "epoch": 0.3774289985052317, "grad_norm": 1.6339349746704102, "learning_rate": 0.0009528383611903792, "loss": 3.8764, "step": 5555 }, { "epoch": 0.37776871857589345, "grad_norm": 2.734637498855591, 
"learning_rate": 0.0009527958961815464, "loss": 3.8015, "step": 5560 }, { "epoch": 0.37810843864655524, "grad_norm": 2.0114312171936035, "learning_rate": 0.0009527534311727137, "loss": 3.5556, "step": 5565 }, { "epoch": 0.378448158717217, "grad_norm": 1.796217918395996, "learning_rate": 0.000952710966163881, "loss": 3.6965, "step": 5570 }, { "epoch": 0.3787878787878788, "grad_norm": 1.8059078454971313, "learning_rate": 0.0009526685011550482, "loss": 3.5036, "step": 5575 }, { "epoch": 0.3791275988585406, "grad_norm": 1.9656256437301636, "learning_rate": 0.0009526260361462156, "loss": 3.5218, "step": 5580 }, { "epoch": 0.3794673189292023, "grad_norm": 1.8452011346817017, "learning_rate": 0.0009525835711373828, "loss": 3.5252, "step": 5585 }, { "epoch": 0.3798070389998641, "grad_norm": 1.6031079292297363, "learning_rate": 0.0009525411061285501, "loss": 3.4625, "step": 5590 }, { "epoch": 0.38014675907052586, "grad_norm": 1.9537091255187988, "learning_rate": 0.0009524986411197174, "loss": 3.4856, "step": 5595 }, { "epoch": 0.38048647914118766, "grad_norm": 1.5682789087295532, "learning_rate": 0.0009524561761108846, "loss": 3.7163, "step": 5600 }, { "epoch": 0.38082619921184946, "grad_norm": 1.84691321849823, "learning_rate": 0.0009524137111020519, "loss": 3.6595, "step": 5605 }, { "epoch": 0.3811659192825112, "grad_norm": 1.9616477489471436, "learning_rate": 0.0009523712460932192, "loss": 3.508, "step": 5610 }, { "epoch": 0.381505639353173, "grad_norm": 1.602921485900879, "learning_rate": 0.0009523287810843865, "loss": 3.5377, "step": 5615 }, { "epoch": 0.38184535942383474, "grad_norm": 1.3708206415176392, "learning_rate": 0.0009522863160755537, "loss": 3.7443, "step": 5620 }, { "epoch": 0.38218507949449654, "grad_norm": 1.956351399421692, "learning_rate": 0.0009522438510667211, "loss": 3.5613, "step": 5625 }, { "epoch": 0.38252479956515834, "grad_norm": 2.1092288494110107, "learning_rate": 0.0009522013860578883, "loss": 3.6823, "step": 5630 }, { "epoch": 
0.3828645196358201, "grad_norm": 1.7899845838546753, "learning_rate": 0.0009521589210490555, "loss": 3.4763, "step": 5635 }, { "epoch": 0.3832042397064819, "grad_norm": 1.7970200777053833, "learning_rate": 0.0009521164560402229, "loss": 3.6458, "step": 5640 }, { "epoch": 0.3835439597771436, "grad_norm": 2.13852858543396, "learning_rate": 0.0009520739910313901, "loss": 3.7025, "step": 5645 }, { "epoch": 0.3838836798478054, "grad_norm": 2.1948328018188477, "learning_rate": 0.0009520315260225574, "loss": 3.5203, "step": 5650 }, { "epoch": 0.38422339991846716, "grad_norm": 1.6637862920761108, "learning_rate": 0.0009519890610137248, "loss": 3.6966, "step": 5655 }, { "epoch": 0.38456311998912895, "grad_norm": 1.5820040702819824, "learning_rate": 0.000951946596004892, "loss": 3.6072, "step": 5660 }, { "epoch": 0.38490284005979075, "grad_norm": 1.7444941997528076, "learning_rate": 0.0009519041309960592, "loss": 3.5993, "step": 5665 }, { "epoch": 0.3852425601304525, "grad_norm": 1.9624375104904175, "learning_rate": 0.0009518616659872265, "loss": 3.4797, "step": 5670 }, { "epoch": 0.3855822802011143, "grad_norm": 1.8700963258743286, "learning_rate": 0.0009518192009783938, "loss": 3.4512, "step": 5675 }, { "epoch": 0.38592200027177603, "grad_norm": 2.2616281509399414, "learning_rate": 0.000951776735969561, "loss": 3.5295, "step": 5680 }, { "epoch": 0.38626172034243783, "grad_norm": 1.5366291999816895, "learning_rate": 0.0009517342709607284, "loss": 3.412, "step": 5685 }, { "epoch": 0.38660144041309963, "grad_norm": 2.145089864730835, "learning_rate": 0.0009516918059518957, "loss": 3.727, "step": 5690 }, { "epoch": 0.38694116048376137, "grad_norm": 1.6657593250274658, "learning_rate": 0.000951649340943063, "loss": 3.5213, "step": 5695 }, { "epoch": 0.38728088055442317, "grad_norm": 1.8543182611465454, "learning_rate": 0.0009516068759342302, "loss": 3.571, "step": 5700 }, { "epoch": 0.3876206006250849, "grad_norm": 2.177210569381714, "learning_rate": 0.0009515644109253975, 
"loss": 3.4203, "step": 5705 }, { "epoch": 0.3879603206957467, "grad_norm": 2.053079128265381, "learning_rate": 0.0009515219459165648, "loss": 3.6554, "step": 5710 }, { "epoch": 0.3883000407664085, "grad_norm": 1.736803412437439, "learning_rate": 0.000951479480907732, "loss": 3.68, "step": 5715 }, { "epoch": 0.38863976083707025, "grad_norm": 1.7397568225860596, "learning_rate": 0.0009514370158988993, "loss": 3.604, "step": 5720 }, { "epoch": 0.38897948090773204, "grad_norm": 1.6956855058670044, "learning_rate": 0.0009513945508900667, "loss": 3.7953, "step": 5725 }, { "epoch": 0.3893192009783938, "grad_norm": 1.7887511253356934, "learning_rate": 0.0009513520858812339, "loss": 3.6293, "step": 5730 }, { "epoch": 0.3896589210490556, "grad_norm": 2.0704689025878906, "learning_rate": 0.0009513096208724011, "loss": 3.6454, "step": 5735 }, { "epoch": 0.3899986411197173, "grad_norm": 1.7207316160202026, "learning_rate": 0.0009512671558635685, "loss": 3.866, "step": 5740 }, { "epoch": 0.3903383611903791, "grad_norm": 1.568091630935669, "learning_rate": 0.0009512246908547357, "loss": 3.6284, "step": 5745 }, { "epoch": 0.3906780812610409, "grad_norm": 1.5288095474243164, "learning_rate": 0.0009511822258459029, "loss": 3.6049, "step": 5750 }, { "epoch": 0.39101780133170266, "grad_norm": 1.8240407705307007, "learning_rate": 0.0009511397608370704, "loss": 3.7266, "step": 5755 }, { "epoch": 0.39135752140236446, "grad_norm": 1.9980558156967163, "learning_rate": 0.0009510972958282376, "loss": 3.6557, "step": 5760 }, { "epoch": 0.3916972414730262, "grad_norm": 2.004345178604126, "learning_rate": 0.0009510548308194048, "loss": 3.4489, "step": 5765 }, { "epoch": 0.392036961543688, "grad_norm": 1.7662371397018433, "learning_rate": 0.0009510123658105721, "loss": 3.7328, "step": 5770 }, { "epoch": 0.3923766816143498, "grad_norm": 1.9567824602127075, "learning_rate": 0.0009509699008017394, "loss": 3.6722, "step": 5775 }, { "epoch": 0.39271640168501154, "grad_norm": 2.0169665813446045, 
"learning_rate": 0.0009509274357929066, "loss": 3.4491, "step": 5780 }, { "epoch": 0.39305612175567334, "grad_norm": 1.8472424745559692, "learning_rate": 0.0009508849707840739, "loss": 3.7463, "step": 5785 }, { "epoch": 0.3933958418263351, "grad_norm": 1.863681435585022, "learning_rate": 0.0009508425057752413, "loss": 3.8318, "step": 5790 }, { "epoch": 0.3937355618969969, "grad_norm": 1.7510582208633423, "learning_rate": 0.0009508000407664085, "loss": 3.615, "step": 5795 }, { "epoch": 0.3940752819676587, "grad_norm": 2.2277352809906006, "learning_rate": 0.0009507575757575758, "loss": 3.8044, "step": 5800 }, { "epoch": 0.3944150020383204, "grad_norm": 1.9477206468582153, "learning_rate": 0.0009507151107487431, "loss": 3.8127, "step": 5805 }, { "epoch": 0.3947547221089822, "grad_norm": 1.8139147758483887, "learning_rate": 0.0009506726457399103, "loss": 3.6545, "step": 5810 }, { "epoch": 0.39509444217964396, "grad_norm": 2.2764878273010254, "learning_rate": 0.0009506301807310776, "loss": 3.2597, "step": 5815 }, { "epoch": 0.39543416225030575, "grad_norm": 1.9156484603881836, "learning_rate": 0.0009505877157222448, "loss": 3.8275, "step": 5820 }, { "epoch": 0.3957738823209675, "grad_norm": 1.716383695602417, "learning_rate": 0.0009505452507134122, "loss": 3.7467, "step": 5825 }, { "epoch": 0.3961136023916293, "grad_norm": 1.971254587173462, "learning_rate": 0.0009505027857045795, "loss": 3.4541, "step": 5830 }, { "epoch": 0.3964533224622911, "grad_norm": 1.7432881593704224, "learning_rate": 0.0009504603206957467, "loss": 3.7767, "step": 5835 }, { "epoch": 0.39679304253295283, "grad_norm": 1.7543706893920898, "learning_rate": 0.000950417855686914, "loss": 3.5976, "step": 5840 }, { "epoch": 0.39713276260361463, "grad_norm": 2.301726818084717, "learning_rate": 0.0009503753906780813, "loss": 3.4928, "step": 5845 }, { "epoch": 0.3974724826742764, "grad_norm": 1.8604066371917725, "learning_rate": 0.0009503329256692485, "loss": 3.5322, "step": 5850 }, { "epoch": 
0.39781220274493817, "grad_norm": 1.8608406782150269, "learning_rate": 0.0009502904606604157, "loss": 3.6646, "step": 5855 }, { "epoch": 0.39815192281559997, "grad_norm": 1.6280021667480469, "learning_rate": 0.0009502479956515832, "loss": 3.6447, "step": 5860 }, { "epoch": 0.3984916428862617, "grad_norm": 2.6521992683410645, "learning_rate": 0.0009502055306427504, "loss": 3.8521, "step": 5865 }, { "epoch": 0.3988313629569235, "grad_norm": 2.1876580715179443, "learning_rate": 0.0009501630656339176, "loss": 3.5993, "step": 5870 }, { "epoch": 0.39917108302758525, "grad_norm": 1.5766788721084595, "learning_rate": 0.000950120600625085, "loss": 3.3159, "step": 5875 }, { "epoch": 0.39951080309824705, "grad_norm": 2.097520589828491, "learning_rate": 0.0009500781356162522, "loss": 3.7524, "step": 5880 }, { "epoch": 0.39985052316890884, "grad_norm": 1.5818655490875244, "learning_rate": 0.0009500356706074194, "loss": 3.8432, "step": 5885 }, { "epoch": 0.4001902432395706, "grad_norm": 1.6252933740615845, "learning_rate": 0.0009499932055985869, "loss": 3.6987, "step": 5890 }, { "epoch": 0.4005299633102324, "grad_norm": 1.7441222667694092, "learning_rate": 0.0009499507405897541, "loss": 3.3572, "step": 5895 }, { "epoch": 0.4008696833808941, "grad_norm": 2.185302495956421, "learning_rate": 0.0009499082755809213, "loss": 3.5751, "step": 5900 }, { "epoch": 0.4012094034515559, "grad_norm": 1.5092425346374512, "learning_rate": 0.0009498658105720887, "loss": 3.6711, "step": 5905 }, { "epoch": 0.40154912352221767, "grad_norm": 2.2559595108032227, "learning_rate": 0.0009498233455632559, "loss": 3.7495, "step": 5910 }, { "epoch": 0.40188884359287946, "grad_norm": 1.7846527099609375, "learning_rate": 0.0009497808805544231, "loss": 3.7049, "step": 5915 }, { "epoch": 0.40222856366354126, "grad_norm": 2.01631236076355, "learning_rate": 0.0009497384155455904, "loss": 3.8056, "step": 5920 }, { "epoch": 0.402568283734203, "grad_norm": 1.8379533290863037, "learning_rate": 0.0009496959505367578, 
"loss": 3.5009, "step": 5925 }, { "epoch": 0.4029080038048648, "grad_norm": 2.0160536766052246, "learning_rate": 0.000949653485527925, "loss": 3.6733, "step": 5930 }, { "epoch": 0.40324772387552654, "grad_norm": 1.6493864059448242, "learning_rate": 0.0009496110205190923, "loss": 3.7884, "step": 5935 }, { "epoch": 0.40358744394618834, "grad_norm": 1.8304206132888794, "learning_rate": 0.0009495685555102596, "loss": 3.5358, "step": 5940 }, { "epoch": 0.40392716401685014, "grad_norm": 2.1883046627044678, "learning_rate": 0.0009495260905014268, "loss": 3.6176, "step": 5945 }, { "epoch": 0.4042668840875119, "grad_norm": 1.4969171285629272, "learning_rate": 0.0009494836254925941, "loss": 3.8224, "step": 5950 }, { "epoch": 0.4046066041581737, "grad_norm": 1.6345677375793457, "learning_rate": 0.0009494411604837613, "loss": 3.7245, "step": 5955 }, { "epoch": 0.4049463242288354, "grad_norm": 1.7642689943313599, "learning_rate": 0.0009493986954749287, "loss": 3.5968, "step": 5960 }, { "epoch": 0.4052860442994972, "grad_norm": 1.7243539094924927, "learning_rate": 0.000949356230466096, "loss": 3.6513, "step": 5965 }, { "epoch": 0.405625764370159, "grad_norm": 1.8749953508377075, "learning_rate": 0.0009493137654572632, "loss": 3.8508, "step": 5970 }, { "epoch": 0.40596548444082076, "grad_norm": 1.856766700744629, "learning_rate": 0.0009492713004484305, "loss": 3.4531, "step": 5975 }, { "epoch": 0.40630520451148255, "grad_norm": 1.990910530090332, "learning_rate": 0.0009492288354395978, "loss": 3.629, "step": 5980 }, { "epoch": 0.4066449245821443, "grad_norm": 1.540783405303955, "learning_rate": 0.000949186370430765, "loss": 3.4907, "step": 5985 }, { "epoch": 0.4069846446528061, "grad_norm": 1.9407986402511597, "learning_rate": 0.0009491439054219323, "loss": 3.9251, "step": 5990 }, { "epoch": 0.40732436472346784, "grad_norm": 1.6080083847045898, "learning_rate": 0.0009491014404130997, "loss": 3.6844, "step": 5995 }, { "epoch": 0.40766408479412963, "grad_norm": 1.5853875875473022, 
"learning_rate": 0.0009490589754042669, "loss": 3.4943, "step": 6000 }, { "epoch": 0.40800380486479143, "grad_norm": 1.9172230958938599, "learning_rate": 0.0009490165103954342, "loss": 3.6372, "step": 6005 }, { "epoch": 0.4083435249354532, "grad_norm": 1.9599542617797852, "learning_rate": 0.0009489740453866015, "loss": 3.5761, "step": 6010 }, { "epoch": 0.40868324500611497, "grad_norm": 1.7309514284133911, "learning_rate": 0.0009489315803777687, "loss": 3.5929, "step": 6015 }, { "epoch": 0.4090229650767767, "grad_norm": 1.8542715311050415, "learning_rate": 0.0009488891153689359, "loss": 3.6602, "step": 6020 }, { "epoch": 0.4093626851474385, "grad_norm": 2.022897720336914, "learning_rate": 0.0009488466503601033, "loss": 3.4482, "step": 6025 }, { "epoch": 0.4097024052181003, "grad_norm": 1.9405527114868164, "learning_rate": 0.0009488041853512706, "loss": 3.3899, "step": 6030 }, { "epoch": 0.41004212528876205, "grad_norm": 2.266096830368042, "learning_rate": 0.0009487617203424379, "loss": 3.5738, "step": 6035 }, { "epoch": 0.41038184535942385, "grad_norm": 2.4896769523620605, "learning_rate": 0.0009487192553336052, "loss": 3.7218, "step": 6040 }, { "epoch": 0.4107215654300856, "grad_norm": 1.7489125728607178, "learning_rate": 0.0009486767903247724, "loss": 3.5683, "step": 6045 }, { "epoch": 0.4110612855007474, "grad_norm": 1.9341275691986084, "learning_rate": 0.0009486343253159397, "loss": 3.6596, "step": 6050 }, { "epoch": 0.4114010055714092, "grad_norm": 1.6507455110549927, "learning_rate": 0.000948591860307107, "loss": 3.4796, "step": 6055 }, { "epoch": 0.4117407256420709, "grad_norm": 1.9063788652420044, "learning_rate": 0.0009485493952982742, "loss": 3.415, "step": 6060 }, { "epoch": 0.4120804457127327, "grad_norm": 2.363940954208374, "learning_rate": 0.0009485069302894416, "loss": 3.6731, "step": 6065 }, { "epoch": 0.41242016578339447, "grad_norm": 2.128358840942383, "learning_rate": 0.0009484644652806088, "loss": 3.8432, "step": 6070 }, { "epoch": 
0.41275988585405626, "grad_norm": 1.9851205348968506, "learning_rate": 0.0009484220002717761, "loss": 3.2334, "step": 6075 }, { "epoch": 0.413099605924718, "grad_norm": 2.2875330448150635, "learning_rate": 0.0009483795352629434, "loss": 3.8169, "step": 6080 }, { "epoch": 0.4134393259953798, "grad_norm": 2.4142539501190186, "learning_rate": 0.0009483370702541106, "loss": 3.6663, "step": 6085 }, { "epoch": 0.4137790460660416, "grad_norm": 1.622111201286316, "learning_rate": 0.0009482946052452779, "loss": 3.797, "step": 6090 }, { "epoch": 0.41411876613670334, "grad_norm": 1.9928563833236694, "learning_rate": 0.0009482521402364452, "loss": 3.547, "step": 6095 }, { "epoch": 0.41445848620736514, "grad_norm": 1.8701398372650146, "learning_rate": 0.0009482096752276125, "loss": 3.6075, "step": 6100 }, { "epoch": 0.4147982062780269, "grad_norm": 1.7893022298812866, "learning_rate": 0.0009481672102187798, "loss": 3.4597, "step": 6105 }, { "epoch": 0.4151379263486887, "grad_norm": 1.794904351234436, "learning_rate": 0.0009481247452099471, "loss": 3.6112, "step": 6110 }, { "epoch": 0.4154776464193505, "grad_norm": 2.602504253387451, "learning_rate": 0.0009480822802011143, "loss": 3.4985, "step": 6115 }, { "epoch": 0.4158173664900122, "grad_norm": 1.849289894104004, "learning_rate": 0.0009480398151922815, "loss": 3.9109, "step": 6120 }, { "epoch": 0.416157086560674, "grad_norm": 1.5116159915924072, "learning_rate": 0.0009479973501834489, "loss": 3.6651, "step": 6125 }, { "epoch": 0.41649680663133576, "grad_norm": 1.6705137491226196, "learning_rate": 0.0009479548851746161, "loss": 3.3943, "step": 6130 }, { "epoch": 0.41683652670199756, "grad_norm": 2.151573419570923, "learning_rate": 0.0009479124201657834, "loss": 3.5093, "step": 6135 }, { "epoch": 0.41717624677265935, "grad_norm": 1.8810330629348755, "learning_rate": 0.0009478699551569508, "loss": 3.6876, "step": 6140 }, { "epoch": 0.4175159668433211, "grad_norm": 2.2780649662017822, "learning_rate": 0.000947827490148118, 
"loss": 3.7216, "step": 6145 }, { "epoch": 0.4178556869139829, "grad_norm": 3.126410722732544, "learning_rate": 0.0009477850251392852, "loss": 3.7767, "step": 6150 }, { "epoch": 0.41819540698464464, "grad_norm": 2.0424246788024902, "learning_rate": 0.0009477425601304526, "loss": 3.5596, "step": 6155 }, { "epoch": 0.41853512705530643, "grad_norm": 1.6683694124221802, "learning_rate": 0.0009477000951216198, "loss": 3.6834, "step": 6160 }, { "epoch": 0.4188748471259682, "grad_norm": 1.9148592948913574, "learning_rate": 0.000947657630112787, "loss": 3.9249, "step": 6165 }, { "epoch": 0.41921456719663, "grad_norm": 2.142505407333374, "learning_rate": 0.0009476151651039544, "loss": 3.7331, "step": 6170 }, { "epoch": 0.41955428726729177, "grad_norm": 2.0052895545959473, "learning_rate": 0.0009475727000951217, "loss": 3.6773, "step": 6175 }, { "epoch": 0.4198940073379535, "grad_norm": 1.6954755783081055, "learning_rate": 0.0009475302350862889, "loss": 3.5684, "step": 6180 }, { "epoch": 0.4202337274086153, "grad_norm": 1.5379425287246704, "learning_rate": 0.0009474877700774562, "loss": 3.4853, "step": 6185 }, { "epoch": 0.42057344747927705, "grad_norm": 1.7818613052368164, "learning_rate": 0.0009474453050686235, "loss": 3.4118, "step": 6190 }, { "epoch": 0.42091316754993885, "grad_norm": 1.4931648969650269, "learning_rate": 0.0009474028400597907, "loss": 3.6715, "step": 6195 }, { "epoch": 0.42125288762060065, "grad_norm": 1.5759822130203247, "learning_rate": 0.000947360375050958, "loss": 3.583, "step": 6200 }, { "epoch": 0.4215926076912624, "grad_norm": 2.163240432739258, "learning_rate": 0.0009473179100421254, "loss": 3.5707, "step": 6205 }, { "epoch": 0.4219323277619242, "grad_norm": 1.7036728858947754, "learning_rate": 0.0009472754450332926, "loss": 3.526, "step": 6210 }, { "epoch": 0.42227204783258593, "grad_norm": 2.193265676498413, "learning_rate": 0.0009472329800244599, "loss": 3.5124, "step": 6215 }, { "epoch": 0.4226117679032477, "grad_norm": 1.9179424047470093, 
"learning_rate": 0.0009471905150156271, "loss": 3.5148, "step": 6220 }, { "epoch": 0.4229514879739095, "grad_norm": 1.7351652383804321, "learning_rate": 0.0009471480500067944, "loss": 3.5392, "step": 6225 }, { "epoch": 0.42329120804457127, "grad_norm": 1.5640573501586914, "learning_rate": 0.0009471055849979617, "loss": 3.6778, "step": 6230 }, { "epoch": 0.42363092811523306, "grad_norm": 1.5815374851226807, "learning_rate": 0.0009470631199891289, "loss": 3.7072, "step": 6235 }, { "epoch": 0.4239706481858948, "grad_norm": 1.7713813781738281, "learning_rate": 0.0009470206549802963, "loss": 3.4675, "step": 6240 }, { "epoch": 0.4243103682565566, "grad_norm": 2.370572328567505, "learning_rate": 0.0009469781899714636, "loss": 3.6444, "step": 6245 }, { "epoch": 0.42465008832721834, "grad_norm": 1.7247244119644165, "learning_rate": 0.0009469357249626308, "loss": 3.5866, "step": 6250 }, { "epoch": 0.42498980839788014, "grad_norm": 1.8153215646743774, "learning_rate": 0.000946893259953798, "loss": 3.6474, "step": 6255 }, { "epoch": 0.42532952846854194, "grad_norm": 2.071969509124756, "learning_rate": 0.0009468507949449654, "loss": 3.3513, "step": 6260 }, { "epoch": 0.4256692485392037, "grad_norm": 1.4113887548446655, "learning_rate": 0.0009468083299361326, "loss": 3.4348, "step": 6265 }, { "epoch": 0.4260089686098655, "grad_norm": 2.109605312347412, "learning_rate": 0.0009467658649272998, "loss": 3.7878, "step": 6270 }, { "epoch": 0.4263486886805272, "grad_norm": 1.818732738494873, "learning_rate": 0.0009467233999184673, "loss": 3.5242, "step": 6275 }, { "epoch": 0.426688408751189, "grad_norm": 2.449779510498047, "learning_rate": 0.0009466809349096345, "loss": 3.5743, "step": 6280 }, { "epoch": 0.4270281288218508, "grad_norm": 2.008734703063965, "learning_rate": 0.0009466384699008017, "loss": 3.7736, "step": 6285 }, { "epoch": 0.42736784889251256, "grad_norm": 2.1329712867736816, "learning_rate": 0.0009465960048919691, "loss": 3.5922, "step": 6290 }, { "epoch": 
0.42770756896317436, "grad_norm": 1.4828671216964722, "learning_rate": 0.0009465535398831363, "loss": 3.785, "step": 6295 }, { "epoch": 0.4280472890338361, "grad_norm": 1.683019995689392, "learning_rate": 0.0009465110748743035, "loss": 3.5818, "step": 6300 }, { "epoch": 0.4283870091044979, "grad_norm": 1.6356521844863892, "learning_rate": 0.0009464686098654708, "loss": 3.3332, "step": 6305 }, { "epoch": 0.4287267291751597, "grad_norm": 1.720829963684082, "learning_rate": 0.0009464261448566382, "loss": 3.8616, "step": 6310 }, { "epoch": 0.42906644924582144, "grad_norm": 2.0990145206451416, "learning_rate": 0.0009463836798478054, "loss": 3.6464, "step": 6315 }, { "epoch": 0.42940616931648323, "grad_norm": 1.935070276260376, "learning_rate": 0.0009463412148389727, "loss": 3.6464, "step": 6320 }, { "epoch": 0.429745889387145, "grad_norm": 2.1485373973846436, "learning_rate": 0.00094629874983014, "loss": 3.4328, "step": 6325 }, { "epoch": 0.4300856094578068, "grad_norm": 2.6021077632904053, "learning_rate": 0.0009462562848213072, "loss": 3.5701, "step": 6330 }, { "epoch": 0.4304253295284685, "grad_norm": 1.813655138015747, "learning_rate": 0.0009462138198124745, "loss": 3.627, "step": 6335 }, { "epoch": 0.4307650495991303, "grad_norm": 1.8974841833114624, "learning_rate": 0.0009461713548036418, "loss": 3.5441, "step": 6340 }, { "epoch": 0.4311047696697921, "grad_norm": 1.8645802736282349, "learning_rate": 0.0009461288897948091, "loss": 3.537, "step": 6345 }, { "epoch": 0.43144448974045385, "grad_norm": 1.9019432067871094, "learning_rate": 0.0009460864247859764, "loss": 3.5254, "step": 6350 }, { "epoch": 0.43178420981111565, "grad_norm": 2.1684646606445312, "learning_rate": 0.0009460439597771436, "loss": 3.6108, "step": 6355 }, { "epoch": 0.4321239298817774, "grad_norm": 1.7560538053512573, "learning_rate": 0.0009460014947683109, "loss": 3.4183, "step": 6360 }, { "epoch": 0.4324636499524392, "grad_norm": 1.5799816846847534, "learning_rate": 0.0009459590297594782, "loss": 
3.6012, "step": 6365 }, { "epoch": 0.432803370023101, "grad_norm": 2.4683728218078613, "learning_rate": 0.0009459165647506454, "loss": 3.4287, "step": 6370 }, { "epoch": 0.43314309009376273, "grad_norm": 2.042112112045288, "learning_rate": 0.0009458740997418128, "loss": 3.5049, "step": 6375 }, { "epoch": 0.4334828101644245, "grad_norm": 1.8518515825271606, "learning_rate": 0.0009458316347329801, "loss": 3.6815, "step": 6380 }, { "epoch": 0.43382253023508627, "grad_norm": 2.490804433822632, "learning_rate": 0.0009457891697241473, "loss": 3.7543, "step": 6385 }, { "epoch": 0.43416225030574807, "grad_norm": 2.1295320987701416, "learning_rate": 0.0009457467047153147, "loss": 3.6625, "step": 6390 }, { "epoch": 0.43450197037640986, "grad_norm": 1.747617244720459, "learning_rate": 0.0009457042397064819, "loss": 3.789, "step": 6395 }, { "epoch": 0.4348416904470716, "grad_norm": 1.9635257720947266, "learning_rate": 0.0009456617746976491, "loss": 3.5704, "step": 6400 }, { "epoch": 0.4351814105177334, "grad_norm": 1.5487641096115112, "learning_rate": 0.0009456193096888164, "loss": 3.6295, "step": 6405 }, { "epoch": 0.43552113058839514, "grad_norm": 1.8329668045043945, "learning_rate": 0.0009455768446799837, "loss": 3.4772, "step": 6410 }, { "epoch": 0.43586085065905694, "grad_norm": 1.813430666923523, "learning_rate": 0.000945534379671151, "loss": 3.4299, "step": 6415 }, { "epoch": 0.4362005707297187, "grad_norm": 1.5619617700576782, "learning_rate": 0.0009454919146623183, "loss": 3.4951, "step": 6420 }, { "epoch": 0.4365402908003805, "grad_norm": 1.839874029159546, "learning_rate": 0.0009454494496534856, "loss": 3.4463, "step": 6425 }, { "epoch": 0.4368800108710423, "grad_norm": 1.6888867616653442, "learning_rate": 0.0009454069846446528, "loss": 3.8616, "step": 6430 }, { "epoch": 0.437219730941704, "grad_norm": 1.6578636169433594, "learning_rate": 0.0009453645196358201, "loss": 3.6288, "step": 6435 }, { "epoch": 0.4375594510123658, "grad_norm": 2.020099639892578, 
"learning_rate": 0.0009453220546269874, "loss": 3.5142, "step": 6440 }, { "epoch": 0.43789917108302756, "grad_norm": 2.319422483444214, "learning_rate": 0.0009452795896181546, "loss": 3.6435, "step": 6445 }, { "epoch": 0.43823889115368936, "grad_norm": 2.0726208686828613, "learning_rate": 0.000945237124609322, "loss": 3.7119, "step": 6450 }, { "epoch": 0.43857861122435116, "grad_norm": 1.9337352514266968, "learning_rate": 0.0009451946596004892, "loss": 3.7477, "step": 6455 }, { "epoch": 0.4389183312950129, "grad_norm": 1.8046481609344482, "learning_rate": 0.0009451521945916565, "loss": 3.5423, "step": 6460 }, { "epoch": 0.4392580513656747, "grad_norm": 2.0393052101135254, "learning_rate": 0.0009451097295828238, "loss": 3.6539, "step": 6465 }, { "epoch": 0.43959777143633644, "grad_norm": 1.8797863721847534, "learning_rate": 0.000945067264573991, "loss": 3.4009, "step": 6470 }, { "epoch": 0.43993749150699824, "grad_norm": 1.8483107089996338, "learning_rate": 0.0009450247995651583, "loss": 3.7944, "step": 6475 }, { "epoch": 0.44027721157766003, "grad_norm": 1.5115200281143188, "learning_rate": 0.0009449823345563257, "loss": 3.8039, "step": 6480 }, { "epoch": 0.4406169316483218, "grad_norm": 1.743880033493042, "learning_rate": 0.0009449398695474929, "loss": 3.5294, "step": 6485 }, { "epoch": 0.44095665171898357, "grad_norm": 1.7968199253082275, "learning_rate": 0.0009448974045386602, "loss": 3.7971, "step": 6490 }, { "epoch": 0.4412963717896453, "grad_norm": 2.0849368572235107, "learning_rate": 0.0009448549395298275, "loss": 3.6379, "step": 6495 }, { "epoch": 0.4416360918603071, "grad_norm": 2.280219078063965, "learning_rate": 0.0009448124745209947, "loss": 3.886, "step": 6500 }, { "epoch": 0.4419758119309689, "grad_norm": 1.9523309469223022, "learning_rate": 0.0009447700095121619, "loss": 3.5279, "step": 6505 }, { "epoch": 0.44231553200163065, "grad_norm": 1.7790738344192505, "learning_rate": 0.0009447275445033293, "loss": 3.7347, "step": 6510 }, { "epoch": 
0.44265525207229245, "grad_norm": 1.8586137294769287, "learning_rate": 0.0009446935724962631, "loss": 3.5607, "step": 6515 }, { "epoch": 0.4429949721429542, "grad_norm": 1.8920232057571411, "learning_rate": 0.0009446511074874303, "loss": 3.4061, "step": 6520 }, { "epoch": 0.443334692213616, "grad_norm": 1.886669635772705, "learning_rate": 0.0009446086424785977, "loss": 3.6912, "step": 6525 }, { "epoch": 0.44367441228427773, "grad_norm": 1.7358598709106445, "learning_rate": 0.0009445661774697649, "loss": 3.6055, "step": 6530 }, { "epoch": 0.44401413235493953, "grad_norm": 2.153101682662964, "learning_rate": 0.0009445237124609321, "loss": 3.5438, "step": 6535 }, { "epoch": 0.4443538524256013, "grad_norm": 1.9542155265808105, "learning_rate": 0.0009444812474520996, "loss": 3.5503, "step": 6540 }, { "epoch": 0.44469357249626307, "grad_norm": 1.5750234127044678, "learning_rate": 0.0009444387824432668, "loss": 3.6079, "step": 6545 }, { "epoch": 0.44503329256692487, "grad_norm": 1.8651561737060547, "learning_rate": 0.000944396317434434, "loss": 3.34, "step": 6550 }, { "epoch": 0.4453730126375866, "grad_norm": 2.430546760559082, "learning_rate": 0.0009443538524256013, "loss": 3.7372, "step": 6555 }, { "epoch": 0.4457127327082484, "grad_norm": 1.7223806381225586, "learning_rate": 0.0009443113874167686, "loss": 3.9147, "step": 6560 }, { "epoch": 0.4460524527789102, "grad_norm": 1.9159107208251953, "learning_rate": 0.0009442689224079358, "loss": 3.2606, "step": 6565 }, { "epoch": 0.44639217284957194, "grad_norm": 1.8626925945281982, "learning_rate": 0.0009442264573991031, "loss": 3.4323, "step": 6570 }, { "epoch": 0.44673189292023374, "grad_norm": null, "learning_rate": 0.000944192485392037, "loss": 3.7679, "step": 6575 }, { "epoch": 0.4470716129908955, "grad_norm": 1.9819060564041138, "learning_rate": 0.0009441500203832043, "loss": 3.5275, "step": 6580 }, { "epoch": 0.4474113330615573, "grad_norm": 1.7562685012817383, "learning_rate": 0.0009441075553743715, "loss": 3.5585, 
"step": 6585 }, { "epoch": 0.4477510531322191, "grad_norm": 1.7118613719940186, "learning_rate": 0.0009440650903655387, "loss": 3.5501, "step": 6590 }, { "epoch": 0.4480907732028808, "grad_norm": 1.5229084491729736, "learning_rate": 0.0009440226253567062, "loss": 3.5833, "step": 6595 }, { "epoch": 0.4484304932735426, "grad_norm": 1.6787325143814087, "learning_rate": 0.0009439801603478734, "loss": 3.7707, "step": 6600 }, { "epoch": 0.44877021334420436, "grad_norm": 1.5688403844833374, "learning_rate": 0.0009439376953390406, "loss": 3.495, "step": 6605 }, { "epoch": 0.44910993341486616, "grad_norm": 1.8620282411575317, "learning_rate": 0.000943895230330208, "loss": 3.4642, "step": 6610 }, { "epoch": 0.4494496534855279, "grad_norm": 1.6487711668014526, "learning_rate": 0.0009438527653213752, "loss": 3.8481, "step": 6615 }, { "epoch": 0.4497893735561897, "grad_norm": 1.5833429098129272, "learning_rate": 0.0009438103003125424, "loss": 3.5789, "step": 6620 }, { "epoch": 0.4501290936268515, "grad_norm": 2.6780335903167725, "learning_rate": 0.0009437678353037098, "loss": 3.7064, "step": 6625 }, { "epoch": 0.45046881369751324, "grad_norm": 1.793239951133728, "learning_rate": 0.0009437253702948771, "loss": 3.901, "step": 6630 }, { "epoch": 0.45080853376817503, "grad_norm": 2.2765746116638184, "learning_rate": 0.0009436829052860443, "loss": 3.6479, "step": 6635 }, { "epoch": 0.4511482538388368, "grad_norm": 1.8410252332687378, "learning_rate": 0.0009436404402772116, "loss": 3.6681, "step": 6640 }, { "epoch": 0.4514879739094986, "grad_norm": 1.9070992469787598, "learning_rate": 0.0009435979752683789, "loss": 3.3889, "step": 6645 }, { "epoch": 0.45182769398016037, "grad_norm": 2.0405123233795166, "learning_rate": 0.0009435555102595461, "loss": 3.7751, "step": 6650 }, { "epoch": 0.4521674140508221, "grad_norm": 2.351215362548828, "learning_rate": 0.0009435130452507134, "loss": 3.6818, "step": 6655 }, { "epoch": 0.4525071341214839, "grad_norm": 1.8253047466278076, 
"learning_rate": 0.0009434705802418807, "loss": 3.5463, "step": 6660 }, { "epoch": 0.45284685419214565, "grad_norm": 1.5990278720855713, "learning_rate": 0.000943428115233048, "loss": 3.4194, "step": 6665 }, { "epoch": 0.45318657426280745, "grad_norm": 2.0799410343170166, "learning_rate": 0.0009433856502242153, "loss": 3.7756, "step": 6670 }, { "epoch": 0.45352629433346925, "grad_norm": 1.767306923866272, "learning_rate": 0.0009433431852153826, "loss": 3.8839, "step": 6675 }, { "epoch": 0.453866014404131, "grad_norm": 2.3738794326782227, "learning_rate": 0.0009433007202065498, "loss": 3.6416, "step": 6680 }, { "epoch": 0.4542057344747928, "grad_norm": 2.7199673652648926, "learning_rate": 0.0009432582551977171, "loss": 3.4957, "step": 6685 }, { "epoch": 0.45454545454545453, "grad_norm": 1.6500394344329834, "learning_rate": 0.0009432157901888843, "loss": 3.4895, "step": 6690 }, { "epoch": 0.45488517461611633, "grad_norm": 2.3751347064971924, "learning_rate": 0.0009431733251800516, "loss": 3.6687, "step": 6695 }, { "epoch": 0.45522489468677807, "grad_norm": 2.356215476989746, "learning_rate": 0.000943130860171219, "loss": 3.3732, "step": 6700 }, { "epoch": 0.45556461475743987, "grad_norm": 2.329878807067871, "learning_rate": 0.0009430883951623862, "loss": 3.5901, "step": 6705 }, { "epoch": 0.45590433482810166, "grad_norm": 2.237454414367676, "learning_rate": 0.0009430459301535535, "loss": 3.6951, "step": 6710 }, { "epoch": 0.4562440548987634, "grad_norm": 1.8095207214355469, "learning_rate": 0.0009430034651447208, "loss": 3.717, "step": 6715 }, { "epoch": 0.4565837749694252, "grad_norm": 1.6566030979156494, "learning_rate": 0.000942961000135888, "loss": 3.5561, "step": 6720 }, { "epoch": 0.45692349504008695, "grad_norm": 2.27333664894104, "learning_rate": 0.0009429185351270552, "loss": 3.5416, "step": 6725 }, { "epoch": 0.45726321511074874, "grad_norm": 1.6221749782562256, "learning_rate": 0.0009428760701182226, "loss": 3.6417, "step": 6730 }, { "epoch": 
0.45760293518141054, "grad_norm": 2.4360435009002686, "learning_rate": 0.0009428336051093899, "loss": 3.6138, "step": 6735 }, { "epoch": 0.4579426552520723, "grad_norm": 1.878818154335022, "learning_rate": 0.0009427911401005571, "loss": 3.6221, "step": 6740 }, { "epoch": 0.4582823753227341, "grad_norm": 2.304360866546631, "learning_rate": 0.0009427486750917245, "loss": 3.6168, "step": 6745 }, { "epoch": 0.4586220953933958, "grad_norm": 1.603292465209961, "learning_rate": 0.0009427062100828917, "loss": 3.6926, "step": 6750 }, { "epoch": 0.4589618154640576, "grad_norm": 1.7479709386825562, "learning_rate": 0.0009426637450740589, "loss": 3.7692, "step": 6755 }, { "epoch": 0.4593015355347194, "grad_norm": 1.6980823278427124, "learning_rate": 0.0009426212800652263, "loss": 3.3305, "step": 6760 }, { "epoch": 0.45964125560538116, "grad_norm": 2.4687716960906982, "learning_rate": 0.0009425788150563935, "loss": 3.6905, "step": 6765 }, { "epoch": 0.45998097567604296, "grad_norm": 1.8847265243530273, "learning_rate": 0.0009425363500475608, "loss": 3.6793, "step": 6770 }, { "epoch": 0.4603206957467047, "grad_norm": 1.8201357126235962, "learning_rate": 0.0009424938850387282, "loss": 3.6231, "step": 6775 }, { "epoch": 0.4606604158173665, "grad_norm": 1.887251615524292, "learning_rate": 0.0009424514200298954, "loss": 3.7117, "step": 6780 }, { "epoch": 0.46100013588802824, "grad_norm": 1.7232720851898193, "learning_rate": 0.0009424089550210627, "loss": 3.6807, "step": 6785 }, { "epoch": 0.46133985595869004, "grad_norm": 1.901504635810852, "learning_rate": 0.0009423664900122299, "loss": 3.5181, "step": 6790 }, { "epoch": 0.46167957602935183, "grad_norm": 2.187636613845825, "learning_rate": 0.0009423240250033972, "loss": 3.5445, "step": 6795 }, { "epoch": 0.4620192961000136, "grad_norm": 1.8484975099563599, "learning_rate": 0.0009422815599945645, "loss": 3.6871, "step": 6800 }, { "epoch": 0.4623590161706754, "grad_norm": 2.264803647994995, "learning_rate": 0.0009422390949857318, 
"loss": 3.6188, "step": 6805 }, { "epoch": 0.4626987362413371, "grad_norm": 2.4335315227508545, "learning_rate": 0.0009421966299768991, "loss": 3.5646, "step": 6810 }, { "epoch": 0.4630384563119989, "grad_norm": 1.9481793642044067, "learning_rate": 0.0009421541649680664, "loss": 3.4766, "step": 6815 }, { "epoch": 0.4633781763826607, "grad_norm": 1.993538498878479, "learning_rate": 0.0009421116999592336, "loss": 3.92, "step": 6820 }, { "epoch": 0.46371789645332245, "grad_norm": 1.8995599746704102, "learning_rate": 0.0009420692349504008, "loss": 3.6983, "step": 6825 }, { "epoch": 0.46405761652398425, "grad_norm": 1.9267207384109497, "learning_rate": 0.0009420267699415682, "loss": 3.5308, "step": 6830 }, { "epoch": 0.464397336594646, "grad_norm": 2.4141926765441895, "learning_rate": 0.0009419843049327354, "loss": 3.486, "step": 6835 }, { "epoch": 0.4647370566653078, "grad_norm": 1.585700511932373, "learning_rate": 0.0009419418399239027, "loss": 3.7726, "step": 6840 }, { "epoch": 0.4650767767359696, "grad_norm": 1.5398318767547607, "learning_rate": 0.0009418993749150701, "loss": 3.5095, "step": 6845 }, { "epoch": 0.46541649680663133, "grad_norm": 1.805751085281372, "learning_rate": 0.0009418569099062373, "loss": 3.6611, "step": 6850 }, { "epoch": 0.4657562168772931, "grad_norm": 2.307342767715454, "learning_rate": 0.0009418144448974045, "loss": 3.5766, "step": 6855 }, { "epoch": 0.46609593694795487, "grad_norm": 1.8184938430786133, "learning_rate": 0.0009417719798885719, "loss": 3.6822, "step": 6860 }, { "epoch": 0.46643565701861667, "grad_norm": 1.62266206741333, "learning_rate": 0.0009417295148797391, "loss": 3.4885, "step": 6865 }, { "epoch": 0.4667753770892784, "grad_norm": 2.029280662536621, "learning_rate": 0.0009416870498709063, "loss": 3.6047, "step": 6870 }, { "epoch": 0.4671150971599402, "grad_norm": 2.343122720718384, "learning_rate": 0.0009416445848620738, "loss": 3.4173, "step": 6875 }, { "epoch": 0.467454817230602, "grad_norm": 1.8083162307739258, 
"learning_rate": 0.000941602119853241, "loss": 3.4639, "step": 6880 }, { "epoch": 0.46779453730126375, "grad_norm": 1.953895092010498, "learning_rate": 0.0009415596548444082, "loss": 3.6549, "step": 6885 }, { "epoch": 0.46813425737192554, "grad_norm": 1.5946061611175537, "learning_rate": 0.0009415171898355755, "loss": 3.7179, "step": 6890 }, { "epoch": 0.4684739774425873, "grad_norm": 2.2081854343414307, "learning_rate": 0.0009414747248267428, "loss": 3.7377, "step": 6895 }, { "epoch": 0.4688136975132491, "grad_norm": 1.7893260717391968, "learning_rate": 0.00094143225981791, "loss": 3.7812, "step": 6900 }, { "epoch": 0.4691534175839109, "grad_norm": 2.0790178775787354, "learning_rate": 0.0009413897948090774, "loss": 3.7869, "step": 6905 }, { "epoch": 0.4694931376545726, "grad_norm": 2.502347946166992, "learning_rate": 0.0009413473298002447, "loss": 3.7392, "step": 6910 }, { "epoch": 0.4698328577252344, "grad_norm": 1.7381706237792969, "learning_rate": 0.0009413048647914119, "loss": 3.7481, "step": 6915 }, { "epoch": 0.47017257779589616, "grad_norm": 2.445383071899414, "learning_rate": 0.0009412623997825792, "loss": 3.4584, "step": 6920 }, { "epoch": 0.47051229786655796, "grad_norm": 1.6647133827209473, "learning_rate": 0.0009412199347737464, "loss": 3.541, "step": 6925 }, { "epoch": 0.47085201793721976, "grad_norm": 2.7865805625915527, "learning_rate": 0.0009411774697649137, "loss": 3.6944, "step": 6930 }, { "epoch": 0.4711917380078815, "grad_norm": 1.9609665870666504, "learning_rate": 0.000941135004756081, "loss": 3.6083, "step": 6935 }, { "epoch": 0.4715314580785433, "grad_norm": 1.7283531427383423, "learning_rate": 0.0009410925397472483, "loss": 3.5523, "step": 6940 }, { "epoch": 0.47187117814920504, "grad_norm": 1.8870112895965576, "learning_rate": 0.0009410500747384156, "loss": 3.7469, "step": 6945 }, { "epoch": 0.47221089821986684, "grad_norm": 1.9010035991668701, "learning_rate": 0.0009410076097295829, "loss": 3.5122, "step": 6950 }, { "epoch": 
0.4725506182905286, "grad_norm": 2.1114003658294678, "learning_rate": 0.0009409651447207501, "loss": 3.8454, "step": 6955 }, { "epoch": 0.4728903383611904, "grad_norm": 2.240225076675415, "learning_rate": 0.0009409226797119174, "loss": 3.763, "step": 6960 }, { "epoch": 0.4732300584318522, "grad_norm": 2.497311592102051, "learning_rate": 0.0009408802147030847, "loss": 3.7176, "step": 6965 }, { "epoch": 0.4735697785025139, "grad_norm": 1.47504460811615, "learning_rate": 0.0009408377496942519, "loss": 3.3692, "step": 6970 }, { "epoch": 0.4739094985731757, "grad_norm": 2.062899589538574, "learning_rate": 0.0009407952846854192, "loss": 3.6741, "step": 6975 }, { "epoch": 0.47424921864383746, "grad_norm": 1.3643637895584106, "learning_rate": 0.0009407528196765866, "loss": 3.7626, "step": 6980 }, { "epoch": 0.47458893871449925, "grad_norm": 1.8050810098648071, "learning_rate": 0.0009407103546677538, "loss": 3.5833, "step": 6985 }, { "epoch": 0.47492865878516105, "grad_norm": 1.7133585214614868, "learning_rate": 0.000940667889658921, "loss": 3.5448, "step": 6990 }, { "epoch": 0.4752683788558228, "grad_norm": 1.935316801071167, "learning_rate": 0.0009406254246500884, "loss": 3.3397, "step": 6995 }, { "epoch": 0.4756080989264846, "grad_norm": 1.9858559370040894, "learning_rate": 0.0009405829596412556, "loss": 3.2813, "step": 7000 }, { "epoch": 0.47594781899714633, "grad_norm": 2.4942679405212402, "learning_rate": 0.0009405404946324228, "loss": 3.9989, "step": 7005 }, { "epoch": 0.47628753906780813, "grad_norm": 2.234844207763672, "learning_rate": 0.0009404980296235903, "loss": 3.741, "step": 7010 }, { "epoch": 0.4766272591384699, "grad_norm": 2.204925537109375, "learning_rate": 0.0009404555646147575, "loss": 3.5734, "step": 7015 }, { "epoch": 0.47696697920913167, "grad_norm": 1.6656725406646729, "learning_rate": 0.0009404130996059247, "loss": 3.3455, "step": 7020 }, { "epoch": 0.47730669927979347, "grad_norm": 1.8467884063720703, "learning_rate": 0.000940370634597092, "loss": 
3.7629, "step": 7025 }, { "epoch": 0.4776464193504552, "grad_norm": 1.9763884544372559, "learning_rate": 0.0009403281695882593, "loss": 3.7089, "step": 7030 }, { "epoch": 0.477986139421117, "grad_norm": 1.8228209018707275, "learning_rate": 0.0009402857045794265, "loss": 3.4874, "step": 7035 }, { "epoch": 0.47832585949177875, "grad_norm": 1.6418864727020264, "learning_rate": 0.0009402432395705938, "loss": 3.6603, "step": 7040 }, { "epoch": 0.47866557956244055, "grad_norm": 2.0733232498168945, "learning_rate": 0.0009402007745617612, "loss": 3.637, "step": 7045 }, { "epoch": 0.47900529963310234, "grad_norm": 1.7754911184310913, "learning_rate": 0.0009401583095529284, "loss": 3.5446, "step": 7050 }, { "epoch": 0.4793450197037641, "grad_norm": 2.3997693061828613, "learning_rate": 0.0009401158445440957, "loss": 3.7859, "step": 7055 }, { "epoch": 0.4796847397744259, "grad_norm": 1.710098147392273, "learning_rate": 0.000940073379535263, "loss": 3.7096, "step": 7060 }, { "epoch": 0.4800244598450876, "grad_norm": 1.807304859161377, "learning_rate": 0.0009400309145264302, "loss": 3.5456, "step": 7065 }, { "epoch": 0.4803641799157494, "grad_norm": 2.53741192817688, "learning_rate": 0.0009399884495175975, "loss": 3.4917, "step": 7070 }, { "epoch": 0.4807038999864112, "grad_norm": 1.4452553987503052, "learning_rate": 0.0009399459845087647, "loss": 3.7125, "step": 7075 }, { "epoch": 0.48104362005707296, "grad_norm": 1.5718740224838257, "learning_rate": 0.0009399035194999321, "loss": 3.7187, "step": 7080 }, { "epoch": 0.48138334012773476, "grad_norm": 1.9986610412597656, "learning_rate": 0.0009398610544910994, "loss": 3.7295, "step": 7085 }, { "epoch": 0.4817230601983965, "grad_norm": 2.1783485412597656, "learning_rate": 0.0009398185894822666, "loss": 3.4757, "step": 7090 }, { "epoch": 0.4820627802690583, "grad_norm": 1.829073429107666, "learning_rate": 0.0009397761244734339, "loss": 3.7674, "step": 7095 }, { "epoch": 0.4824025003397201, "grad_norm": 1.4534355401992798, 
"learning_rate": 0.0009397336594646012, "loss": 3.4953, "step": 7100 }, { "epoch": 0.48274222041038184, "grad_norm": 2.4537928104400635, "learning_rate": 0.0009396911944557684, "loss": 3.6028, "step": 7105 }, { "epoch": 0.48308194048104364, "grad_norm": 2.1731393337249756, "learning_rate": 0.0009396487294469356, "loss": 3.8447, "step": 7110 }, { "epoch": 0.4834216605517054, "grad_norm": 1.8140465021133423, "learning_rate": 0.0009396062644381031, "loss": 3.5828, "step": 7115 }, { "epoch": 0.4837613806223672, "grad_norm": 1.8374356031417847, "learning_rate": 0.0009395637994292703, "loss": 3.6777, "step": 7120 }, { "epoch": 0.4841011006930289, "grad_norm": 2.158670425415039, "learning_rate": 0.0009395213344204377, "loss": 3.6664, "step": 7125 }, { "epoch": 0.4844408207636907, "grad_norm": 1.978879690170288, "learning_rate": 0.0009394788694116049, "loss": 3.5397, "step": 7130 }, { "epoch": 0.4847805408343525, "grad_norm": 2.3793585300445557, "learning_rate": 0.0009394364044027721, "loss": 3.514, "step": 7135 }, { "epoch": 0.48512026090501426, "grad_norm": 2.031531810760498, "learning_rate": 0.0009393939393939394, "loss": 3.8373, "step": 7140 }, { "epoch": 0.48545998097567605, "grad_norm": 2.142778158187866, "learning_rate": 0.0009393514743851067, "loss": 3.8132, "step": 7145 }, { "epoch": 0.4857997010463378, "grad_norm": 1.775416374206543, "learning_rate": 0.000939309009376274, "loss": 3.6695, "step": 7150 }, { "epoch": 0.4861394211169996, "grad_norm": 1.9338051080703735, "learning_rate": 0.0009392665443674413, "loss": 3.6841, "step": 7155 }, { "epoch": 0.4864791411876614, "grad_norm": 1.9334986209869385, "learning_rate": 0.0009392240793586086, "loss": 3.6855, "step": 7160 }, { "epoch": 0.48681886125832313, "grad_norm": 2.140901803970337, "learning_rate": 0.0009391816143497758, "loss": 3.6237, "step": 7165 }, { "epoch": 0.48715858132898493, "grad_norm": 2.0255303382873535, "learning_rate": 0.0009391391493409431, "loss": 3.3512, "step": 7170 }, { "epoch": 
0.48749830139964667, "grad_norm": 2.232780694961548, "learning_rate": 0.0009390966843321103, "loss": 3.73, "step": 7175 }, { "epoch": 0.48783802147030847, "grad_norm": 1.9734517335891724, "learning_rate": 0.0009390542193232776, "loss": 3.7251, "step": 7180 }, { "epoch": 0.48817774154097027, "grad_norm": 1.9952970743179321, "learning_rate": 0.000939011754314445, "loss": 3.603, "step": 7185 }, { "epoch": 0.488517461611632, "grad_norm": 1.5333443880081177, "learning_rate": 0.0009389692893056122, "loss": 3.6448, "step": 7190 }, { "epoch": 0.4888571816822938, "grad_norm": 1.9720709323883057, "learning_rate": 0.0009389268242967795, "loss": 3.7062, "step": 7195 }, { "epoch": 0.48919690175295555, "grad_norm": 1.952510952949524, "learning_rate": 0.0009388843592879468, "loss": 3.5568, "step": 7200 }, { "epoch": 0.48953662182361735, "grad_norm": 1.934333086013794, "learning_rate": 0.000938841894279114, "loss": 3.5781, "step": 7205 }, { "epoch": 0.4898763418942791, "grad_norm": 1.8242450952529907, "learning_rate": 0.0009387994292702812, "loss": 3.7442, "step": 7210 }, { "epoch": 0.4902160619649409, "grad_norm": 1.6753813028335571, "learning_rate": 0.0009387569642614486, "loss": 3.7346, "step": 7215 }, { "epoch": 0.4905557820356027, "grad_norm": 2.114917278289795, "learning_rate": 0.0009387144992526159, "loss": 3.6068, "step": 7220 }, { "epoch": 0.4908955021062644, "grad_norm": 2.2246181964874268, "learning_rate": 0.0009386720342437831, "loss": 3.7546, "step": 7225 }, { "epoch": 0.4912352221769262, "grad_norm": 2.6151223182678223, "learning_rate": 0.0009386295692349505, "loss": 3.426, "step": 7230 }, { "epoch": 0.49157494224758796, "grad_norm": 1.7755546569824219, "learning_rate": 0.0009385871042261177, "loss": 3.5811, "step": 7235 }, { "epoch": 0.49191466231824976, "grad_norm": 2.121277332305908, "learning_rate": 0.0009385446392172849, "loss": 3.6697, "step": 7240 }, { "epoch": 0.49225438238891156, "grad_norm": 1.8032259941101074, "learning_rate": 0.0009385021742084523, 
"loss": 3.7089, "step": 7245 }, { "epoch": 0.4925941024595733, "grad_norm": 1.9233818054199219, "learning_rate": 0.0009384597091996195, "loss": 3.72, "step": 7250 }, { "epoch": 0.4929338225302351, "grad_norm": 1.6029837131500244, "learning_rate": 0.0009384172441907868, "loss": 3.2808, "step": 7255 }, { "epoch": 0.49327354260089684, "grad_norm": 2.025752305984497, "learning_rate": 0.0009383747791819542, "loss": 3.6531, "step": 7260 }, { "epoch": 0.49361326267155864, "grad_norm": 1.7688162326812744, "learning_rate": 0.0009383323141731214, "loss": 3.6632, "step": 7265 }, { "epoch": 0.49395298274222044, "grad_norm": 1.883939266204834, "learning_rate": 0.0009382898491642886, "loss": 3.6615, "step": 7270 }, { "epoch": 0.4942927028128822, "grad_norm": 1.916253685951233, "learning_rate": 0.0009382473841554559, "loss": 3.4793, "step": 7275 }, { "epoch": 0.494632422883544, "grad_norm": 2.163346767425537, "learning_rate": 0.0009382049191466232, "loss": 3.6865, "step": 7280 }, { "epoch": 0.4949721429542057, "grad_norm": 1.833184838294983, "learning_rate": 0.0009381624541377904, "loss": 3.9028, "step": 7285 }, { "epoch": 0.4953118630248675, "grad_norm": 1.8925822973251343, "learning_rate": 0.0009381199891289578, "loss": 3.4908, "step": 7290 }, { "epoch": 0.49565158309552926, "grad_norm": 2.1171228885650635, "learning_rate": 0.0009380775241201251, "loss": 3.6305, "step": 7295 }, { "epoch": 0.49599130316619106, "grad_norm": 1.7656829357147217, "learning_rate": 0.0009380350591112923, "loss": 3.747, "step": 7300 }, { "epoch": 0.49633102323685285, "grad_norm": 1.9057354927062988, "learning_rate": 0.0009379925941024596, "loss": 3.534, "step": 7305 }, { "epoch": 0.4966707433075146, "grad_norm": 1.7219806909561157, "learning_rate": 0.0009379501290936269, "loss": 3.5819, "step": 7310 }, { "epoch": 0.4970104633781764, "grad_norm": 1.7762277126312256, "learning_rate": 0.0009379076640847941, "loss": 3.5461, "step": 7315 }, { "epoch": 0.49735018344883813, "grad_norm": 2.1475906372070312, 
"learning_rate": 0.0009378651990759614, "loss": 3.963, "step": 7320 }, { "epoch": 0.49768990351949993, "grad_norm": 1.9923732280731201, "learning_rate": 0.0009378227340671287, "loss": 3.616, "step": 7325 }, { "epoch": 0.49802962359016173, "grad_norm": 1.4712884426116943, "learning_rate": 0.000937780269058296, "loss": 3.623, "step": 7330 }, { "epoch": 0.49836934366082347, "grad_norm": 1.776922345161438, "learning_rate": 0.0009377378040494633, "loss": 3.6838, "step": 7335 }, { "epoch": 0.49870906373148527, "grad_norm": 1.6784189939498901, "learning_rate": 0.0009376953390406305, "loss": 3.8362, "step": 7340 }, { "epoch": 0.499048783802147, "grad_norm": 1.6676775217056274, "learning_rate": 0.0009376528740317978, "loss": 3.7197, "step": 7345 }, { "epoch": 0.4993885038728088, "grad_norm": 1.7791309356689453, "learning_rate": 0.0009376104090229651, "loss": 3.3229, "step": 7350 }, { "epoch": 0.4997282239434706, "grad_norm": 1.8205612897872925, "learning_rate": 0.0009375679440141323, "loss": 3.6016, "step": 7355 }, { "epoch": 0.5000679440141323, "grad_norm": 1.905911922454834, "learning_rate": 0.0009375254790052997, "loss": 3.6897, "step": 7360 }, { "epoch": 0.5004076640847941, "grad_norm": 1.7010468244552612, "learning_rate": 0.000937483013996467, "loss": 3.6465, "step": 7365 }, { "epoch": 0.5007473841554559, "grad_norm": 1.9603149890899658, "learning_rate": 0.0009374405489876342, "loss": 3.8853, "step": 7370 }, { "epoch": 0.5010871042261177, "grad_norm": 1.9242368936538696, "learning_rate": 0.0009373980839788014, "loss": 3.7102, "step": 7375 }, { "epoch": 0.5014268242967794, "grad_norm": 1.604752779006958, "learning_rate": 0.0009373556189699688, "loss": 3.892, "step": 7380 }, { "epoch": 0.5017665443674413, "grad_norm": 1.6455137729644775, "learning_rate": 0.000937313153961136, "loss": 3.8101, "step": 7385 }, { "epoch": 0.502106264438103, "grad_norm": 1.748598337173462, "learning_rate": 0.0009372706889523032, "loss": 3.4231, "step": 7390 }, { "epoch": 0.5024459845087648, 
"grad_norm": 2.2491466999053955, "learning_rate": 0.0009372282239434707, "loss": 3.6028, "step": 7395 }, { "epoch": 0.5027857045794265, "grad_norm": 2.2631514072418213, "learning_rate": 0.0009371857589346379, "loss": 3.7061, "step": 7400 }, { "epoch": 0.5031254246500884, "grad_norm": 2.192995548248291, "learning_rate": 0.0009371432939258051, "loss": 3.5675, "step": 7405 }, { "epoch": 0.5034651447207501, "grad_norm": 2.0099000930786133, "learning_rate": 0.0009371008289169725, "loss": 3.5721, "step": 7410 }, { "epoch": 0.5038048647914118, "grad_norm": 1.6998389959335327, "learning_rate": 0.0009370583639081397, "loss": 3.5503, "step": 7415 }, { "epoch": 0.5041445848620737, "grad_norm": 1.9405243396759033, "learning_rate": 0.0009370158988993069, "loss": 3.6419, "step": 7420 }, { "epoch": 0.5044843049327354, "grad_norm": 2.5013434886932373, "learning_rate": 0.0009369734338904742, "loss": 3.5558, "step": 7425 }, { "epoch": 0.5048240250033972, "grad_norm": 1.7999175786972046, "learning_rate": 0.0009369309688816416, "loss": 3.6127, "step": 7430 }, { "epoch": 0.5051637450740589, "grad_norm": 1.434126377105713, "learning_rate": 0.0009368885038728088, "loss": 3.635, "step": 7435 }, { "epoch": 0.5055034651447208, "grad_norm": 1.534347653388977, "learning_rate": 0.0009368460388639761, "loss": 3.5102, "step": 7440 }, { "epoch": 0.5058431852153825, "grad_norm": 1.5089943408966064, "learning_rate": 0.0009368035738551434, "loss": 3.6244, "step": 7445 }, { "epoch": 0.5061829052860443, "grad_norm": 1.7290353775024414, "learning_rate": 0.0009367611088463106, "loss": 3.5231, "step": 7450 }, { "epoch": 0.5065226253567061, "grad_norm": 2.2136716842651367, "learning_rate": 0.0009367186438374779, "loss": 3.6342, "step": 7455 }, { "epoch": 0.5068623454273679, "grad_norm": 2.042545795440674, "learning_rate": 0.0009366761788286451, "loss": 3.5724, "step": 7460 }, { "epoch": 0.5072020654980296, "grad_norm": 1.9697489738464355, "learning_rate": 0.0009366337138198125, "loss": 3.7238, "step": 
7465 }, { "epoch": 0.5075417855686915, "grad_norm": 1.994335412979126, "learning_rate": 0.0009365912488109798, "loss": 3.4146, "step": 7470 }, { "epoch": 0.5078815056393532, "grad_norm": 1.5270652770996094, "learning_rate": 0.000936548783802147, "loss": 3.7925, "step": 7475 }, { "epoch": 0.5082212257100149, "grad_norm": 1.876190185546875, "learning_rate": 0.0009365063187933144, "loss": 3.3939, "step": 7480 }, { "epoch": 0.5085609457806767, "grad_norm": 1.8489068746566772, "learning_rate": 0.0009364638537844816, "loss": 3.8579, "step": 7485 }, { "epoch": 0.5089006658513385, "grad_norm": 2.1076996326446533, "learning_rate": 0.0009364213887756488, "loss": 3.3945, "step": 7490 }, { "epoch": 0.5092403859220003, "grad_norm": 2.228790044784546, "learning_rate": 0.0009363789237668163, "loss": 3.8281, "step": 7495 }, { "epoch": 0.509580105992662, "grad_norm": 1.840259075164795, "learning_rate": 0.0009363364587579835, "loss": 3.6518, "step": 7500 }, { "epoch": 0.5099198260633239, "grad_norm": 1.4832969903945923, "learning_rate": 0.0009362939937491507, "loss": 3.66, "step": 7505 }, { "epoch": 0.5102595461339856, "grad_norm": 1.7537133693695068, "learning_rate": 0.000936251528740318, "loss": 3.8154, "step": 7510 }, { "epoch": 0.5105992662046474, "grad_norm": 1.978459358215332, "learning_rate": 0.0009362090637314853, "loss": 3.7555, "step": 7515 }, { "epoch": 0.5109389862753091, "grad_norm": 2.0619287490844727, "learning_rate": 0.0009361665987226525, "loss": 3.7904, "step": 7520 }, { "epoch": 0.511278706345971, "grad_norm": 2.2229878902435303, "learning_rate": 0.0009361241337138198, "loss": 3.6977, "step": 7525 }, { "epoch": 0.5116184264166327, "grad_norm": 2.4079840183258057, "learning_rate": 0.0009360816687049872, "loss": 3.7213, "step": 7530 }, { "epoch": 0.5119581464872944, "grad_norm": 3.5758564472198486, "learning_rate": 0.0009360392036961544, "loss": 3.7721, "step": 7535 }, { "epoch": 0.5122978665579563, "grad_norm": 1.6300967931747437, "learning_rate": 
0.0009359967386873217, "loss": 3.7263, "step": 7540 }, { "epoch": 0.512637586628618, "grad_norm": 1.8676427602767944, "learning_rate": 0.000935954273678489, "loss": 3.453, "step": 7545 }, { "epoch": 0.5129773066992798, "grad_norm": 1.6408576965332031, "learning_rate": 0.0009359118086696562, "loss": 3.5796, "step": 7550 }, { "epoch": 0.5133170267699416, "grad_norm": 1.6013054847717285, "learning_rate": 0.0009358693436608235, "loss": 3.6443, "step": 7555 }, { "epoch": 0.5136567468406034, "grad_norm": 2.155820608139038, "learning_rate": 0.0009358268786519907, "loss": 3.6757, "step": 7560 }, { "epoch": 0.5139964669112651, "grad_norm": 1.9313974380493164, "learning_rate": 0.0009357844136431581, "loss": 3.7552, "step": 7565 }, { "epoch": 0.5143361869819268, "grad_norm": 1.9376916885375977, "learning_rate": 0.0009357419486343254, "loss": 3.6706, "step": 7570 }, { "epoch": 0.5146759070525887, "grad_norm": 1.9983397722244263, "learning_rate": 0.0009356994836254926, "loss": 3.6198, "step": 7575 }, { "epoch": 0.5150156271232504, "grad_norm": 2.4256067276000977, "learning_rate": 0.0009356570186166599, "loss": 3.7664, "step": 7580 }, { "epoch": 0.5153553471939122, "grad_norm": 2.142009973526001, "learning_rate": 0.0009356145536078272, "loss": 3.7194, "step": 7585 }, { "epoch": 0.515695067264574, "grad_norm": 1.7468597888946533, "learning_rate": 0.0009355720885989944, "loss": 3.6837, "step": 7590 }, { "epoch": 0.5160347873352358, "grad_norm": 1.957929015159607, "learning_rate": 0.0009355296235901617, "loss": 3.5369, "step": 7595 }, { "epoch": 0.5163745074058975, "grad_norm": 1.818114995956421, "learning_rate": 0.0009354871585813291, "loss": 3.6966, "step": 7600 }, { "epoch": 0.5167142274765593, "grad_norm": 1.5096427202224731, "learning_rate": 0.0009354446935724963, "loss": 3.4998, "step": 7605 }, { "epoch": 0.5170539475472211, "grad_norm": 2.6768863201141357, "learning_rate": 0.0009354022285636635, "loss": 3.5572, "step": 7610 }, { "epoch": 0.5173936676178829, "grad_norm": 
2.198291540145874, "learning_rate": 0.0009353597635548309, "loss": 3.3726, "step": 7615 }, { "epoch": 0.5177333876885446, "grad_norm": 1.3661510944366455, "learning_rate": 0.0009353172985459981, "loss": 3.7167, "step": 7620 }, { "epoch": 0.5180731077592065, "grad_norm": 1.6599849462509155, "learning_rate": 0.0009352748335371653, "loss": 3.776, "step": 7625 }, { "epoch": 0.5184128278298682, "grad_norm": 1.8492376804351807, "learning_rate": 0.0009352323685283327, "loss": 3.5108, "step": 7630 }, { "epoch": 0.5187525479005299, "grad_norm": 2.199364423751831, "learning_rate": 0.0009351899035195, "loss": 3.7097, "step": 7635 }, { "epoch": 0.5190922679711918, "grad_norm": 1.5937011241912842, "learning_rate": 0.0009351474385106672, "loss": 3.6705, "step": 7640 }, { "epoch": 0.5194319880418535, "grad_norm": 1.9736011028289795, "learning_rate": 0.0009351049735018346, "loss": 3.6664, "step": 7645 }, { "epoch": 0.5197717081125153, "grad_norm": 1.9825800657272339, "learning_rate": 0.0009350625084930018, "loss": 3.6199, "step": 7650 }, { "epoch": 0.520111428183177, "grad_norm": 2.1770036220550537, "learning_rate": 0.000935020043484169, "loss": 3.5015, "step": 7655 }, { "epoch": 0.5204511482538389, "grad_norm": 1.8100696802139282, "learning_rate": 0.0009349775784753363, "loss": 3.587, "step": 7660 }, { "epoch": 0.5207908683245006, "grad_norm": 1.9483466148376465, "learning_rate": 0.0009349351134665036, "loss": 3.5488, "step": 7665 }, { "epoch": 0.5211305883951624, "grad_norm": 1.7607131004333496, "learning_rate": 0.0009348926484576709, "loss": 3.629, "step": 7670 }, { "epoch": 0.5214703084658242, "grad_norm": 1.7221271991729736, "learning_rate": 0.0009348501834488382, "loss": 3.458, "step": 7675 }, { "epoch": 0.521810028536486, "grad_norm": 1.6316978931427002, "learning_rate": 0.0009348077184400055, "loss": 3.6754, "step": 7680 }, { "epoch": 0.5221497486071477, "grad_norm": 1.8053470849990845, "learning_rate": 0.0009347652534311727, "loss": 3.6224, "step": 7685 }, { "epoch": 
0.5224894686778094, "grad_norm": 1.7017393112182617, "learning_rate": 0.00093472278842234, "loss": 3.7491, "step": 7690 }, { "epoch": 0.5228291887484713, "grad_norm": 1.8619422912597656, "learning_rate": 0.0009346803234135073, "loss": 3.4319, "step": 7695 }, { "epoch": 0.523168908819133, "grad_norm": 1.5430461168289185, "learning_rate": 0.0009346378584046745, "loss": 3.6672, "step": 7700 }, { "epoch": 0.5235086288897948, "grad_norm": 1.6835323572158813, "learning_rate": 0.0009345953933958419, "loss": 3.8213, "step": 7705 }, { "epoch": 0.5238483489604566, "grad_norm": 1.8673502206802368, "learning_rate": 0.0009345529283870091, "loss": 3.5487, "step": 7710 }, { "epoch": 0.5241880690311184, "grad_norm": 2.383011817932129, "learning_rate": 0.0009345104633781764, "loss": 3.7344, "step": 7715 }, { "epoch": 0.5245277891017801, "grad_norm": 1.936143398284912, "learning_rate": 0.0009344679983693437, "loss": 3.619, "step": 7720 }, { "epoch": 0.524867509172442, "grad_norm": 1.744960069656372, "learning_rate": 0.0009344255333605109, "loss": 3.6331, "step": 7725 }, { "epoch": 0.5252072292431037, "grad_norm": 1.632330298423767, "learning_rate": 0.0009343830683516782, "loss": 3.5547, "step": 7730 }, { "epoch": 0.5255469493137654, "grad_norm": 1.8438084125518799, "learning_rate": 0.0009343406033428455, "loss": 3.4918, "step": 7735 }, { "epoch": 0.5258866693844272, "grad_norm": 1.846116304397583, "learning_rate": 0.0009342981383340128, "loss": 3.5975, "step": 7740 }, { "epoch": 0.526226389455089, "grad_norm": 1.7243828773498535, "learning_rate": 0.0009342556733251801, "loss": 3.579, "step": 7745 }, { "epoch": 0.5265661095257508, "grad_norm": 1.694832682609558, "learning_rate": 0.0009342132083163474, "loss": 3.7646, "step": 7750 }, { "epoch": 0.5269058295964125, "grad_norm": 2.221759557723999, "learning_rate": 0.0009341707433075146, "loss": 3.8283, "step": 7755 }, { "epoch": 0.5272455496670744, "grad_norm": 2.496285915374756, "learning_rate": 0.0009341282782986818, "loss": 3.1966, 
"step": 7760 }, { "epoch": 0.5275852697377361, "grad_norm": 1.994240403175354, "learning_rate": 0.0009340858132898492, "loss": 3.8324, "step": 7765 }, { "epoch": 0.5279249898083979, "grad_norm": 2.963589668273926, "learning_rate": 0.0009340433482810164, "loss": 3.6597, "step": 7770 }, { "epoch": 0.5282647098790596, "grad_norm": 1.9338701963424683, "learning_rate": 0.0009340008832721837, "loss": 3.8113, "step": 7775 }, { "epoch": 0.5286044299497215, "grad_norm": 2.0599112510681152, "learning_rate": 0.0009339584182633511, "loss": 3.3391, "step": 7780 }, { "epoch": 0.5289441500203832, "grad_norm": 1.9342522621154785, "learning_rate": 0.0009339159532545183, "loss": 3.8998, "step": 7785 }, { "epoch": 0.5292838700910449, "grad_norm": 1.8075045347213745, "learning_rate": 0.0009338734882456855, "loss": 3.8411, "step": 7790 }, { "epoch": 0.5296235901617068, "grad_norm": 1.9115121364593506, "learning_rate": 0.0009338310232368529, "loss": 3.765, "step": 7795 }, { "epoch": 0.5299633102323685, "grad_norm": 2.1400699615478516, "learning_rate": 0.0009337885582280201, "loss": 3.3985, "step": 7800 }, { "epoch": 0.5303030303030303, "grad_norm": 1.7827759981155396, "learning_rate": 0.0009337460932191873, "loss": 3.5183, "step": 7805 }, { "epoch": 0.5306427503736921, "grad_norm": 2.382856607437134, "learning_rate": 0.0009337036282103547, "loss": 3.5721, "step": 7810 }, { "epoch": 0.5309824704443539, "grad_norm": 2.0456905364990234, "learning_rate": 0.000933661163201522, "loss": 3.618, "step": 7815 }, { "epoch": 0.5313221905150156, "grad_norm": 1.7404433488845825, "learning_rate": 0.0009336186981926893, "loss": 3.7085, "step": 7820 }, { "epoch": 0.5316619105856774, "grad_norm": 1.8169869184494019, "learning_rate": 0.0009335762331838565, "loss": 3.7533, "step": 7825 }, { "epoch": 0.5320016306563392, "grad_norm": 2.2020535469055176, "learning_rate": 0.0009335337681750238, "loss": 3.5612, "step": 7830 }, { "epoch": 0.532341350727001, "grad_norm": 2.1678431034088135, "learning_rate": 
0.0009334913031661911, "loss": 3.7271, "step": 7835 }, { "epoch": 0.5326810707976627, "grad_norm": 2.2185957431793213, "learning_rate": 0.0009334488381573583, "loss": 3.8622, "step": 7840 }, { "epoch": 0.5330207908683245, "grad_norm": 2.1416399478912354, "learning_rate": 0.0009334063731485257, "loss": 3.6141, "step": 7845 }, { "epoch": 0.5333605109389863, "grad_norm": 1.5625866651535034, "learning_rate": 0.000933363908139693, "loss": 3.6773, "step": 7850 }, { "epoch": 0.533700231009648, "grad_norm": 1.5168652534484863, "learning_rate": 0.0009333214431308602, "loss": 3.701, "step": 7855 }, { "epoch": 0.5340399510803098, "grad_norm": 1.8613463640213013, "learning_rate": 0.0009332789781220274, "loss": 3.6577, "step": 7860 }, { "epoch": 0.5343796711509716, "grad_norm": 2.194615364074707, "learning_rate": 0.0009332365131131948, "loss": 3.6849, "step": 7865 }, { "epoch": 0.5347193912216334, "grad_norm": 1.432121992111206, "learning_rate": 0.000933194048104362, "loss": 3.6774, "step": 7870 }, { "epoch": 0.5350591112922951, "grad_norm": 1.5764737129211426, "learning_rate": 0.0009331515830955292, "loss": 3.6295, "step": 7875 }, { "epoch": 0.535398831362957, "grad_norm": 2.183804750442505, "learning_rate": 0.0009331091180866967, "loss": 3.8248, "step": 7880 }, { "epoch": 0.5357385514336187, "grad_norm": 2.1378016471862793, "learning_rate": 0.0009330666530778639, "loss": 3.6167, "step": 7885 }, { "epoch": 0.5360782715042804, "grad_norm": 1.9690850973129272, "learning_rate": 0.0009330241880690311, "loss": 3.8316, "step": 7890 }, { "epoch": 0.5364179915749423, "grad_norm": 2.038747787475586, "learning_rate": 0.0009329817230601985, "loss": 3.4866, "step": 7895 }, { "epoch": 0.536757711645604, "grad_norm": 1.8043183088302612, "learning_rate": 0.0009329392580513657, "loss": 3.3908, "step": 7900 }, { "epoch": 0.5370974317162658, "grad_norm": 1.895783543586731, "learning_rate": 0.0009328967930425329, "loss": 3.7968, "step": 7905 }, { "epoch": 0.5374371517869275, "grad_norm": 
1.7149784564971924, "learning_rate": 0.0009328543280337002, "loss": 3.6978, "step": 7910 }, { "epoch": 0.5377768718575894, "grad_norm": 1.422873854637146, "learning_rate": 0.0009328118630248676, "loss": 3.793, "step": 7915 }, { "epoch": 0.5381165919282511, "grad_norm": 2.2710208892822266, "learning_rate": 0.0009327693980160348, "loss": 3.5154, "step": 7920 }, { "epoch": 0.5384563119989129, "grad_norm": 1.9760037660598755, "learning_rate": 0.0009327269330072021, "loss": 3.6342, "step": 7925 }, { "epoch": 0.5387960320695747, "grad_norm": 1.7053872346878052, "learning_rate": 0.0009326844679983694, "loss": 3.8375, "step": 7930 }, { "epoch": 0.5391357521402365, "grad_norm": 1.7250990867614746, "learning_rate": 0.0009326420029895366, "loss": 3.7914, "step": 7935 }, { "epoch": 0.5394754722108982, "grad_norm": 2.3732166290283203, "learning_rate": 0.0009325995379807039, "loss": 3.5635, "step": 7940 }, { "epoch": 0.5398151922815599, "grad_norm": 2.1176068782806396, "learning_rate": 0.0009325570729718711, "loss": 3.6175, "step": 7945 }, { "epoch": 0.5401549123522218, "grad_norm": 2.0409982204437256, "learning_rate": 0.0009325146079630385, "loss": 3.7548, "step": 7950 }, { "epoch": 0.5404946324228835, "grad_norm": 1.7936404943466187, "learning_rate": 0.0009324721429542058, "loss": 3.3825, "step": 7955 }, { "epoch": 0.5408343524935453, "grad_norm": 1.6753736734390259, "learning_rate": 0.000932429677945373, "loss": 3.6269, "step": 7960 }, { "epoch": 0.5411740725642071, "grad_norm": 1.379127860069275, "learning_rate": 0.0009323872129365403, "loss": 3.6559, "step": 7965 }, { "epoch": 0.5415137926348689, "grad_norm": 2.4368817806243896, "learning_rate": 0.0009323447479277076, "loss": 3.5273, "step": 7970 }, { "epoch": 0.5418535127055306, "grad_norm": 1.558010458946228, "learning_rate": 0.0009323022829188748, "loss": 3.6478, "step": 7975 }, { "epoch": 0.5421932327761925, "grad_norm": 2.040591239929199, "learning_rate": 0.0009322598179100421, "loss": 3.669, "step": 7980 }, { "epoch": 
0.5425329528468542, "grad_norm": 1.9291584491729736, "learning_rate": 0.0009322173529012095, "loss": 3.7638, "step": 7985 }, { "epoch": 0.542872672917516, "grad_norm": 1.7299983501434326, "learning_rate": 0.0009321748878923767, "loss": 3.6892, "step": 7990 }, { "epoch": 0.5432123929881777, "grad_norm": 2.273134469985962, "learning_rate": 0.000932132422883544, "loss": 3.7055, "step": 7995 }, { "epoch": 0.5435521130588395, "grad_norm": 2.2705910205841064, "learning_rate": 0.0009320899578747113, "loss": 3.7567, "step": 8000 }, { "epoch": 0.5438918331295013, "grad_norm": 1.7267683744430542, "learning_rate": 0.0009320474928658785, "loss": 3.5032, "step": 8005 }, { "epoch": 0.544231553200163, "grad_norm": 1.9954707622528076, "learning_rate": 0.0009320050278570457, "loss": 3.6151, "step": 8010 }, { "epoch": 0.5445712732708249, "grad_norm": 1.6746381521224976, "learning_rate": 0.0009319625628482131, "loss": 3.6133, "step": 8015 }, { "epoch": 0.5449109933414866, "grad_norm": 1.8626587390899658, "learning_rate": 0.0009319200978393804, "loss": 3.6175, "step": 8020 }, { "epoch": 0.5452507134121484, "grad_norm": 1.8662619590759277, "learning_rate": 0.0009318776328305476, "loss": 3.7951, "step": 8025 }, { "epoch": 0.5455904334828101, "grad_norm": 2.050415515899658, "learning_rate": 0.000931835167821715, "loss": 3.5931, "step": 8030 }, { "epoch": 0.545930153553472, "grad_norm": 1.5256080627441406, "learning_rate": 0.0009317927028128822, "loss": 3.6294, "step": 8035 }, { "epoch": 0.5462698736241337, "grad_norm": 1.9006472826004028, "learning_rate": 0.0009317502378040494, "loss": 3.4182, "step": 8040 }, { "epoch": 0.5466095936947954, "grad_norm": 1.7928365468978882, "learning_rate": 0.0009317077727952168, "loss": 3.4705, "step": 8045 }, { "epoch": 0.5469493137654573, "grad_norm": 2.194777488708496, "learning_rate": 0.000931665307786384, "loss": 3.4633, "step": 8050 }, { "epoch": 0.547289033836119, "grad_norm": 1.7905651330947876, "learning_rate": 0.0009316228427775513, "loss": 
3.6098, "step": 8055 }, { "epoch": 0.5476287539067808, "grad_norm": 1.8976613283157349, "learning_rate": 0.0009315803777687186, "loss": 3.634, "step": 8060 }, { "epoch": 0.5479684739774426, "grad_norm": 2.1543593406677246, "learning_rate": 0.0009315379127598859, "loss": 3.6417, "step": 8065 }, { "epoch": 0.5483081940481044, "grad_norm": 2.070128917694092, "learning_rate": 0.0009314954477510531, "loss": 3.6992, "step": 8070 }, { "epoch": 0.5486479141187661, "grad_norm": 1.588387131690979, "learning_rate": 0.0009314529827422204, "loss": 3.6064, "step": 8075 }, { "epoch": 0.5489876341894279, "grad_norm": 1.6052368879318237, "learning_rate": 0.0009314105177333877, "loss": 3.6384, "step": 8080 }, { "epoch": 0.5493273542600897, "grad_norm": 1.9266494512557983, "learning_rate": 0.0009313680527245549, "loss": 3.6514, "step": 8085 }, { "epoch": 0.5496670743307515, "grad_norm": 2.177419424057007, "learning_rate": 0.0009313255877157223, "loss": 3.6871, "step": 8090 }, { "epoch": 0.5500067944014132, "grad_norm": 2.099846363067627, "learning_rate": 0.0009312831227068896, "loss": 3.6182, "step": 8095 }, { "epoch": 0.550346514472075, "grad_norm": 2.32961106300354, "learning_rate": 0.0009312406576980568, "loss": 3.7078, "step": 8100 }, { "epoch": 0.5506862345427368, "grad_norm": 2.024815797805786, "learning_rate": 0.0009311981926892241, "loss": 3.5291, "step": 8105 }, { "epoch": 0.5510259546133985, "grad_norm": 2.275057315826416, "learning_rate": 0.0009311557276803913, "loss": 3.5856, "step": 8110 }, { "epoch": 0.5513656746840603, "grad_norm": 1.7196167707443237, "learning_rate": 0.0009311132626715586, "loss": 3.5163, "step": 8115 }, { "epoch": 0.5517053947547221, "grad_norm": 1.726975917816162, "learning_rate": 0.000931070797662726, "loss": 3.5777, "step": 8120 }, { "epoch": 0.5520451148253839, "grad_norm": 1.84687077999115, "learning_rate": 0.0009310283326538932, "loss": 3.7539, "step": 8125 }, { "epoch": 0.5523848348960456, "grad_norm": 2.3164548873901367, "learning_rate": 
0.0009309858676450605, "loss": 3.782, "step": 8130 }, { "epoch": 0.5527245549667075, "grad_norm": 1.7663824558258057, "learning_rate": 0.0009309434026362278, "loss": 3.4883, "step": 8135 }, { "epoch": 0.5530642750373692, "grad_norm": 1.9788618087768555, "learning_rate": 0.000930900937627395, "loss": 3.5975, "step": 8140 }, { "epoch": 0.553403995108031, "grad_norm": 2.324986219406128, "learning_rate": 0.0009308584726185622, "loss": 3.6927, "step": 8145 }, { "epoch": 0.5537437151786928, "grad_norm": 2.3872454166412354, "learning_rate": 0.0009308160076097296, "loss": 3.5428, "step": 8150 }, { "epoch": 0.5540834352493546, "grad_norm": 2.4835164546966553, "learning_rate": 0.0009307735426008969, "loss": 3.563, "step": 8155 }, { "epoch": 0.5544231553200163, "grad_norm": 1.8395699262619019, "learning_rate": 0.0009307310775920642, "loss": 3.474, "step": 8160 }, { "epoch": 0.554762875390678, "grad_norm": 1.9332760572433472, "learning_rate": 0.0009306886125832315, "loss": 3.6884, "step": 8165 }, { "epoch": 0.5551025954613399, "grad_norm": 1.5264747142791748, "learning_rate": 0.0009306461475743987, "loss": 3.8004, "step": 8170 }, { "epoch": 0.5554423155320016, "grad_norm": 1.545542597770691, "learning_rate": 0.000930603682565566, "loss": 3.6881, "step": 8175 }, { "epoch": 0.5557820356026634, "grad_norm": 1.6236293315887451, "learning_rate": 0.0009305612175567333, "loss": 3.6158, "step": 8180 }, { "epoch": 0.5561217556733252, "grad_norm": 1.7472355365753174, "learning_rate": 0.0009305187525479005, "loss": 3.531, "step": 8185 }, { "epoch": 0.556461475743987, "grad_norm": 1.9212998151779175, "learning_rate": 0.0009304762875390679, "loss": 3.6483, "step": 8190 }, { "epoch": 0.5568011958146487, "grad_norm": 2.169081687927246, "learning_rate": 0.0009304338225302352, "loss": 3.647, "step": 8195 }, { "epoch": 0.5571409158853105, "grad_norm": 1.646346926689148, "learning_rate": 0.0009303913575214024, "loss": 3.62, "step": 8200 }, { "epoch": 0.5574806359559723, "grad_norm": 
2.0602192878723145, "learning_rate": 0.0009303488925125697, "loss": 3.5691, "step": 8205 }, { "epoch": 0.557820356026634, "grad_norm": 2.0629189014434814, "learning_rate": 0.0009303064275037369, "loss": 3.6187, "step": 8210 }, { "epoch": 0.5581600760972958, "grad_norm": 1.909126877784729, "learning_rate": 0.0009302639624949042, "loss": 3.5869, "step": 8215 }, { "epoch": 0.5584997961679576, "grad_norm": 1.6874308586120605, "learning_rate": 0.0009302214974860715, "loss": 3.759, "step": 8220 }, { "epoch": 0.5588395162386194, "grad_norm": 2.503085136413574, "learning_rate": 0.0009301790324772388, "loss": 3.5068, "step": 8225 }, { "epoch": 0.5591792363092811, "grad_norm": 1.7304047346115112, "learning_rate": 0.0009301365674684061, "loss": 3.7701, "step": 8230 }, { "epoch": 0.559518956379943, "grad_norm": 2.0035300254821777, "learning_rate": 0.0009300941024595734, "loss": 3.6377, "step": 8235 }, { "epoch": 0.5598586764506047, "grad_norm": 1.6262927055358887, "learning_rate": 0.0009300516374507406, "loss": 3.8854, "step": 8240 }, { "epoch": 0.5601983965212665, "grad_norm": 1.9815477132797241, "learning_rate": 0.0009300091724419078, "loss": 3.4285, "step": 8245 }, { "epoch": 0.5605381165919282, "grad_norm": 2.025541067123413, "learning_rate": 0.0009299667074330752, "loss": 3.6223, "step": 8250 }, { "epoch": 0.5608778366625901, "grad_norm": 1.7665048837661743, "learning_rate": 0.0009299242424242424, "loss": 3.4159, "step": 8255 }, { "epoch": 0.5612175567332518, "grad_norm": 1.7804021835327148, "learning_rate": 0.0009298817774154097, "loss": 3.6145, "step": 8260 }, { "epoch": 0.5615572768039135, "grad_norm": 1.720787525177002, "learning_rate": 0.0009298393124065771, "loss": 3.5861, "step": 8265 }, { "epoch": 0.5618969968745754, "grad_norm": 1.6045082807540894, "learning_rate": 0.0009297968473977443, "loss": 3.4765, "step": 8270 }, { "epoch": 0.5622367169452371, "grad_norm": 2.549987316131592, "learning_rate": 0.0009297543823889115, "loss": 3.5163, "step": 8275 }, { "epoch": 
0.5625764370158989, "grad_norm": 2.334484338760376, "learning_rate": 0.0009297119173800789, "loss": 3.7248, "step": 8280 }, { "epoch": 0.5629161570865606, "grad_norm": 1.8090500831604004, "learning_rate": 0.0009296694523712461, "loss": 3.6028, "step": 8285 }, { "epoch": 0.5632558771572225, "grad_norm": 1.8580840826034546, "learning_rate": 0.0009296269873624133, "loss": 3.8011, "step": 8290 }, { "epoch": 0.5635955972278842, "grad_norm": 2.177724599838257, "learning_rate": 0.0009295845223535808, "loss": 3.5576, "step": 8295 }, { "epoch": 0.563935317298546, "grad_norm": 1.658918857574463, "learning_rate": 0.000929542057344748, "loss": 3.7448, "step": 8300 }, { "epoch": 0.5642750373692078, "grad_norm": 1.607996940612793, "learning_rate": 0.0009294995923359152, "loss": 3.5753, "step": 8305 }, { "epoch": 0.5646147574398696, "grad_norm": 1.750438928604126, "learning_rate": 0.0009294571273270825, "loss": 3.5686, "step": 8310 }, { "epoch": 0.5649544775105313, "grad_norm": 1.8382105827331543, "learning_rate": 0.0009294146623182498, "loss": 3.5224, "step": 8315 }, { "epoch": 0.5652941975811931, "grad_norm": 1.7467728853225708, "learning_rate": 0.000929372197309417, "loss": 3.4339, "step": 8320 }, { "epoch": 0.5656339176518549, "grad_norm": 1.9608042240142822, "learning_rate": 0.0009293297323005843, "loss": 3.7933, "step": 8325 }, { "epoch": 0.5659736377225166, "grad_norm": 1.948883295059204, "learning_rate": 0.0009292872672917517, "loss": 3.72, "step": 8330 }, { "epoch": 0.5663133577931784, "grad_norm": 1.72773277759552, "learning_rate": 0.0009292448022829189, "loss": 3.8091, "step": 8335 }, { "epoch": 0.5666530778638402, "grad_norm": 2.0996508598327637, "learning_rate": 0.0009292023372740862, "loss": 3.7175, "step": 8340 }, { "epoch": 0.566992797934502, "grad_norm": 1.7901890277862549, "learning_rate": 0.0009291598722652534, "loss": 3.5998, "step": 8345 }, { "epoch": 0.5673325180051637, "grad_norm": 2.510694980621338, "learning_rate": 0.0009291174072564207, "loss": 3.9552, 
"step": 8350 }, { "epoch": 0.5676722380758256, "grad_norm": 1.6055324077606201, "learning_rate": 0.000929074942247588, "loss": 3.7867, "step": 8355 }, { "epoch": 0.5680119581464873, "grad_norm": 1.9331703186035156, "learning_rate": 0.0009290324772387552, "loss": 3.5343, "step": 8360 }, { "epoch": 0.568351678217149, "grad_norm": 1.7353618144989014, "learning_rate": 0.0009289900122299226, "loss": 3.5656, "step": 8365 }, { "epoch": 0.5686913982878108, "grad_norm": 1.6821918487548828, "learning_rate": 0.0009289475472210899, "loss": 3.6009, "step": 8370 }, { "epoch": 0.5690311183584726, "grad_norm": 1.4653825759887695, "learning_rate": 0.0009289050822122571, "loss": 3.8247, "step": 8375 }, { "epoch": 0.5693708384291344, "grad_norm": 1.679146647453308, "learning_rate": 0.0009288626172034244, "loss": 3.8505, "step": 8380 }, { "epoch": 0.5697105584997961, "grad_norm": 1.7367463111877441, "learning_rate": 0.0009288201521945917, "loss": 3.6185, "step": 8385 }, { "epoch": 0.570050278570458, "grad_norm": 1.90449059009552, "learning_rate": 0.0009287776871857589, "loss": 3.5657, "step": 8390 }, { "epoch": 0.5703899986411197, "grad_norm": 1.890317678451538, "learning_rate": 0.0009287352221769261, "loss": 3.8699, "step": 8395 }, { "epoch": 0.5707297187117815, "grad_norm": 1.825915813446045, "learning_rate": 0.0009286927571680936, "loss": 3.5497, "step": 8400 }, { "epoch": 0.5710694387824433, "grad_norm": 1.8238065242767334, "learning_rate": 0.0009286502921592608, "loss": 3.6584, "step": 8405 }, { "epoch": 0.5714091588531051, "grad_norm": 2.416472911834717, "learning_rate": 0.000928607827150428, "loss": 3.7021, "step": 8410 }, { "epoch": 0.5717488789237668, "grad_norm": 1.9791520833969116, "learning_rate": 0.0009285653621415954, "loss": 3.2757, "step": 8415 }, { "epoch": 0.5720885989944285, "grad_norm": 1.9630755186080933, "learning_rate": 0.0009285228971327626, "loss": 3.7327, "step": 8420 }, { "epoch": 0.5724283190650904, "grad_norm": 1.4304882287979126, "learning_rate": 
0.0009284804321239298, "loss": 3.6264, "step": 8425 }, { "epoch": 0.5727680391357521, "grad_norm": 1.5768704414367676, "learning_rate": 0.0009284379671150972, "loss": 3.6625, "step": 8430 }, { "epoch": 0.5731077592064139, "grad_norm": 2.6382339000701904, "learning_rate": 0.0009283955021062645, "loss": 3.4694, "step": 8435 }, { "epoch": 0.5734474792770757, "grad_norm": 2.146211624145508, "learning_rate": 0.0009283530370974317, "loss": 3.6337, "step": 8440 }, { "epoch": 0.5737871993477375, "grad_norm": 2.092672348022461, "learning_rate": 0.000928310572088599, "loss": 3.4648, "step": 8445 }, { "epoch": 0.5741269194183992, "grad_norm": 1.9665969610214233, "learning_rate": 0.0009282681070797663, "loss": 3.646, "step": 8450 }, { "epoch": 0.574466639489061, "grad_norm": 2.0691072940826416, "learning_rate": 0.0009282256420709335, "loss": 3.3584, "step": 8455 }, { "epoch": 0.5748063595597228, "grad_norm": 1.8948931694030762, "learning_rate": 0.0009281831770621008, "loss": 3.7484, "step": 8460 }, { "epoch": 0.5751460796303846, "grad_norm": 1.69923996925354, "learning_rate": 0.0009281407120532681, "loss": 3.688, "step": 8465 }, { "epoch": 0.5754857997010463, "grad_norm": 1.7243754863739014, "learning_rate": 0.0009280982470444354, "loss": 3.5443, "step": 8470 }, { "epoch": 0.5758255197717081, "grad_norm": 1.9562031030654907, "learning_rate": 0.0009280557820356027, "loss": 3.6327, "step": 8475 }, { "epoch": 0.5761652398423699, "grad_norm": 1.841532826423645, "learning_rate": 0.00092801331702677, "loss": 3.4762, "step": 8480 }, { "epoch": 0.5765049599130316, "grad_norm": 1.9201101064682007, "learning_rate": 0.0009279708520179372, "loss": 3.6564, "step": 8485 }, { "epoch": 0.5768446799836935, "grad_norm": 2.133340835571289, "learning_rate": 0.0009279283870091045, "loss": 3.4374, "step": 8490 }, { "epoch": 0.5771844000543552, "grad_norm": 2.3442163467407227, "learning_rate": 0.0009278859220002717, "loss": 3.6382, "step": 8495 }, { "epoch": 0.577524120125017, "grad_norm": 
1.5687851905822754, "learning_rate": 0.0009278434569914391, "loss": 3.6815, "step": 8500 }, { "epoch": 0.5778638401956787, "grad_norm": 2.0196151733398438, "learning_rate": 0.0009278009919826064, "loss": 3.5057, "step": 8505 }, { "epoch": 0.5782035602663406, "grad_norm": 1.8515969514846802, "learning_rate": 0.0009277585269737736, "loss": 3.6205, "step": 8510 }, { "epoch": 0.5785432803370023, "grad_norm": 4.728092670440674, "learning_rate": 0.000927716061964941, "loss": 3.5655, "step": 8515 }, { "epoch": 0.578883000407664, "grad_norm": 1.8320356607437134, "learning_rate": 0.0009276735969561082, "loss": 3.413, "step": 8520 }, { "epoch": 0.5792227204783259, "grad_norm": 1.5707504749298096, "learning_rate": 0.0009276311319472754, "loss": 3.5361, "step": 8525 }, { "epoch": 0.5795624405489876, "grad_norm": 1.5375465154647827, "learning_rate": 0.0009275886669384428, "loss": 3.4968, "step": 8530 }, { "epoch": 0.5799021606196494, "grad_norm": 2.0489118099212646, "learning_rate": 0.00092754620192961, "loss": 3.571, "step": 8535 }, { "epoch": 0.5802418806903111, "grad_norm": 2.755242347717285, "learning_rate": 0.0009275037369207773, "loss": 3.5502, "step": 8540 }, { "epoch": 0.580581600760973, "grad_norm": 2.1710150241851807, "learning_rate": 0.0009274612719119447, "loss": 3.7356, "step": 8545 }, { "epoch": 0.5809213208316347, "grad_norm": 1.6549665927886963, "learning_rate": 0.0009274188069031119, "loss": 3.4254, "step": 8550 }, { "epoch": 0.5812610409022965, "grad_norm": 1.7644845247268677, "learning_rate": 0.0009273763418942791, "loss": 3.6197, "step": 8555 }, { "epoch": 0.5816007609729583, "grad_norm": 2.2540664672851562, "learning_rate": 0.0009273338768854464, "loss": 3.6853, "step": 8560 }, { "epoch": 0.5819404810436201, "grad_norm": 1.8398905992507935, "learning_rate": 0.0009272914118766137, "loss": 3.5883, "step": 8565 }, { "epoch": 0.5822802011142818, "grad_norm": 2.229562520980835, "learning_rate": 0.0009272489468677809, "loss": 3.6155, "step": 8570 }, { "epoch": 
0.5826199211849437, "grad_norm": 2.103154182434082, "learning_rate": 0.0009272064818589483, "loss": 3.5309, "step": 8575 }, { "epoch": 0.5829596412556054, "grad_norm": 1.837262749671936, "learning_rate": 0.0009271640168501156, "loss": 3.475, "step": 8580 }, { "epoch": 0.5832993613262671, "grad_norm": 1.751814365386963, "learning_rate": 0.0009271215518412828, "loss": 3.7108, "step": 8585 }, { "epoch": 0.5836390813969289, "grad_norm": 2.098740816116333, "learning_rate": 0.0009270790868324501, "loss": 3.7228, "step": 8590 }, { "epoch": 0.5839788014675907, "grad_norm": 1.6498565673828125, "learning_rate": 0.0009270366218236173, "loss": 3.9626, "step": 8595 }, { "epoch": 0.5843185215382525, "grad_norm": 1.8823027610778809, "learning_rate": 0.0009269941568147846, "loss": 3.6164, "step": 8600 }, { "epoch": 0.5846582416089142, "grad_norm": 2.2393128871917725, "learning_rate": 0.000926951691805952, "loss": 3.934, "step": 8605 }, { "epoch": 0.5849979616795761, "grad_norm": 1.6947885751724243, "learning_rate": 0.0009269092267971192, "loss": 3.5021, "step": 8610 }, { "epoch": 0.5853376817502378, "grad_norm": 1.981837272644043, "learning_rate": 0.0009268667617882865, "loss": 3.6408, "step": 8615 }, { "epoch": 0.5856774018208996, "grad_norm": 2.533721923828125, "learning_rate": 0.0009268242967794538, "loss": 3.5606, "step": 8620 }, { "epoch": 0.5860171218915613, "grad_norm": 2.1879515647888184, "learning_rate": 0.000926781831770621, "loss": 3.6082, "step": 8625 }, { "epoch": 0.5863568419622232, "grad_norm": 1.8596590757369995, "learning_rate": 0.0009267393667617882, "loss": 3.8089, "step": 8630 }, { "epoch": 0.5866965620328849, "grad_norm": 3.5611531734466553, "learning_rate": 0.0009266969017529556, "loss": 3.6467, "step": 8635 }, { "epoch": 0.5870362821035466, "grad_norm": 2.310742139816284, "learning_rate": 0.0009266544367441229, "loss": 3.3063, "step": 8640 }, { "epoch": 0.5873760021742085, "grad_norm": 1.8442643880844116, "learning_rate": 0.0009266119717352901, "loss": 
3.9651, "step": 8645 }, { "epoch": 0.5877157222448702, "grad_norm": 1.6318575143814087, "learning_rate": 0.0009265695067264575, "loss": 3.899, "step": 8650 }, { "epoch": 0.588055442315532, "grad_norm": 1.6776069402694702, "learning_rate": 0.0009265270417176247, "loss": 3.4411, "step": 8655 }, { "epoch": 0.5883951623861938, "grad_norm": 2.499911308288574, "learning_rate": 0.0009264845767087919, "loss": 3.4955, "step": 8660 }, { "epoch": 0.5887348824568556, "grad_norm": 1.6062639951705933, "learning_rate": 0.0009264421116999593, "loss": 3.5416, "step": 8665 }, { "epoch": 0.5890746025275173, "grad_norm": 1.4478684663772583, "learning_rate": 0.0009263996466911265, "loss": 3.7491, "step": 8670 }, { "epoch": 0.589414322598179, "grad_norm": 1.7573801279067993, "learning_rate": 0.0009263571816822938, "loss": 3.5848, "step": 8675 }, { "epoch": 0.5897540426688409, "grad_norm": 1.5092499256134033, "learning_rate": 0.0009263147166734612, "loss": 3.6024, "step": 8680 }, { "epoch": 0.5900937627395026, "grad_norm": 1.8347514867782593, "learning_rate": 0.0009262722516646284, "loss": 3.4624, "step": 8685 }, { "epoch": 0.5904334828101644, "grad_norm": 1.897250771522522, "learning_rate": 0.0009262297866557956, "loss": 3.3721, "step": 8690 }, { "epoch": 0.5907732028808262, "grad_norm": 1.8485082387924194, "learning_rate": 0.0009261873216469629, "loss": 3.6259, "step": 8695 }, { "epoch": 0.591112922951488, "grad_norm": 1.817400336265564, "learning_rate": 0.0009261448566381302, "loss": 3.7443, "step": 8700 }, { "epoch": 0.5914526430221497, "grad_norm": 1.9090909957885742, "learning_rate": 0.0009261023916292974, "loss": 3.6915, "step": 8705 }, { "epoch": 0.5917923630928115, "grad_norm": 1.8996124267578125, "learning_rate": 0.0009260599266204648, "loss": 3.6208, "step": 8710 }, { "epoch": 0.5921320831634733, "grad_norm": 2.000065565109253, "learning_rate": 0.0009260174616116321, "loss": 3.4142, "step": 8715 }, { "epoch": 0.5924718032341351, "grad_norm": 2.191530227661133, "learning_rate": 
0.0009259749966027993, "loss": 3.6147, "step": 8720 }, { "epoch": 0.5928115233047968, "grad_norm": 2.181163787841797, "learning_rate": 0.0009259325315939666, "loss": 3.7393, "step": 8725 }, { "epoch": 0.5931512433754587, "grad_norm": 2.0871381759643555, "learning_rate": 0.0009258900665851339, "loss": 3.8373, "step": 8730 }, { "epoch": 0.5934909634461204, "grad_norm": 2.4327073097229004, "learning_rate": 0.0009258476015763011, "loss": 3.6251, "step": 8735 }, { "epoch": 0.5938306835167821, "grad_norm": 2.0847721099853516, "learning_rate": 0.0009258051365674684, "loss": 3.7404, "step": 8740 }, { "epoch": 0.594170403587444, "grad_norm": 1.9051098823547363, "learning_rate": 0.0009257626715586357, "loss": 3.5875, "step": 8745 }, { "epoch": 0.5945101236581057, "grad_norm": 1.7826132774353027, "learning_rate": 0.000925720206549803, "loss": 3.6171, "step": 8750 }, { "epoch": 0.5948498437287675, "grad_norm": 1.596144199371338, "learning_rate": 0.0009256777415409703, "loss": 3.4137, "step": 8755 }, { "epoch": 0.5951895637994292, "grad_norm": 2.5888378620147705, "learning_rate": 0.0009256352765321375, "loss": 3.7156, "step": 8760 }, { "epoch": 0.5955292838700911, "grad_norm": 1.9871124029159546, "learning_rate": 0.0009255928115233048, "loss": 3.4966, "step": 8765 }, { "epoch": 0.5958690039407528, "grad_norm": 1.6214646100997925, "learning_rate": 0.0009255503465144721, "loss": 3.5629, "step": 8770 }, { "epoch": 0.5962087240114146, "grad_norm": 2.0833816528320312, "learning_rate": 0.0009255078815056393, "loss": 3.6258, "step": 8775 }, { "epoch": 0.5965484440820764, "grad_norm": 1.7272230386734009, "learning_rate": 0.0009254654164968067, "loss": 3.5886, "step": 8780 }, { "epoch": 0.5968881641527382, "grad_norm": 2.2348921298980713, "learning_rate": 0.000925422951487974, "loss": 3.6912, "step": 8785 }, { "epoch": 0.5972278842233999, "grad_norm": 1.9288824796676636, "learning_rate": 0.0009253804864791412, "loss": 3.6875, "step": 8790 }, { "epoch": 0.5975676042940616, "grad_norm": 
1.615113377571106, "learning_rate": 0.0009253380214703084, "loss": 3.6645, "step": 8795 }, { "epoch": 0.5979073243647235, "grad_norm": 1.712365746498108, "learning_rate": 0.0009252955564614758, "loss": 3.5363, "step": 8800 }, { "epoch": 0.5982470444353852, "grad_norm": 1.8899204730987549, "learning_rate": 0.000925253091452643, "loss": 3.6285, "step": 8805 }, { "epoch": 0.598586764506047, "grad_norm": 1.821804165840149, "learning_rate": 0.0009252106264438102, "loss": 3.6811, "step": 8810 }, { "epoch": 0.5989264845767088, "grad_norm": 2.5174570083618164, "learning_rate": 0.0009251681614349777, "loss": 3.671, "step": 8815 }, { "epoch": 0.5992662046473706, "grad_norm": 2.132817029953003, "learning_rate": 0.0009251256964261449, "loss": 3.4351, "step": 8820 }, { "epoch": 0.5996059247180323, "grad_norm": 2.033853054046631, "learning_rate": 0.0009250832314173121, "loss": 3.537, "step": 8825 }, { "epoch": 0.5999456447886942, "grad_norm": 1.5978028774261475, "learning_rate": 0.0009250407664084795, "loss": 3.4968, "step": 8830 }, { "epoch": 0.6002853648593559, "grad_norm": 1.7544993162155151, "learning_rate": 0.0009249983013996467, "loss": 3.6311, "step": 8835 }, { "epoch": 0.6006250849300176, "grad_norm": 2.0209150314331055, "learning_rate": 0.000924955836390814, "loss": 3.6595, "step": 8840 }, { "epoch": 0.6009648050006794, "grad_norm": 1.779381513595581, "learning_rate": 0.0009249133713819812, "loss": 3.6223, "step": 8845 }, { "epoch": 0.6013045250713412, "grad_norm": 1.8088514804840088, "learning_rate": 0.0009248709063731486, "loss": 3.8173, "step": 8850 }, { "epoch": 0.601644245142003, "grad_norm": 1.9151737689971924, "learning_rate": 0.0009248284413643159, "loss": 3.6629, "step": 8855 }, { "epoch": 0.6019839652126647, "grad_norm": 1.7606257200241089, "learning_rate": 0.0009247859763554831, "loss": 3.5862, "step": 8860 }, { "epoch": 0.6023236852833266, "grad_norm": 2.086052417755127, "learning_rate": 0.0009247435113466504, "loss": 3.5932, "step": 8865 }, { "epoch": 
0.6026634053539883, "grad_norm": 1.6921601295471191, "learning_rate": 0.0009247010463378177, "loss": 3.4901, "step": 8870 }, { "epoch": 0.6030031254246501, "grad_norm": 1.9764739274978638, "learning_rate": 0.0009246585813289849, "loss": 3.5791, "step": 8875 }, { "epoch": 0.6033428454953118, "grad_norm": 2.1354870796203613, "learning_rate": 0.0009246161163201521, "loss": 3.5832, "step": 8880 }, { "epoch": 0.6036825655659737, "grad_norm": 1.9876662492752075, "learning_rate": 0.0009245736513113196, "loss": 3.7982, "step": 8885 }, { "epoch": 0.6040222856366354, "grad_norm": 1.5061699151992798, "learning_rate": 0.0009245311863024868, "loss": 3.6329, "step": 8890 }, { "epoch": 0.6043620057072971, "grad_norm": 2.0100812911987305, "learning_rate": 0.000924488721293654, "loss": 3.6815, "step": 8895 }, { "epoch": 0.604701725777959, "grad_norm": 1.900373935699463, "learning_rate": 0.0009244462562848214, "loss": 3.8034, "step": 8900 }, { "epoch": 0.6050414458486207, "grad_norm": 2.0891613960266113, "learning_rate": 0.0009244037912759886, "loss": 3.5894, "step": 8905 }, { "epoch": 0.6053811659192825, "grad_norm": 1.704795002937317, "learning_rate": 0.0009243613262671558, "loss": 3.6352, "step": 8910 }, { "epoch": 0.6057208859899443, "grad_norm": 2.3815388679504395, "learning_rate": 0.0009243188612583232, "loss": 3.5363, "step": 8915 }, { "epoch": 0.6060606060606061, "grad_norm": 1.6923481225967407, "learning_rate": 0.0009242763962494905, "loss": 3.5618, "step": 8920 }, { "epoch": 0.6064003261312678, "grad_norm": 2.048490047454834, "learning_rate": 0.0009242339312406577, "loss": 3.475, "step": 8925 }, { "epoch": 0.6067400462019296, "grad_norm": 2.396881103515625, "learning_rate": 0.000924191466231825, "loss": 3.7628, "step": 8930 }, { "epoch": 0.6070797662725914, "grad_norm": 2.6644089221954346, "learning_rate": 0.0009241490012229923, "loss": 3.6809, "step": 8935 }, { "epoch": 0.6074194863432532, "grad_norm": 2.013432741165161, "learning_rate": 0.0009241065362141595, "loss": 
3.6217, "step": 8940 }, { "epoch": 0.6077592064139149, "grad_norm": 1.886428952217102, "learning_rate": 0.0009240640712053268, "loss": 3.7763, "step": 8945 }, { "epoch": 0.6080989264845768, "grad_norm": 1.8970369100570679, "learning_rate": 0.0009240216061964941, "loss": 3.7825, "step": 8950 }, { "epoch": 0.6084386465552385, "grad_norm": 1.947910189628601, "learning_rate": 0.0009239791411876614, "loss": 3.6063, "step": 8955 }, { "epoch": 0.6087783666259002, "grad_norm": 1.7387669086456299, "learning_rate": 0.0009239366761788287, "loss": 3.6768, "step": 8960 }, { "epoch": 0.609118086696562, "grad_norm": 1.9406960010528564, "learning_rate": 0.000923894211169996, "loss": 3.5243, "step": 8965 }, { "epoch": 0.6094578067672238, "grad_norm": 1.9598042964935303, "learning_rate": 0.0009238517461611632, "loss": 3.6241, "step": 8970 }, { "epoch": 0.6097975268378856, "grad_norm": 2.0493996143341064, "learning_rate": 0.0009238092811523305, "loss": 3.5892, "step": 8975 }, { "epoch": 0.6101372469085473, "grad_norm": 2.1761105060577393, "learning_rate": 0.0009237668161434977, "loss": 3.4533, "step": 8980 }, { "epoch": 0.6104769669792092, "grad_norm": 1.7464373111724854, "learning_rate": 0.000923724351134665, "loss": 3.3491, "step": 8985 }, { "epoch": 0.6108166870498709, "grad_norm": 1.6261385679244995, "learning_rate": 0.0009236818861258324, "loss": 3.6159, "step": 8990 }, { "epoch": 0.6111564071205327, "grad_norm": 2.077998161315918, "learning_rate": 0.0009236394211169996, "loss": 3.9146, "step": 8995 }, { "epoch": 0.6114961271911945, "grad_norm": 1.5173449516296387, "learning_rate": 0.0009235969561081669, "loss": 3.6617, "step": 9000 }, { "epoch": 0.6118358472618562, "grad_norm": 1.9380521774291992, "learning_rate": 0.0009235544910993342, "loss": 3.8172, "step": 9005 }, { "epoch": 0.612175567332518, "grad_norm": 1.9251073598861694, "learning_rate": 0.0009235120260905014, "loss": 3.5827, "step": 9010 }, { "epoch": 0.6125152874031797, "grad_norm": 2.0318729877471924, 
"learning_rate": 0.0009234695610816687, "loss": 3.5347, "step": 9015 }, { "epoch": 0.6128550074738416, "grad_norm": 1.8040181398391724, "learning_rate": 0.000923427096072836, "loss": 3.722, "step": 9020 }, { "epoch": 0.6131947275445033, "grad_norm": 2.4696483612060547, "learning_rate": 0.0009233846310640033, "loss": 3.5136, "step": 9025 }, { "epoch": 0.6135344476151651, "grad_norm": 2.002131223678589, "learning_rate": 0.0009233421660551705, "loss": 3.484, "step": 9030 }, { "epoch": 0.6138741676858269, "grad_norm": 2.1598446369171143, "learning_rate": 0.0009232997010463379, "loss": 3.4604, "step": 9035 }, { "epoch": 0.6142138877564887, "grad_norm": 1.5970803499221802, "learning_rate": 0.0009232572360375051, "loss": 3.7638, "step": 9040 }, { "epoch": 0.6145536078271504, "grad_norm": 1.921280860900879, "learning_rate": 0.0009232147710286723, "loss": 3.6774, "step": 9045 }, { "epoch": 0.6148933278978121, "grad_norm": 2.018861770629883, "learning_rate": 0.0009231723060198397, "loss": 3.6455, "step": 9050 }, { "epoch": 0.615233047968474, "grad_norm": 2.0725796222686768, "learning_rate": 0.0009231298410110069, "loss": 3.8143, "step": 9055 }, { "epoch": 0.6155727680391357, "grad_norm": 1.920080542564392, "learning_rate": 0.0009230873760021742, "loss": 3.5949, "step": 9060 }, { "epoch": 0.6159124881097975, "grad_norm": 1.8107619285583496, "learning_rate": 0.0009230449109933416, "loss": 3.8245, "step": 9065 }, { "epoch": 0.6162522081804593, "grad_norm": 2.028353452682495, "learning_rate": 0.0009230024459845088, "loss": 3.41, "step": 9070 }, { "epoch": 0.6165919282511211, "grad_norm": 1.9023923873901367, "learning_rate": 0.000922959980975676, "loss": 3.4311, "step": 9075 }, { "epoch": 0.6169316483217828, "grad_norm": 1.6814237833023071, "learning_rate": 0.0009229175159668433, "loss": 3.6545, "step": 9080 }, { "epoch": 0.6172713683924447, "grad_norm": 1.7935514450073242, "learning_rate": 0.0009228750509580106, "loss": 3.7056, "step": 9085 }, { "epoch": 0.6176110884631064, 
"grad_norm": 1.729417324066162, "learning_rate": 0.0009228325859491778, "loss": 3.4865, "step": 9090 }, { "epoch": 0.6179508085337682, "grad_norm": 1.5807242393493652, "learning_rate": 0.0009227901209403452, "loss": 3.5302, "step": 9095 }, { "epoch": 0.6182905286044299, "grad_norm": 1.9828128814697266, "learning_rate": 0.0009227476559315125, "loss": 3.8491, "step": 9100 }, { "epoch": 0.6186302486750918, "grad_norm": 1.8082166910171509, "learning_rate": 0.0009227051909226797, "loss": 3.8528, "step": 9105 }, { "epoch": 0.6189699687457535, "grad_norm": 2.309807777404785, "learning_rate": 0.000922662725913847, "loss": 3.2265, "step": 9110 }, { "epoch": 0.6193096888164152, "grad_norm": 2.0094001293182373, "learning_rate": 0.0009226202609050143, "loss": 3.7412, "step": 9115 }, { "epoch": 0.6196494088870771, "grad_norm": 1.700287103652954, "learning_rate": 0.0009225777958961815, "loss": 3.7085, "step": 9120 }, { "epoch": 0.6199891289577388, "grad_norm": 1.7330362796783447, "learning_rate": 0.0009225353308873488, "loss": 3.8063, "step": 9125 }, { "epoch": 0.6203288490284006, "grad_norm": 2.0147793292999268, "learning_rate": 0.0009224928658785161, "loss": 3.6604, "step": 9130 }, { "epoch": 0.6206685690990623, "grad_norm": 1.698072075843811, "learning_rate": 0.0009224504008696834, "loss": 3.6988, "step": 9135 }, { "epoch": 0.6210082891697242, "grad_norm": 1.7108591794967651, "learning_rate": 0.0009224079358608507, "loss": 3.5601, "step": 9140 }, { "epoch": 0.6213480092403859, "grad_norm": 2.0179712772369385, "learning_rate": 0.0009223654708520179, "loss": 3.6897, "step": 9145 }, { "epoch": 0.6216877293110477, "grad_norm": 1.4242019653320312, "learning_rate": 0.0009223230058431852, "loss": 3.4183, "step": 9150 }, { "epoch": 0.6220274493817095, "grad_norm": 1.5444989204406738, "learning_rate": 0.0009222805408343525, "loss": 3.6575, "step": 9155 }, { "epoch": 0.6223671694523712, "grad_norm": 5.82496452331543, "learning_rate": 0.0009222380758255197, "loss": 3.6978, "step": 9160 
}, { "epoch": 0.622706889523033, "grad_norm": 2.0915589332580566, "learning_rate": 0.0009221956108166871, "loss": 3.5562, "step": 9165 }, { "epoch": 0.6230466095936948, "grad_norm": 2.292154550552368, "learning_rate": 0.0009221531458078544, "loss": 3.683, "step": 9170 }, { "epoch": 0.6233863296643566, "grad_norm": 2.1240954399108887, "learning_rate": 0.0009221106807990216, "loss": 3.6696, "step": 9175 }, { "epoch": 0.6237260497350183, "grad_norm": 2.4177961349487305, "learning_rate": 0.000922068215790189, "loss": 3.3532, "step": 9180 }, { "epoch": 0.6240657698056801, "grad_norm": 1.6669301986694336, "learning_rate": 0.0009220257507813562, "loss": 3.8383, "step": 9185 }, { "epoch": 0.6244054898763419, "grad_norm": 1.9469083547592163, "learning_rate": 0.0009219832857725234, "loss": 3.712, "step": 9190 }, { "epoch": 0.6247452099470037, "grad_norm": 1.959093689918518, "learning_rate": 0.0009219408207636908, "loss": 3.7841, "step": 9195 }, { "epoch": 0.6250849300176654, "grad_norm": 2.1276514530181885, "learning_rate": 0.0009218983557548581, "loss": 3.5525, "step": 9200 }, { "epoch": 0.6254246500883273, "grad_norm": 1.738795518875122, "learning_rate": 0.0009218558907460253, "loss": 3.6664, "step": 9205 }, { "epoch": 0.625764370158989, "grad_norm": 2.176098346710205, "learning_rate": 0.0009218134257371926, "loss": 3.8943, "step": 9210 }, { "epoch": 0.6261040902296507, "grad_norm": 2.3254446983337402, "learning_rate": 0.0009217709607283599, "loss": 3.8311, "step": 9215 }, { "epoch": 0.6264438103003126, "grad_norm": 2.159151554107666, "learning_rate": 0.0009217284957195271, "loss": 3.6437, "step": 9220 }, { "epoch": 0.6267835303709743, "grad_norm": 1.9607999324798584, "learning_rate": 0.0009216860307106944, "loss": 4.0092, "step": 9225 }, { "epoch": 0.6271232504416361, "grad_norm": 2.4139444828033447, "learning_rate": 0.0009216435657018617, "loss": 3.6137, "step": 9230 }, { "epoch": 0.6274629705122978, "grad_norm": 1.8196024894714355, "learning_rate": 0.000921601100693029, 
"loss": 3.5196, "step": 9235 }, { "epoch": 0.6278026905829597, "grad_norm": 2.0611495971679688, "learning_rate": 0.0009215586356841963, "loss": 3.6168, "step": 9240 }, { "epoch": 0.6281424106536214, "grad_norm": 2.2153944969177246, "learning_rate": 0.0009215161706753635, "loss": 3.6641, "step": 9245 }, { "epoch": 0.6284821307242832, "grad_norm": 2.3401010036468506, "learning_rate": 0.0009214737056665308, "loss": 3.5392, "step": 9250 }, { "epoch": 0.628821850794945, "grad_norm": 2.705379009246826, "learning_rate": 0.0009214312406576981, "loss": 3.4446, "step": 9255 }, { "epoch": 0.6291615708656068, "grad_norm": 1.593441367149353, "learning_rate": 0.0009213887756488653, "loss": 3.5715, "step": 9260 }, { "epoch": 0.6295012909362685, "grad_norm": 1.7347431182861328, "learning_rate": 0.0009213463106400327, "loss": 3.48, "step": 9265 }, { "epoch": 0.6298410110069302, "grad_norm": 2.6394095420837402, "learning_rate": 0.0009213038456312, "loss": 3.5064, "step": 9270 }, { "epoch": 0.6301807310775921, "grad_norm": 1.9726173877716064, "learning_rate": 0.0009212613806223672, "loss": 3.5351, "step": 9275 }, { "epoch": 0.6305204511482538, "grad_norm": 2.0869855880737305, "learning_rate": 0.0009212189156135344, "loss": 3.7068, "step": 9280 }, { "epoch": 0.6308601712189156, "grad_norm": 2.2851264476776123, "learning_rate": 0.0009211764506047018, "loss": 3.8362, "step": 9285 }, { "epoch": 0.6311998912895774, "grad_norm": 2.2010931968688965, "learning_rate": 0.000921133985595869, "loss": 3.3082, "step": 9290 }, { "epoch": 0.6315396113602392, "grad_norm": 1.6196587085723877, "learning_rate": 0.0009210915205870362, "loss": 3.4369, "step": 9295 }, { "epoch": 0.6318793314309009, "grad_norm": 2.2629830837249756, "learning_rate": 0.0009210490555782037, "loss": 3.7103, "step": 9300 }, { "epoch": 0.6322190515015628, "grad_norm": 1.6847811937332153, "learning_rate": 0.0009210065905693709, "loss": 3.6148, "step": 9305 }, { "epoch": 0.6325587715722245, "grad_norm": 1.562086582183838, 
"learning_rate": 0.0009209641255605381, "loss": 3.4462, "step": 9310 }, { "epoch": 0.6328984916428863, "grad_norm": 2.1195945739746094, "learning_rate": 0.0009209216605517055, "loss": 3.5133, "step": 9315 }, { "epoch": 0.633238211713548, "grad_norm": 1.9009767770767212, "learning_rate": 0.0009208791955428727, "loss": 3.5172, "step": 9320 }, { "epoch": 0.6335779317842098, "grad_norm": 2.0154659748077393, "learning_rate": 0.0009208367305340399, "loss": 3.7731, "step": 9325 }, { "epoch": 0.6339176518548716, "grad_norm": 1.954379677772522, "learning_rate": 0.0009207942655252072, "loss": 3.6796, "step": 9330 }, { "epoch": 0.6342573719255333, "grad_norm": 2.062129020690918, "learning_rate": 0.0009207518005163746, "loss": 3.8154, "step": 9335 }, { "epoch": 0.6345970919961952, "grad_norm": 1.7184922695159912, "learning_rate": 0.0009207093355075418, "loss": 3.7777, "step": 9340 }, { "epoch": 0.6349368120668569, "grad_norm": 1.8626660108566284, "learning_rate": 0.0009206668704987091, "loss": 3.604, "step": 9345 }, { "epoch": 0.6352765321375187, "grad_norm": 1.7326329946517944, "learning_rate": 0.0009206244054898764, "loss": 3.6344, "step": 9350 }, { "epoch": 0.6356162522081804, "grad_norm": 1.5656068325042725, "learning_rate": 0.0009205819404810436, "loss": 3.5609, "step": 9355 }, { "epoch": 0.6359559722788423, "grad_norm": 1.9431090354919434, "learning_rate": 0.0009205394754722109, "loss": 3.5573, "step": 9360 }, { "epoch": 0.636295692349504, "grad_norm": 2.153434991836548, "learning_rate": 0.0009204970104633781, "loss": 3.6901, "step": 9365 }, { "epoch": 0.6366354124201657, "grad_norm": 1.5187281370162964, "learning_rate": 0.0009204545454545455, "loss": 3.7669, "step": 9370 }, { "epoch": 0.6369751324908276, "grad_norm": 2.403784990310669, "learning_rate": 0.0009204120804457128, "loss": 3.692, "step": 9375 }, { "epoch": 0.6373148525614893, "grad_norm": 2.0424964427948, "learning_rate": 0.00092036961543688, "loss": 3.4486, "step": 9380 }, { "epoch": 0.6376545726321511, 
"grad_norm": 1.6979421377182007, "learning_rate": 0.0009203271504280473, "loss": 3.5626, "step": 9385 }, { "epoch": 0.6379942927028129, "grad_norm": 1.8683804273605347, "learning_rate": 0.0009202846854192146, "loss": 3.5963, "step": 9390 }, { "epoch": 0.6383340127734747, "grad_norm": 1.989781379699707, "learning_rate": 0.0009202422204103818, "loss": 3.6692, "step": 9395 }, { "epoch": 0.6386737328441364, "grad_norm": 1.738935112953186, "learning_rate": 0.0009201997554015491, "loss": 3.5567, "step": 9400 }, { "epoch": 0.6390134529147982, "grad_norm": 2.3861567974090576, "learning_rate": 0.0009201572903927165, "loss": 3.5627, "step": 9405 }, { "epoch": 0.63935317298546, "grad_norm": 2.063753843307495, "learning_rate": 0.0009201148253838837, "loss": 3.5914, "step": 9410 }, { "epoch": 0.6396928930561218, "grad_norm": 1.85115385055542, "learning_rate": 0.000920072360375051, "loss": 3.4583, "step": 9415 }, { "epoch": 0.6400326131267835, "grad_norm": 1.9481347799301147, "learning_rate": 0.0009200298953662183, "loss": 3.6349, "step": 9420 }, { "epoch": 0.6403723331974454, "grad_norm": 1.8758563995361328, "learning_rate": 0.0009199874303573855, "loss": 3.6681, "step": 9425 }, { "epoch": 0.6407120532681071, "grad_norm": 2.0380959510803223, "learning_rate": 0.0009199449653485527, "loss": 3.6215, "step": 9430 }, { "epoch": 0.6410517733387688, "grad_norm": 2.159525156021118, "learning_rate": 0.0009199025003397201, "loss": 3.4447, "step": 9435 }, { "epoch": 0.6413914934094306, "grad_norm": 1.839869499206543, "learning_rate": 0.0009198600353308874, "loss": 3.6153, "step": 9440 }, { "epoch": 0.6417312134800924, "grad_norm": 2.2306947708129883, "learning_rate": 0.0009198175703220546, "loss": 3.6358, "step": 9445 }, { "epoch": 0.6420709335507542, "grad_norm": 2.1153688430786133, "learning_rate": 0.000919775105313222, "loss": 3.6138, "step": 9450 }, { "epoch": 0.6424106536214159, "grad_norm": 2.4241676330566406, "learning_rate": 0.0009197326403043892, "loss": 3.5556, "step": 9455 }, { 
"epoch": 0.6427503736920778, "grad_norm": 2.3546950817108154, "learning_rate": 0.0009196901752955564, "loss": 3.7632, "step": 9460 }, { "epoch": 0.6430900937627395, "grad_norm": 1.8141297101974487, "learning_rate": 0.0009196477102867238, "loss": 3.4703, "step": 9465 }, { "epoch": 0.6434298138334013, "grad_norm": 2.1679842472076416, "learning_rate": 0.000919605245277891, "loss": 3.6355, "step": 9470 }, { "epoch": 0.6437695339040631, "grad_norm": 1.8691107034683228, "learning_rate": 0.0009195627802690583, "loss": 3.7424, "step": 9475 }, { "epoch": 0.6441092539747248, "grad_norm": 1.8621492385864258, "learning_rate": 0.0009195203152602256, "loss": 3.4203, "step": 9480 }, { "epoch": 0.6444489740453866, "grad_norm": 1.9232395887374878, "learning_rate": 0.0009194778502513929, "loss": 4.008, "step": 9485 }, { "epoch": 0.6447886941160483, "grad_norm": 1.9645395278930664, "learning_rate": 0.0009194353852425601, "loss": 3.6142, "step": 9490 }, { "epoch": 0.6451284141867102, "grad_norm": 1.8441591262817383, "learning_rate": 0.0009193929202337274, "loss": 3.5816, "step": 9495 }, { "epoch": 0.6454681342573719, "grad_norm": 1.8764678239822388, "learning_rate": 0.0009193504552248947, "loss": 3.5863, "step": 9500 }, { "epoch": 0.6458078543280337, "grad_norm": 1.604627013206482, "learning_rate": 0.0009193079902160619, "loss": 3.4572, "step": 9505 }, { "epoch": 0.6461475743986955, "grad_norm": 1.7977708578109741, "learning_rate": 0.0009192655252072293, "loss": 3.5334, "step": 9510 }, { "epoch": 0.6464872944693573, "grad_norm": 1.8154044151306152, "learning_rate": 0.0009192230601983966, "loss": 3.6519, "step": 9515 }, { "epoch": 0.646827014540019, "grad_norm": 2.0640652179718018, "learning_rate": 0.0009191805951895639, "loss": 3.7463, "step": 9520 }, { "epoch": 0.6471667346106807, "grad_norm": 2.0134220123291016, "learning_rate": 0.0009191381301807311, "loss": 3.7377, "step": 9525 }, { "epoch": 0.6475064546813426, "grad_norm": 1.5366233587265015, "learning_rate": 
0.0009190956651718983, "loss": 3.606, "step": 9530 }, { "epoch": 0.6478461747520043, "grad_norm": 1.8117351531982422, "learning_rate": 0.0009190532001630657, "loss": 3.6126, "step": 9535 }, { "epoch": 0.6481858948226661, "grad_norm": 2.007399559020996, "learning_rate": 0.0009190107351542329, "loss": 3.8087, "step": 9540 }, { "epoch": 0.6485256148933279, "grad_norm": 2.6129329204559326, "learning_rate": 0.0009189682701454002, "loss": 3.7356, "step": 9545 }, { "epoch": 0.6488653349639897, "grad_norm": 1.9403553009033203, "learning_rate": 0.0009189258051365676, "loss": 3.7657, "step": 9550 }, { "epoch": 0.6492050550346514, "grad_norm": 1.517622947692871, "learning_rate": 0.0009188833401277348, "loss": 3.5194, "step": 9555 }, { "epoch": 0.6495447751053133, "grad_norm": 1.9238899946212769, "learning_rate": 0.000918840875118902, "loss": 3.7165, "step": 9560 }, { "epoch": 0.649884495175975, "grad_norm": 1.759041666984558, "learning_rate": 0.0009187984101100694, "loss": 3.7343, "step": 9565 }, { "epoch": 0.6502242152466368, "grad_norm": 1.8674468994140625, "learning_rate": 0.0009187559451012366, "loss": 3.5376, "step": 9570 }, { "epoch": 0.6505639353172985, "grad_norm": 2.064831018447876, "learning_rate": 0.0009187134800924038, "loss": 3.5227, "step": 9575 }, { "epoch": 0.6509036553879604, "grad_norm": 2.21189284324646, "learning_rate": 0.0009186710150835712, "loss": 3.6766, "step": 9580 }, { "epoch": 0.6512433754586221, "grad_norm": 2.316236972808838, "learning_rate": 0.0009186285500747385, "loss": 3.5712, "step": 9585 }, { "epoch": 0.6515830955292838, "grad_norm": 1.4394649267196655, "learning_rate": 0.0009185860850659057, "loss": 3.5528, "step": 9590 }, { "epoch": 0.6519228155999457, "grad_norm": 2.0281388759613037, "learning_rate": 0.000918543620057073, "loss": 3.7178, "step": 9595 }, { "epoch": 0.6522625356706074, "grad_norm": 1.708493709564209, "learning_rate": 0.0009185011550482403, "loss": 3.451, "step": 9600 }, { "epoch": 0.6526022557412692, "grad_norm": 
1.8538358211517334, "learning_rate": 0.0009184586900394075, "loss": 3.7166, "step": 9605 }, { "epoch": 0.6529419758119309, "grad_norm": 2.0409176349639893, "learning_rate": 0.0009184162250305748, "loss": 3.7559, "step": 9610 }, { "epoch": 0.6532816958825928, "grad_norm": 1.898531198501587, "learning_rate": 0.0009183737600217422, "loss": 3.7996, "step": 9615 }, { "epoch": 0.6536214159532545, "grad_norm": 1.869681477546692, "learning_rate": 0.0009183312950129094, "loss": 3.5098, "step": 9620 }, { "epoch": 0.6539611360239163, "grad_norm": 2.61397647857666, "learning_rate": 0.0009182888300040767, "loss": 3.6481, "step": 9625 }, { "epoch": 0.6543008560945781, "grad_norm": 2.0204968452453613, "learning_rate": 0.0009182463649952439, "loss": 3.6442, "step": 9630 }, { "epoch": 0.6546405761652399, "grad_norm": 1.9350311756134033, "learning_rate": 0.0009182038999864112, "loss": 3.5505, "step": 9635 }, { "epoch": 0.6549802962359016, "grad_norm": 2.2359745502471924, "learning_rate": 0.0009181614349775785, "loss": 3.5887, "step": 9640 }, { "epoch": 0.6553200163065634, "grad_norm": 1.8800760507583618, "learning_rate": 0.0009181189699687457, "loss": 3.4382, "step": 9645 }, { "epoch": 0.6556597363772252, "grad_norm": 2.581307888031006, "learning_rate": 0.0009180765049599131, "loss": 3.2643, "step": 9650 }, { "epoch": 0.6559994564478869, "grad_norm": 1.9799455404281616, "learning_rate": 0.0009180340399510804, "loss": 3.6185, "step": 9655 }, { "epoch": 0.6563391765185487, "grad_norm": 1.9357798099517822, "learning_rate": 0.0009179915749422476, "loss": 3.9299, "step": 9660 }, { "epoch": 0.6566788965892105, "grad_norm": 2.046229124069214, "learning_rate": 0.0009179491099334148, "loss": 3.4741, "step": 9665 }, { "epoch": 0.6570186166598723, "grad_norm": 4.007790565490723, "learning_rate": 0.0009179066449245822, "loss": 3.1848, "step": 9670 }, { "epoch": 0.657358336730534, "grad_norm": 2.243412971496582, "learning_rate": 0.0009178641799157494, "loss": 3.4002, "step": 9675 }, { "epoch": 
0.6576980568011959, "grad_norm": 1.636687159538269, "learning_rate": 0.0009178217149069166, "loss": 3.7485, "step": 9680 }, { "epoch": 0.6580377768718576, "grad_norm": 2.0140256881713867, "learning_rate": 0.0009177792498980841, "loss": 3.4508, "step": 9685 }, { "epoch": 0.6583774969425193, "grad_norm": 2.8220622539520264, "learning_rate": 0.0009177367848892513, "loss": 3.3606, "step": 9690 }, { "epoch": 0.6587172170131811, "grad_norm": 2.246427059173584, "learning_rate": 0.0009176943198804185, "loss": 3.637, "step": 9695 }, { "epoch": 0.6590569370838429, "grad_norm": 1.819976806640625, "learning_rate": 0.0009176518548715859, "loss": 3.5685, "step": 9700 }, { "epoch": 0.6593966571545047, "grad_norm": 1.9589930772781372, "learning_rate": 0.0009176093898627531, "loss": 3.6466, "step": 9705 }, { "epoch": 0.6597363772251664, "grad_norm": 1.665411114692688, "learning_rate": 0.0009175669248539203, "loss": 3.5527, "step": 9710 }, { "epoch": 0.6600760972958283, "grad_norm": 1.8652441501617432, "learning_rate": 0.0009175244598450876, "loss": 3.6975, "step": 9715 }, { "epoch": 0.66041581736649, "grad_norm": 1.6234278678894043, "learning_rate": 0.000917481994836255, "loss": 4.1165, "step": 9720 }, { "epoch": 0.6607555374371518, "grad_norm": 1.7198771238327026, "learning_rate": 0.0009174395298274222, "loss": 3.738, "step": 9725 }, { "epoch": 0.6610952575078136, "grad_norm": 2.355131149291992, "learning_rate": 0.0009173970648185895, "loss": 3.4378, "step": 9730 }, { "epoch": 0.6614349775784754, "grad_norm": 1.7304739952087402, "learning_rate": 0.0009173545998097568, "loss": 3.522, "step": 9735 }, { "epoch": 0.6617746976491371, "grad_norm": 2.286628246307373, "learning_rate": 0.000917312134800924, "loss": 3.5552, "step": 9740 }, { "epoch": 0.6621144177197988, "grad_norm": 1.8017314672470093, "learning_rate": 0.0009172696697920913, "loss": 3.601, "step": 9745 }, { "epoch": 0.6624541377904607, "grad_norm": 2.1469433307647705, "learning_rate": 0.0009172272047832586, "loss": 3.6761, 
"step": 9750 }, { "epoch": 0.6627938578611224, "grad_norm": 2.2254998683929443, "learning_rate": 0.0009171847397744259, "loss": 3.7942, "step": 9755 }, { "epoch": 0.6631335779317842, "grad_norm": 2.3448739051818848, "learning_rate": 0.0009171422747655932, "loss": 3.5481, "step": 9760 }, { "epoch": 0.663473298002446, "grad_norm": 2.4234538078308105, "learning_rate": 0.0009170998097567604, "loss": 3.3781, "step": 9765 }, { "epoch": 0.6638130180731078, "grad_norm": 2.10581374168396, "learning_rate": 0.0009170573447479277, "loss": 3.6558, "step": 9770 }, { "epoch": 0.6641527381437695, "grad_norm": 2.6716699600219727, "learning_rate": 0.000917014879739095, "loss": 3.5537, "step": 9775 }, { "epoch": 0.6644924582144313, "grad_norm": 1.6641322374343872, "learning_rate": 0.0009169724147302622, "loss": 3.8251, "step": 9780 }, { "epoch": 0.6648321782850931, "grad_norm": 1.6253060102462769, "learning_rate": 0.0009169299497214295, "loss": 3.7019, "step": 9785 }, { "epoch": 0.6651718983557549, "grad_norm": 1.6675037145614624, "learning_rate": 0.0009168874847125969, "loss": 3.5069, "step": 9790 }, { "epoch": 0.6655116184264166, "grad_norm": 1.7827826738357544, "learning_rate": 0.0009168450197037641, "loss": 3.4215, "step": 9795 }, { "epoch": 0.6658513384970784, "grad_norm": 2.187441110610962, "learning_rate": 0.0009168025546949314, "loss": 3.4603, "step": 9800 }, { "epoch": 0.6661910585677402, "grad_norm": 1.8954871892929077, "learning_rate": 0.0009167600896860987, "loss": 3.557, "step": 9805 }, { "epoch": 0.6665307786384019, "grad_norm": 1.6667758226394653, "learning_rate": 0.0009167176246772659, "loss": 3.5938, "step": 9810 }, { "epoch": 0.6668704987090638, "grad_norm": 2.134892702102661, "learning_rate": 0.0009166751596684331, "loss": 3.8565, "step": 9815 }, { "epoch": 0.6672102187797255, "grad_norm": 2.247432231903076, "learning_rate": 0.0009166326946596006, "loss": 3.9117, "step": 9820 }, { "epoch": 0.6675499388503873, "grad_norm": 2.274089813232422, "learning_rate": 
0.0009165902296507678, "loss": 3.722, "step": 9825 }, { "epoch": 0.667889658921049, "grad_norm": 1.8941168785095215, "learning_rate": 0.000916547764641935, "loss": 3.7037, "step": 9830 }, { "epoch": 0.6682293789917109, "grad_norm": 1.8548154830932617, "learning_rate": 0.0009165052996331024, "loss": 3.5663, "step": 9835 }, { "epoch": 0.6685690990623726, "grad_norm": 1.9541778564453125, "learning_rate": 0.0009164628346242696, "loss": 3.5625, "step": 9840 }, { "epoch": 0.6689088191330343, "grad_norm": 2.135904550552368, "learning_rate": 0.0009164203696154368, "loss": 3.7652, "step": 9845 }, { "epoch": 0.6692485392036962, "grad_norm": 1.8334624767303467, "learning_rate": 0.0009163779046066042, "loss": 3.3807, "step": 9850 }, { "epoch": 0.6695882592743579, "grad_norm": 2.2429709434509277, "learning_rate": 0.0009163354395977715, "loss": 3.8373, "step": 9855 }, { "epoch": 0.6699279793450197, "grad_norm": 1.5965996980667114, "learning_rate": 0.0009162929745889388, "loss": 3.8216, "step": 9860 }, { "epoch": 0.6702676994156814, "grad_norm": 1.5553334951400757, "learning_rate": 0.000916250509580106, "loss": 3.7536, "step": 9865 }, { "epoch": 0.6706074194863433, "grad_norm": 2.071202278137207, "learning_rate": 0.0009162080445712733, "loss": 3.4764, "step": 9870 }, { "epoch": 0.670947139557005, "grad_norm": 1.7689276933670044, "learning_rate": 0.0009161655795624406, "loss": 3.92, "step": 9875 }, { "epoch": 0.6712868596276668, "grad_norm": 1.83269464969635, "learning_rate": 0.0009161231145536078, "loss": 3.6806, "step": 9880 }, { "epoch": 0.6716265796983286, "grad_norm": 3.2043917179107666, "learning_rate": 0.0009160806495447751, "loss": 3.7189, "step": 9885 }, { "epoch": 0.6719662997689904, "grad_norm": 1.578902006149292, "learning_rate": 0.0009160381845359425, "loss": 3.5098, "step": 9890 }, { "epoch": 0.6723060198396521, "grad_norm": 1.805031180381775, "learning_rate": 0.0009159957195271097, "loss": 3.5409, "step": 9895 }, { "epoch": 0.672645739910314, "grad_norm": 
1.8258405923843384, "learning_rate": 0.000915953254518277, "loss": 3.7446, "step": 9900 }, { "epoch": 0.6729854599809757, "grad_norm": 1.7562475204467773, "learning_rate": 0.0009159107895094443, "loss": 3.4365, "step": 9905 }, { "epoch": 0.6733251800516374, "grad_norm": 1.5874475240707397, "learning_rate": 0.0009158683245006115, "loss": 3.6691, "step": 9910 }, { "epoch": 0.6736649001222992, "grad_norm": 1.6595121622085571, "learning_rate": 0.0009158258594917787, "loss": 3.7419, "step": 9915 }, { "epoch": 0.674004620192961, "grad_norm": 2.095226526260376, "learning_rate": 0.0009157833944829461, "loss": 3.6548, "step": 9920 }, { "epoch": 0.6743443402636228, "grad_norm": 1.8038252592086792, "learning_rate": 0.0009157409294741134, "loss": 3.3994, "step": 9925 }, { "epoch": 0.6746840603342845, "grad_norm": 1.5220344066619873, "learning_rate": 0.0009156984644652806, "loss": 3.5764, "step": 9930 }, { "epoch": 0.6750237804049464, "grad_norm": 2.1713550090789795, "learning_rate": 0.000915655999456448, "loss": 3.5724, "step": 9935 }, { "epoch": 0.6753635004756081, "grad_norm": 2.2031097412109375, "learning_rate": 0.0009156135344476152, "loss": 3.589, "step": 9940 }, { "epoch": 0.6757032205462699, "grad_norm": 1.9058914184570312, "learning_rate": 0.0009155710694387824, "loss": 3.6171, "step": 9945 }, { "epoch": 0.6760429406169316, "grad_norm": 1.9562128782272339, "learning_rate": 0.0009155286044299498, "loss": 3.7003, "step": 9950 }, { "epoch": 0.6763826606875935, "grad_norm": 1.8068536520004272, "learning_rate": 0.000915486139421117, "loss": 3.5492, "step": 9955 }, { "epoch": 0.6767223807582552, "grad_norm": 1.9395283460617065, "learning_rate": 0.0009154436744122843, "loss": 3.5882, "step": 9960 }, { "epoch": 0.6770621008289169, "grad_norm": 2.096832036972046, "learning_rate": 0.0009154012094034517, "loss": 3.7, "step": 9965 }, { "epoch": 0.6774018208995788, "grad_norm": 2.3709304332733154, "learning_rate": 0.0009153587443946189, "loss": 3.5796, "step": 9970 }, { "epoch": 
0.6777415409702405, "grad_norm": 2.146432399749756, "learning_rate": 0.0009153162793857861, "loss": 3.5342, "step": 9975 }, { "epoch": 0.6780812610409023, "grad_norm": 1.7643839120864868, "learning_rate": 0.0009152738143769534, "loss": 3.6593, "step": 9980 }, { "epoch": 0.6784209811115641, "grad_norm": 2.1002345085144043, "learning_rate": 0.0009152313493681207, "loss": 3.5405, "step": 9985 }, { "epoch": 0.6787607011822259, "grad_norm": 1.7355833053588867, "learning_rate": 0.0009151888843592879, "loss": 3.612, "step": 9990 }, { "epoch": 0.6791004212528876, "grad_norm": 1.896955966949463, "learning_rate": 0.0009151464193504553, "loss": 3.4019, "step": 9995 }, { "epoch": 0.6794401413235494, "grad_norm": 1.7860370874404907, "learning_rate": 0.0009151039543416226, "loss": 3.5336, "step": 10000 }, { "epoch": 0.6797798613942112, "grad_norm": 2.091878652572632, "learning_rate": 0.0009150614893327898, "loss": 3.8086, "step": 10005 }, { "epoch": 0.680119581464873, "grad_norm": 1.9924163818359375, "learning_rate": 0.0009150190243239571, "loss": 3.6274, "step": 10010 }, { "epoch": 0.6804593015355347, "grad_norm": 1.8523551225662231, "learning_rate": 0.0009149765593151243, "loss": 3.6067, "step": 10015 }, { "epoch": 0.6807990216061965, "grad_norm": 1.8082249164581299, "learning_rate": 0.0009149340943062916, "loss": 3.5393, "step": 10020 }, { "epoch": 0.6811387416768583, "grad_norm": 1.9327292442321777, "learning_rate": 0.0009148916292974589, "loss": 3.635, "step": 10025 }, { "epoch": 0.68147846174752, "grad_norm": 1.9776281118392944, "learning_rate": 0.0009148491642886262, "loss": 3.5195, "step": 10030 }, { "epoch": 0.6818181818181818, "grad_norm": 1.7856097221374512, "learning_rate": 0.0009148066992797935, "loss": 3.8734, "step": 10035 }, { "epoch": 0.6821579018888436, "grad_norm": 1.8247390985488892, "learning_rate": 0.0009147642342709608, "loss": 3.5705, "step": 10040 }, { "epoch": 0.6824976219595054, "grad_norm": 1.9871809482574463, "learning_rate": 0.000914721769262128, 
"loss": 3.5476, "step": 10045 }, { "epoch": 0.6828373420301671, "grad_norm": 2.064030170440674, "learning_rate": 0.0009146793042532952, "loss": 3.4337, "step": 10050 }, { "epoch": 0.683177062100829, "grad_norm": 1.6590059995651245, "learning_rate": 0.0009146368392444626, "loss": 3.6604, "step": 10055 }, { "epoch": 0.6835167821714907, "grad_norm": 1.7259809970855713, "learning_rate": 0.0009145943742356298, "loss": 3.8212, "step": 10060 }, { "epoch": 0.6838565022421524, "grad_norm": 1.793843150138855, "learning_rate": 0.0009145519092267971, "loss": 3.6857, "step": 10065 }, { "epoch": 0.6841962223128143, "grad_norm": 1.5499526262283325, "learning_rate": 0.0009145094442179645, "loss": 3.8621, "step": 10070 }, { "epoch": 0.684535942383476, "grad_norm": 3.2797980308532715, "learning_rate": 0.0009144669792091317, "loss": 3.3612, "step": 10075 }, { "epoch": 0.6848756624541378, "grad_norm": 2.009361505508423, "learning_rate": 0.0009144245142002989, "loss": 3.5561, "step": 10080 }, { "epoch": 0.6852153825247995, "grad_norm": 1.9171264171600342, "learning_rate": 0.0009143820491914663, "loss": 3.535, "step": 10085 }, { "epoch": 0.6855551025954614, "grad_norm": 1.683921217918396, "learning_rate": 0.0009143395841826335, "loss": 3.5103, "step": 10090 }, { "epoch": 0.6858948226661231, "grad_norm": 2.1731653213500977, "learning_rate": 0.0009142971191738007, "loss": 3.5834, "step": 10095 }, { "epoch": 0.6862345427367849, "grad_norm": 2.8313629627227783, "learning_rate": 0.0009142546541649682, "loss": 3.787, "step": 10100 }, { "epoch": 0.6865742628074467, "grad_norm": 2.130007743835449, "learning_rate": 0.0009142121891561354, "loss": 3.4683, "step": 10105 }, { "epoch": 0.6869139828781085, "grad_norm": 1.8987630605697632, "learning_rate": 0.0009141697241473026, "loss": 3.4692, "step": 10110 }, { "epoch": 0.6872537029487702, "grad_norm": 1.9904451370239258, "learning_rate": 0.0009141272591384699, "loss": 3.7642, "step": 10115 }, { "epoch": 0.6875934230194319, "grad_norm": 
2.4702179431915283, "learning_rate": 0.0009140847941296372, "loss": 3.3595, "step": 10120 }, { "epoch": 0.6879331430900938, "grad_norm": 1.8890141248703003, "learning_rate": 0.0009140423291208044, "loss": 3.7804, "step": 10125 }, { "epoch": 0.6882728631607555, "grad_norm": 1.8018927574157715, "learning_rate": 0.0009139998641119717, "loss": 3.4304, "step": 10130 }, { "epoch": 0.6886125832314173, "grad_norm": 1.4470690488815308, "learning_rate": 0.0009139573991031391, "loss": 3.5699, "step": 10135 }, { "epoch": 0.6889523033020791, "grad_norm": 2.110337495803833, "learning_rate": 0.0009139149340943063, "loss": 3.4165, "step": 10140 }, { "epoch": 0.6892920233727409, "grad_norm": 1.8703052997589111, "learning_rate": 0.0009138724690854736, "loss": 3.6706, "step": 10145 }, { "epoch": 0.6896317434434026, "grad_norm": 2.042797088623047, "learning_rate": 0.0009138300040766409, "loss": 3.6811, "step": 10150 }, { "epoch": 0.6899714635140645, "grad_norm": 2.104506731033325, "learning_rate": 0.0009137875390678081, "loss": 3.6544, "step": 10155 }, { "epoch": 0.6903111835847262, "grad_norm": 2.1965579986572266, "learning_rate": 0.0009137450740589754, "loss": 3.5836, "step": 10160 }, { "epoch": 0.690650903655388, "grad_norm": 2.053215742111206, "learning_rate": 0.0009137026090501426, "loss": 3.6322, "step": 10165 }, { "epoch": 0.6909906237260497, "grad_norm": 2.587296724319458, "learning_rate": 0.00091366014404131, "loss": 3.5721, "step": 10170 }, { "epoch": 0.6913303437967115, "grad_norm": 1.948530912399292, "learning_rate": 0.0009136176790324773, "loss": 3.4736, "step": 10175 }, { "epoch": 0.6916700638673733, "grad_norm": 2.2580935955047607, "learning_rate": 0.0009135752140236445, "loss": 3.7941, "step": 10180 }, { "epoch": 0.692009783938035, "grad_norm": 1.7330050468444824, "learning_rate": 0.0009135327490148118, "loss": 3.2698, "step": 10185 }, { "epoch": 0.6923495040086969, "grad_norm": 2.006439685821533, "learning_rate": 0.0009134902840059791, "loss": 3.5016, "step": 10190 }, 
{ "epoch": 0.6926892240793586, "grad_norm": 1.7400978803634644, "learning_rate": 0.0009134478189971463, "loss": 3.6627, "step": 10195 }, { "epoch": 0.6930289441500204, "grad_norm": 1.9065417051315308, "learning_rate": 0.0009134053539883137, "loss": 3.566, "step": 10200 }, { "epoch": 0.6933686642206821, "grad_norm": 2.2186577320098877, "learning_rate": 0.000913362888979481, "loss": 3.3707, "step": 10205 }, { "epoch": 0.693708384291344, "grad_norm": 1.7848314046859741, "learning_rate": 0.0009133204239706482, "loss": 3.6328, "step": 10210 }, { "epoch": 0.6940481043620057, "grad_norm": 1.8709899187088013, "learning_rate": 0.0009132779589618155, "loss": 3.6895, "step": 10215 }, { "epoch": 0.6943878244326674, "grad_norm": 2.3451240062713623, "learning_rate": 0.0009132354939529828, "loss": 3.726, "step": 10220 }, { "epoch": 0.6947275445033293, "grad_norm": 3.02061128616333, "learning_rate": 0.00091319302894415, "loss": 3.6104, "step": 10225 }, { "epoch": 0.695067264573991, "grad_norm": 1.9037772417068481, "learning_rate": 0.0009131505639353173, "loss": 3.654, "step": 10230 }, { "epoch": 0.6954069846446528, "grad_norm": 1.971064805984497, "learning_rate": 0.0009131080989264846, "loss": 3.6445, "step": 10235 }, { "epoch": 0.6957467047153146, "grad_norm": 1.8714619874954224, "learning_rate": 0.0009130656339176519, "loss": 3.6841, "step": 10240 }, { "epoch": 0.6960864247859764, "grad_norm": 1.714072585105896, "learning_rate": 0.0009130231689088192, "loss": 3.7093, "step": 10245 }, { "epoch": 0.6964261448566381, "grad_norm": 1.8281292915344238, "learning_rate": 0.0009129807038999865, "loss": 3.6058, "step": 10250 }, { "epoch": 0.6967658649272999, "grad_norm": 2.45699143409729, "learning_rate": 0.0009129382388911537, "loss": 3.6494, "step": 10255 }, { "epoch": 0.6971055849979617, "grad_norm": 2.2351229190826416, "learning_rate": 0.000912895773882321, "loss": 3.6707, "step": 10260 }, { "epoch": 0.6974453050686235, "grad_norm": 2.138144016265869, "learning_rate": 
0.0009128533088734882, "loss": 3.6117, "step": 10265 }, { "epoch": 0.6977850251392852, "grad_norm": 1.8122375011444092, "learning_rate": 0.0009128108438646555, "loss": 3.8448, "step": 10270 }, { "epoch": 0.698124745209947, "grad_norm": 1.9831247329711914, "learning_rate": 0.0009127683788558229, "loss": 3.4452, "step": 10275 }, { "epoch": 0.6984644652806088, "grad_norm": 2.5940585136413574, "learning_rate": 0.0009127259138469901, "loss": 3.7665, "step": 10280 }, { "epoch": 0.6988041853512705, "grad_norm": 1.623205542564392, "learning_rate": 0.0009126834488381574, "loss": 3.5731, "step": 10285 }, { "epoch": 0.6991439054219323, "grad_norm": 2.096752166748047, "learning_rate": 0.0009126409838293247, "loss": 3.5198, "step": 10290 }, { "epoch": 0.6994836254925941, "grad_norm": 1.9139105081558228, "learning_rate": 0.0009125985188204919, "loss": 3.3806, "step": 10295 }, { "epoch": 0.6998233455632559, "grad_norm": 5.014660358428955, "learning_rate": 0.0009125560538116591, "loss": 3.6042, "step": 10300 }, { "epoch": 0.7001630656339176, "grad_norm": 2.3214874267578125, "learning_rate": 0.0009125135888028266, "loss": 3.6521, "step": 10305 }, { "epoch": 0.7005027857045795, "grad_norm": 2.1162681579589844, "learning_rate": 0.0009124711237939938, "loss": 3.5479, "step": 10310 }, { "epoch": 0.7008425057752412, "grad_norm": 2.2798385620117188, "learning_rate": 0.000912428658785161, "loss": 3.6574, "step": 10315 }, { "epoch": 0.701182225845903, "grad_norm": 2.1733169555664062, "learning_rate": 0.0009123861937763284, "loss": 3.4657, "step": 10320 }, { "epoch": 0.7015219459165648, "grad_norm": 1.995898723602295, "learning_rate": 0.0009123437287674956, "loss": 3.6769, "step": 10325 }, { "epoch": 0.7018616659872265, "grad_norm": 2.3269784450531006, "learning_rate": 0.0009123012637586628, "loss": 3.7559, "step": 10330 }, { "epoch": 0.7022013860578883, "grad_norm": 1.632681131362915, "learning_rate": 0.0009122587987498302, "loss": 3.5317, "step": 10335 }, { "epoch": 0.70254110612855, 
"grad_norm": 1.8258620500564575, "learning_rate": 0.0009122163337409975, "loss": 3.4337, "step": 10340 }, { "epoch": 0.7028808261992119, "grad_norm": 2.0490148067474365, "learning_rate": 0.0009121738687321647, "loss": 3.7553, "step": 10345 }, { "epoch": 0.7032205462698736, "grad_norm": 1.8930089473724365, "learning_rate": 0.000912131403723332, "loss": 3.5252, "step": 10350 }, { "epoch": 0.7035602663405354, "grad_norm": 2.1005570888519287, "learning_rate": 0.0009120889387144993, "loss": 3.6744, "step": 10355 }, { "epoch": 0.7038999864111972, "grad_norm": 1.9561069011688232, "learning_rate": 0.0009120464737056665, "loss": 3.7175, "step": 10360 }, { "epoch": 0.704239706481859, "grad_norm": 2.710188865661621, "learning_rate": 0.0009120040086968338, "loss": 3.3865, "step": 10365 }, { "epoch": 0.7045794265525207, "grad_norm": 2.929851531982422, "learning_rate": 0.0009119615436880011, "loss": 3.7574, "step": 10370 }, { "epoch": 0.7049191466231824, "grad_norm": 1.8179961442947388, "learning_rate": 0.0009119190786791684, "loss": 3.55, "step": 10375 }, { "epoch": 0.7052588666938443, "grad_norm": 1.848914384841919, "learning_rate": 0.0009118766136703357, "loss": 3.7695, "step": 10380 }, { "epoch": 0.705598586764506, "grad_norm": 1.751220941543579, "learning_rate": 0.000911834148661503, "loss": 3.5973, "step": 10385 }, { "epoch": 0.7059383068351678, "grad_norm": 1.9702467918395996, "learning_rate": 0.0009117916836526702, "loss": 3.6429, "step": 10390 }, { "epoch": 0.7062780269058296, "grad_norm": 2.3736073970794678, "learning_rate": 0.0009117492186438375, "loss": 3.704, "step": 10395 }, { "epoch": 0.7066177469764914, "grad_norm": 1.6919418573379517, "learning_rate": 0.0009117067536350047, "loss": 3.7221, "step": 10400 }, { "epoch": 0.7069574670471531, "grad_norm": 1.800899863243103, "learning_rate": 0.000911664288626172, "loss": 3.4549, "step": 10405 }, { "epoch": 0.707297187117815, "grad_norm": 1.794323444366455, "learning_rate": 0.0009116218236173394, "loss": 3.7119, "step": 
10410 }, { "epoch": 0.7076369071884767, "grad_norm": 1.6690022945404053, "learning_rate": 0.0009115793586085066, "loss": 3.6564, "step": 10415 }, { "epoch": 0.7079766272591385, "grad_norm": 2.146390914916992, "learning_rate": 0.0009115368935996739, "loss": 3.7557, "step": 10420 }, { "epoch": 0.7083163473298002, "grad_norm": 1.7134017944335938, "learning_rate": 0.0009114944285908412, "loss": 3.7154, "step": 10425 }, { "epoch": 0.708656067400462, "grad_norm": 1.8383632898330688, "learning_rate": 0.0009114519635820084, "loss": 3.7657, "step": 10430 }, { "epoch": 0.7089957874711238, "grad_norm": 1.625365138053894, "learning_rate": 0.0009114094985731757, "loss": 3.7028, "step": 10435 }, { "epoch": 0.7093355075417855, "grad_norm": 1.740931749343872, "learning_rate": 0.000911367033564343, "loss": 3.7439, "step": 10440 }, { "epoch": 0.7096752276124474, "grad_norm": 1.6602027416229248, "learning_rate": 0.0009113245685555103, "loss": 3.6666, "step": 10445 }, { "epoch": 0.7100149476831091, "grad_norm": 1.691372036933899, "learning_rate": 0.0009112821035466775, "loss": 3.6089, "step": 10450 }, { "epoch": 0.7103546677537709, "grad_norm": 1.7991154193878174, "learning_rate": 0.0009112396385378449, "loss": 3.795, "step": 10455 }, { "epoch": 0.7106943878244326, "grad_norm": 1.8170804977416992, "learning_rate": 0.0009111971735290121, "loss": 3.6344, "step": 10460 }, { "epoch": 0.7110341078950945, "grad_norm": 1.8529475927352905, "learning_rate": 0.0009111547085201793, "loss": 3.5909, "step": 10465 }, { "epoch": 0.7113738279657562, "grad_norm": 1.4645298719406128, "learning_rate": 0.0009111122435113467, "loss": 3.7356, "step": 10470 }, { "epoch": 0.711713548036418, "grad_norm": 2.3397796154022217, "learning_rate": 0.0009110697785025139, "loss": 3.5927, "step": 10475 }, { "epoch": 0.7120532681070798, "grad_norm": 1.6523410081863403, "learning_rate": 0.0009110273134936812, "loss": 3.6023, "step": 10480 }, { "epoch": 0.7123929881777415, "grad_norm": 2.149212598800659, "learning_rate": 
0.0009109848484848486, "loss": 3.3476, "step": 10485 }, { "epoch": 0.7127327082484033, "grad_norm": 1.9639641046524048, "learning_rate": 0.0009109423834760158, "loss": 3.5876, "step": 10490 }, { "epoch": 0.7130724283190651, "grad_norm": 2.0714495182037354, "learning_rate": 0.000910899918467183, "loss": 3.671, "step": 10495 }, { "epoch": 0.7134121483897269, "grad_norm": 1.883967399597168, "learning_rate": 0.0009108574534583503, "loss": 3.6083, "step": 10500 }, { "epoch": 0.7137518684603886, "grad_norm": 2.130661964416504, "learning_rate": 0.0009108149884495176, "loss": 3.723, "step": 10505 }, { "epoch": 0.7140915885310504, "grad_norm": 1.6292574405670166, "learning_rate": 0.0009107725234406848, "loss": 3.8572, "step": 10510 }, { "epoch": 0.7144313086017122, "grad_norm": 1.602768898010254, "learning_rate": 0.0009107300584318522, "loss": 3.4999, "step": 10515 }, { "epoch": 0.714771028672374, "grad_norm": 2.36784029006958, "learning_rate": 0.0009106875934230195, "loss": 3.4851, "step": 10520 }, { "epoch": 0.7151107487430357, "grad_norm": 2.0951650142669678, "learning_rate": 0.0009106451284141867, "loss": 3.7308, "step": 10525 }, { "epoch": 0.7154504688136976, "grad_norm": 1.7596150636672974, "learning_rate": 0.000910602663405354, "loss": 3.4576, "step": 10530 }, { "epoch": 0.7157901888843593, "grad_norm": 1.4382071495056152, "learning_rate": 0.0009105601983965213, "loss": 3.7745, "step": 10535 }, { "epoch": 0.716129908955021, "grad_norm": 1.9657474756240845, "learning_rate": 0.0009105177333876886, "loss": 3.7187, "step": 10540 }, { "epoch": 0.7164696290256828, "grad_norm": 2.738626480102539, "learning_rate": 0.0009104752683788558, "loss": 3.4787, "step": 10545 }, { "epoch": 0.7168093490963446, "grad_norm": 2.3133561611175537, "learning_rate": 0.0009104328033700231, "loss": 3.7139, "step": 10550 }, { "epoch": 0.7171490691670064, "grad_norm": 1.7862216234207153, "learning_rate": 0.0009103903383611905, "loss": 3.7086, "step": 10555 }, { "epoch": 0.7174887892376681, 
"grad_norm": 1.7054983377456665, "learning_rate": 0.0009103478733523577, "loss": 3.6795, "step": 10560 }, { "epoch": 0.71782850930833, "grad_norm": 1.4356526136398315, "learning_rate": 0.0009103054083435249, "loss": 3.6798, "step": 10565 }, { "epoch": 0.7181682293789917, "grad_norm": 1.7894598245620728, "learning_rate": 0.0009102629433346923, "loss": 3.7266, "step": 10570 }, { "epoch": 0.7185079494496535, "grad_norm": 2.8376688957214355, "learning_rate": 0.0009102204783258595, "loss": 3.5116, "step": 10575 }, { "epoch": 0.7188476695203153, "grad_norm": 2.3265721797943115, "learning_rate": 0.0009101780133170267, "loss": 3.7971, "step": 10580 }, { "epoch": 0.719187389590977, "grad_norm": 2.426877737045288, "learning_rate": 0.0009101355483081942, "loss": 3.4669, "step": 10585 }, { "epoch": 0.7195271096616388, "grad_norm": 2.2409563064575195, "learning_rate": 0.0009100930832993614, "loss": 3.6981, "step": 10590 }, { "epoch": 0.7198668297323005, "grad_norm": 1.863362193107605, "learning_rate": 0.0009100506182905286, "loss": 3.4176, "step": 10595 }, { "epoch": 0.7202065498029624, "grad_norm": 1.7523043155670166, "learning_rate": 0.000910008153281696, "loss": 3.498, "step": 10600 }, { "epoch": 0.7205462698736241, "grad_norm": 1.9645369052886963, "learning_rate": 0.0009099656882728632, "loss": 3.7369, "step": 10605 }, { "epoch": 0.7208859899442859, "grad_norm": 1.5704066753387451, "learning_rate": 0.0009099232232640304, "loss": 3.6303, "step": 10610 }, { "epoch": 0.7212257100149477, "grad_norm": 1.4634732007980347, "learning_rate": 0.0009098807582551977, "loss": 3.5822, "step": 10615 }, { "epoch": 0.7215654300856095, "grad_norm": 2.008650064468384, "learning_rate": 0.0009098382932463651, "loss": 3.6085, "step": 10620 }, { "epoch": 0.7219051501562712, "grad_norm": 1.879660725593567, "learning_rate": 0.0009097958282375323, "loss": 3.8085, "step": 10625 }, { "epoch": 0.722244870226933, "grad_norm": 1.715406894683838, "learning_rate": 0.0009097533632286996, "loss": 3.5985, 
"step": 10630 }, { "epoch": 0.7225845902975948, "grad_norm": 1.7367660999298096, "learning_rate": 0.0009097108982198669, "loss": 3.4981, "step": 10635 }, { "epoch": 0.7229243103682566, "grad_norm": 1.83036208152771, "learning_rate": 0.0009096684332110341, "loss": 3.4585, "step": 10640 }, { "epoch": 0.7232640304389183, "grad_norm": 2.332327127456665, "learning_rate": 0.0009096259682022014, "loss": 3.6665, "step": 10645 }, { "epoch": 0.7236037505095801, "grad_norm": 2.7516748905181885, "learning_rate": 0.0009095835031933686, "loss": 3.5921, "step": 10650 }, { "epoch": 0.7239434705802419, "grad_norm": 2.319422721862793, "learning_rate": 0.000909541038184536, "loss": 3.6347, "step": 10655 }, { "epoch": 0.7242831906509036, "grad_norm": 2.1724915504455566, "learning_rate": 0.0009094985731757033, "loss": 3.3233, "step": 10660 }, { "epoch": 0.7246229107215655, "grad_norm": 1.8603650331497192, "learning_rate": 0.0009094561081668705, "loss": 3.6286, "step": 10665 }, { "epoch": 0.7249626307922272, "grad_norm": 2.1480672359466553, "learning_rate": 0.0009094136431580378, "loss": 3.6375, "step": 10670 }, { "epoch": 0.725302350862889, "grad_norm": 2.0457582473754883, "learning_rate": 0.0009093711781492051, "loss": 3.7682, "step": 10675 }, { "epoch": 0.7256420709335507, "grad_norm": 1.898259162902832, "learning_rate": 0.0009093287131403723, "loss": 3.7742, "step": 10680 }, { "epoch": 0.7259817910042126, "grad_norm": 2.4292335510253906, "learning_rate": 0.0009092862481315395, "loss": 3.7227, "step": 10685 }, { "epoch": 0.7263215110748743, "grad_norm": 1.8403369188308716, "learning_rate": 0.000909243783122707, "loss": 3.6955, "step": 10690 }, { "epoch": 0.726661231145536, "grad_norm": 1.7183939218521118, "learning_rate": 0.0009092013181138742, "loss": 3.7917, "step": 10695 }, { "epoch": 0.7270009512161979, "grad_norm": 2.177769184112549, "learning_rate": 0.0009091588531050414, "loss": 3.898, "step": 10700 }, { "epoch": 0.7273406712868596, "grad_norm": 1.9397484064102173, 
"learning_rate": 0.0009091163880962088, "loss": 3.707, "step": 10705 }, { "epoch": 0.7276803913575214, "grad_norm": 2.1744701862335205, "learning_rate": 0.000909073923087376, "loss": 3.85, "step": 10710 }, { "epoch": 0.7280201114281831, "grad_norm": 1.6131958961486816, "learning_rate": 0.0009090314580785432, "loss": 3.8364, "step": 10715 }, { "epoch": 0.728359831498845, "grad_norm": 2.528141975402832, "learning_rate": 0.0009089889930697106, "loss": 3.4407, "step": 10720 }, { "epoch": 0.7286995515695067, "grad_norm": 1.5347998142242432, "learning_rate": 0.0009089465280608779, "loss": 3.6451, "step": 10725 }, { "epoch": 0.7290392716401685, "grad_norm": 1.8358412981033325, "learning_rate": 0.0009089040630520451, "loss": 3.4182, "step": 10730 }, { "epoch": 0.7293789917108303, "grad_norm": 1.8234546184539795, "learning_rate": 0.0009088615980432125, "loss": 3.784, "step": 10735 }, { "epoch": 0.7297187117814921, "grad_norm": 1.873008370399475, "learning_rate": 0.0009088191330343797, "loss": 3.6609, "step": 10740 }, { "epoch": 0.7300584318521538, "grad_norm": 1.9893032312393188, "learning_rate": 0.0009087766680255469, "loss": 3.5413, "step": 10745 }, { "epoch": 0.7303981519228157, "grad_norm": 2.8633341789245605, "learning_rate": 0.0009087342030167142, "loss": 3.791, "step": 10750 }, { "epoch": 0.7307378719934774, "grad_norm": 2.335963487625122, "learning_rate": 0.0009086917380078815, "loss": 3.6544, "step": 10755 }, { "epoch": 0.7310775920641391, "grad_norm": 1.8646308183670044, "learning_rate": 0.0009086492729990488, "loss": 3.8243, "step": 10760 }, { "epoch": 0.7314173121348009, "grad_norm": 1.9486544132232666, "learning_rate": 0.0009086068079902161, "loss": 3.6959, "step": 10765 }, { "epoch": 0.7317570322054627, "grad_norm": 1.8509575128555298, "learning_rate": 0.0009085643429813834, "loss": 3.5265, "step": 10770 }, { "epoch": 0.7320967522761245, "grad_norm": 1.7517584562301636, "learning_rate": 0.0009085218779725506, "loss": 3.8692, "step": 10775 }, { "epoch": 
0.7324364723467862, "grad_norm": 1.6734857559204102, "learning_rate": 0.0009084794129637179, "loss": 3.6091, "step": 10780 }, { "epoch": 0.7327761924174481, "grad_norm": 2.011200189590454, "learning_rate": 0.0009084369479548851, "loss": 3.7248, "step": 10785 }, { "epoch": 0.7331159124881098, "grad_norm": 1.8084676265716553, "learning_rate": 0.0009083944829460524, "loss": 3.5607, "step": 10790 }, { "epoch": 0.7334556325587716, "grad_norm": 2.0499143600463867, "learning_rate": 0.0009083520179372198, "loss": 3.6217, "step": 10795 }, { "epoch": 0.7337953526294333, "grad_norm": 2.1930716037750244, "learning_rate": 0.000908309552928387, "loss": 3.6006, "step": 10800 }, { "epoch": 0.7341350727000951, "grad_norm": 2.1140332221984863, "learning_rate": 0.0009082670879195543, "loss": 3.742, "step": 10805 }, { "epoch": 0.7344747927707569, "grad_norm": 1.898756504058838, "learning_rate": 0.0009082246229107216, "loss": 3.8565, "step": 10810 }, { "epoch": 0.7348145128414186, "grad_norm": 2.0299503803253174, "learning_rate": 0.0009081821579018888, "loss": 3.6468, "step": 10815 }, { "epoch": 0.7351542329120805, "grad_norm": 1.5961848497390747, "learning_rate": 0.0009081396928930561, "loss": 3.7061, "step": 10820 }, { "epoch": 0.7354939529827422, "grad_norm": 1.5035616159439087, "learning_rate": 0.0009080972278842234, "loss": 3.6391, "step": 10825 }, { "epoch": 0.735833673053404, "grad_norm": 1.8490874767303467, "learning_rate": 0.0009080547628753907, "loss": 3.4957, "step": 10830 }, { "epoch": 0.7361733931240658, "grad_norm": 1.910025715827942, "learning_rate": 0.000908012297866558, "loss": 3.6567, "step": 10835 }, { "epoch": 0.7365131131947276, "grad_norm": 2.139986753463745, "learning_rate": 0.0009079698328577253, "loss": 3.5497, "step": 10840 }, { "epoch": 0.7368528332653893, "grad_norm": 2.2482376098632812, "learning_rate": 0.0009079273678488925, "loss": 3.5219, "step": 10845 }, { "epoch": 0.737192553336051, "grad_norm": 2.001877546310425, "learning_rate": 
0.0009078849028400597, "loss": 3.6484, "step": 10850 }, { "epoch": 0.7375322734067129, "grad_norm": 1.6756467819213867, "learning_rate": 0.0009078424378312271, "loss": 3.6146, "step": 10855 }, { "epoch": 0.7378719934773746, "grad_norm": 1.750274419784546, "learning_rate": 0.0009077999728223943, "loss": 3.7, "step": 10860 }, { "epoch": 0.7382117135480364, "grad_norm": 1.5451383590698242, "learning_rate": 0.0009077575078135616, "loss": 3.7566, "step": 10865 }, { "epoch": 0.7385514336186982, "grad_norm": 1.755075454711914, "learning_rate": 0.000907715042804729, "loss": 3.52, "step": 10870 }, { "epoch": 0.73889115368936, "grad_norm": 2.0861642360687256, "learning_rate": 0.0009076725777958962, "loss": 3.7125, "step": 10875 }, { "epoch": 0.7392308737600217, "grad_norm": 1.83343505859375, "learning_rate": 0.0009076301127870635, "loss": 3.6267, "step": 10880 }, { "epoch": 0.7395705938306835, "grad_norm": 1.71404230594635, "learning_rate": 0.0009075876477782308, "loss": 3.6007, "step": 10885 }, { "epoch": 0.7399103139013453, "grad_norm": 1.9214481115341187, "learning_rate": 0.000907545182769398, "loss": 3.5727, "step": 10890 }, { "epoch": 0.7402500339720071, "grad_norm": 2.1148037910461426, "learning_rate": 0.0009075112107623319, "loss": 3.6124, "step": 10895 }, { "epoch": 0.7405897540426688, "grad_norm": 1.990259051322937, "learning_rate": 0.0009074687457534991, "loss": 3.5375, "step": 10900 }, { "epoch": 0.7409294741133307, "grad_norm": 1.7084485292434692, "learning_rate": 0.0009074262807446664, "loss": 3.7054, "step": 10905 }, { "epoch": 0.7412691941839924, "grad_norm": 1.7774074077606201, "learning_rate": 0.0009073838157358337, "loss": 3.6561, "step": 10910 }, { "epoch": 0.7416089142546541, "grad_norm": 1.4523849487304688, "learning_rate": 0.0009073413507270009, "loss": 3.8194, "step": 10915 }, { "epoch": 0.741948634325316, "grad_norm": 2.351632595062256, "learning_rate": 0.0009072988857181683, "loss": 3.6464, "step": 10920 }, { "epoch": 0.7422883543959777, "grad_norm": 
2.1939423084259033, "learning_rate": 0.0009072564207093356, "loss": 3.6436, "step": 10925 }, { "epoch": 0.7426280744666395, "grad_norm": 2.143824577331543, "learning_rate": 0.0009072139557005028, "loss": 3.7823, "step": 10930 }, { "epoch": 0.7429677945373012, "grad_norm": 2.3169965744018555, "learning_rate": 0.00090717149069167, "loss": 3.517, "step": 10935 }, { "epoch": 0.7433075146079631, "grad_norm": 2.6005613803863525, "learning_rate": 0.0009071290256828374, "loss": 3.553, "step": 10940 }, { "epoch": 0.7436472346786248, "grad_norm": 1.7440285682678223, "learning_rate": 0.0009070865606740046, "loss": 3.5015, "step": 10945 }, { "epoch": 0.7439869547492866, "grad_norm": 1.8596141338348389, "learning_rate": 0.0009070440956651718, "loss": 4.0236, "step": 10950 }, { "epoch": 0.7443266748199484, "grad_norm": 1.7573013305664062, "learning_rate": 0.0009070016306563393, "loss": 3.5488, "step": 10955 }, { "epoch": 0.7446663948906102, "grad_norm": 1.4739881753921509, "learning_rate": 0.0009069591656475065, "loss": 3.4648, "step": 10960 }, { "epoch": 0.7450061149612719, "grad_norm": 1.7784475088119507, "learning_rate": 0.0009069167006386737, "loss": 3.7902, "step": 10965 }, { "epoch": 0.7453458350319336, "grad_norm": 2.4465508460998535, "learning_rate": 0.0009068742356298411, "loss": 3.5058, "step": 10970 }, { "epoch": 0.7456855551025955, "grad_norm": 2.02844500541687, "learning_rate": 0.0009068317706210083, "loss": 3.6587, "step": 10975 }, { "epoch": 0.7460252751732572, "grad_norm": 2.6300158500671387, "learning_rate": 0.0009067893056121755, "loss": 3.4902, "step": 10980 }, { "epoch": 0.746364995243919, "grad_norm": 2.130206346511841, "learning_rate": 0.0009067468406033428, "loss": 3.6423, "step": 10985 }, { "epoch": 0.7467047153145808, "grad_norm": 1.7555897235870361, "learning_rate": 0.0009067043755945102, "loss": 3.539, "step": 10990 }, { "epoch": 0.7470444353852426, "grad_norm": 1.7145676612854004, "learning_rate": 0.0009066619105856774, "loss": 3.3899, "step": 10995 
}, { "epoch": 0.7473841554559043, "grad_norm": 1.7191431522369385, "learning_rate": 0.0009066194455768447, "loss": 3.4475, "step": 11000 }, { "epoch": 0.7477238755265662, "grad_norm": 2.037879228591919, "learning_rate": 0.000906576980568012, "loss": 3.591, "step": 11005 }, { "epoch": 0.7480635955972279, "grad_norm": 1.9523876905441284, "learning_rate": 0.0009065345155591792, "loss": 3.6053, "step": 11010 }, { "epoch": 0.7484033156678896, "grad_norm": 1.896470546722412, "learning_rate": 0.0009064920505503465, "loss": 3.3472, "step": 11015 }, { "epoch": 0.7487430357385514, "grad_norm": 2.1291098594665527, "learning_rate": 0.0009064495855415137, "loss": 3.6203, "step": 11020 }, { "epoch": 0.7490827558092132, "grad_norm": 1.9301557540893555, "learning_rate": 0.0009064071205326811, "loss": 3.615, "step": 11025 }, { "epoch": 0.749422475879875, "grad_norm": 2.14108943939209, "learning_rate": 0.0009063646555238484, "loss": 3.6924, "step": 11030 }, { "epoch": 0.7497621959505367, "grad_norm": 1.6213929653167725, "learning_rate": 0.0009063221905150156, "loss": 3.6554, "step": 11035 }, { "epoch": 0.7501019160211986, "grad_norm": 1.950795292854309, "learning_rate": 0.0009062797255061829, "loss": 3.7187, "step": 11040 }, { "epoch": 0.7504416360918603, "grad_norm": 1.599382758140564, "learning_rate": 0.0009062372604973502, "loss": 3.5862, "step": 11045 }, { "epoch": 0.7507813561625221, "grad_norm": 1.8204560279846191, "learning_rate": 0.0009061947954885174, "loss": 3.6283, "step": 11050 }, { "epoch": 0.7511210762331838, "grad_norm": 1.9420442581176758, "learning_rate": 0.0009061523304796847, "loss": 3.3236, "step": 11055 }, { "epoch": 0.7514607963038457, "grad_norm": 1.931004285812378, "learning_rate": 0.0009061098654708521, "loss": 3.6736, "step": 11060 }, { "epoch": 0.7518005163745074, "grad_norm": 2.0424704551696777, "learning_rate": 0.0009060674004620193, "loss": 3.7566, "step": 11065 }, { "epoch": 0.7521402364451691, "grad_norm": 2.2057294845581055, "learning_rate": 
0.0009060249354531865, "loss": 3.6067, "step": 11070 }, { "epoch": 0.752479956515831, "grad_norm": 1.772132396697998, "learning_rate": 0.0009059824704443539, "loss": 3.7301, "step": 11075 }, { "epoch": 0.7528196765864927, "grad_norm": 1.6747239828109741, "learning_rate": 0.0009059400054355211, "loss": 3.4931, "step": 11080 }, { "epoch": 0.7531593966571545, "grad_norm": 2.2662813663482666, "learning_rate": 0.0009058975404266884, "loss": 3.3548, "step": 11085 }, { "epoch": 0.7534991167278163, "grad_norm": 2.574580430984497, "learning_rate": 0.0009058550754178557, "loss": 3.71, "step": 11090 }, { "epoch": 0.7538388367984781, "grad_norm": 1.5443856716156006, "learning_rate": 0.000905812610409023, "loss": 3.7372, "step": 11095 }, { "epoch": 0.7541785568691398, "grad_norm": 1.9933788776397705, "learning_rate": 0.0009057701454001903, "loss": 3.4298, "step": 11100 }, { "epoch": 0.7545182769398016, "grad_norm": 1.5038034915924072, "learning_rate": 0.0009057276803913576, "loss": 3.6279, "step": 11105 }, { "epoch": 0.7548579970104634, "grad_norm": 1.7499479055404663, "learning_rate": 0.0009056852153825248, "loss": 3.5768, "step": 11110 }, { "epoch": 0.7551977170811252, "grad_norm": 1.9721225500106812, "learning_rate": 0.0009056427503736921, "loss": 3.5791, "step": 11115 }, { "epoch": 0.7555374371517869, "grad_norm": 2.139810800552368, "learning_rate": 0.0009056002853648593, "loss": 3.6571, "step": 11120 }, { "epoch": 0.7558771572224487, "grad_norm": 1.8378849029541016, "learning_rate": 0.0009055578203560266, "loss": 3.756, "step": 11125 }, { "epoch": 0.7562168772931105, "grad_norm": 1.777510643005371, "learning_rate": 0.000905515355347194, "loss": 3.6353, "step": 11130 }, { "epoch": 0.7565565973637722, "grad_norm": 1.9284536838531494, "learning_rate": 0.0009054728903383612, "loss": 3.781, "step": 11135 }, { "epoch": 0.756896317434434, "grad_norm": 1.722133755683899, "learning_rate": 0.0009054304253295285, "loss": 3.774, "step": 11140 }, { "epoch": 0.7572360375050958, 
"grad_norm": 2.0399675369262695, "learning_rate": 0.0009053879603206958, "loss": 3.391, "step": 11145 }, { "epoch": 0.7575757575757576, "grad_norm": 2.5460045337677, "learning_rate": 0.000905345495311863, "loss": 3.907, "step": 11150 }, { "epoch": 0.7579154776464193, "grad_norm": 2.459224224090576, "learning_rate": 0.0009053030303030303, "loss": 3.647, "step": 11155 }, { "epoch": 0.7582551977170812, "grad_norm": 1.7415517568588257, "learning_rate": 0.0009052605652941977, "loss": 3.6155, "step": 11160 }, { "epoch": 0.7585949177877429, "grad_norm": 2.3432228565216064, "learning_rate": 0.0009052181002853649, "loss": 3.4307, "step": 11165 }, { "epoch": 0.7589346378584046, "grad_norm": 1.8679935932159424, "learning_rate": 0.0009051756352765322, "loss": 3.532, "step": 11170 }, { "epoch": 0.7592743579290665, "grad_norm": 1.8737506866455078, "learning_rate": 0.0009051331702676995, "loss": 3.6384, "step": 11175 }, { "epoch": 0.7596140779997282, "grad_norm": 2.5906856060028076, "learning_rate": 0.0009050907052588667, "loss": 3.5465, "step": 11180 }, { "epoch": 0.75995379807039, "grad_norm": 1.7574783563613892, "learning_rate": 0.0009050482402500339, "loss": 3.5665, "step": 11185 }, { "epoch": 0.7602935181410517, "grad_norm": 1.7231266498565674, "learning_rate": 0.0009050057752412013, "loss": 3.6087, "step": 11190 }, { "epoch": 0.7606332382117136, "grad_norm": 1.6252933740615845, "learning_rate": 0.0009049633102323686, "loss": 3.6932, "step": 11195 }, { "epoch": 0.7609729582823753, "grad_norm": 2.032081365585327, "learning_rate": 0.0009049208452235358, "loss": 3.5154, "step": 11200 }, { "epoch": 0.7613126783530371, "grad_norm": 2.055819511413574, "learning_rate": 0.0009048783802147032, "loss": 3.7025, "step": 11205 }, { "epoch": 0.7616523984236989, "grad_norm": 1.8409039974212646, "learning_rate": 0.0009048359152058704, "loss": 3.6654, "step": 11210 }, { "epoch": 0.7619921184943607, "grad_norm": 2.071221351623535, "learning_rate": 0.0009047934501970376, "loss": 3.5147, 
"step": 11215 }, { "epoch": 0.7623318385650224, "grad_norm": 1.673843264579773, "learning_rate": 0.000904750985188205, "loss": 3.6764, "step": 11220 }, { "epoch": 0.7626715586356841, "grad_norm": 1.8410176038742065, "learning_rate": 0.0009047085201793722, "loss": 3.4837, "step": 11225 }, { "epoch": 0.763011278706346, "grad_norm": 1.7720872163772583, "learning_rate": 0.0009046660551705395, "loss": 3.7645, "step": 11230 }, { "epoch": 0.7633509987770077, "grad_norm": 1.8151825666427612, "learning_rate": 0.0009046235901617068, "loss": 3.7564, "step": 11235 }, { "epoch": 0.7636907188476695, "grad_norm": 2.0932505130767822, "learning_rate": 0.0009045811251528741, "loss": 3.5163, "step": 11240 }, { "epoch": 0.7640304389183313, "grad_norm": 2.2056214809417725, "learning_rate": 0.0009045386601440413, "loss": 3.4205, "step": 11245 }, { "epoch": 0.7643701589889931, "grad_norm": 1.9558162689208984, "learning_rate": 0.0009044961951352086, "loss": 3.5369, "step": 11250 }, { "epoch": 0.7647098790596548, "grad_norm": 1.6008434295654297, "learning_rate": 0.0009044537301263759, "loss": 3.7628, "step": 11255 }, { "epoch": 0.7650495991303167, "grad_norm": 1.7449761629104614, "learning_rate": 0.0009044112651175431, "loss": 3.592, "step": 11260 }, { "epoch": 0.7653893192009784, "grad_norm": 1.8924920558929443, "learning_rate": 0.0009043688001087105, "loss": 3.5955, "step": 11265 }, { "epoch": 0.7657290392716402, "grad_norm": 2.313901662826538, "learning_rate": 0.0009043263350998778, "loss": 3.6383, "step": 11270 }, { "epoch": 0.7660687593423019, "grad_norm": 2.106180191040039, "learning_rate": 0.000904283870091045, "loss": 3.6536, "step": 11275 }, { "epoch": 0.7664084794129638, "grad_norm": 2.1356372833251953, "learning_rate": 0.0009042414050822123, "loss": 3.5245, "step": 11280 }, { "epoch": 0.7667481994836255, "grad_norm": 2.2463390827178955, "learning_rate": 0.0009041989400733795, "loss": 3.5894, "step": 11285 }, { "epoch": 0.7670879195542872, "grad_norm": 1.7360639572143555, 
"learning_rate": 0.0009041564750645468, "loss": 3.5854, "step": 11290 }, { "epoch": 0.7674276396249491, "grad_norm": 1.6888504028320312, "learning_rate": 0.0009041140100557141, "loss": 3.4689, "step": 11295 }, { "epoch": 0.7677673596956108, "grad_norm": 1.3801379203796387, "learning_rate": 0.0009040715450468814, "loss": 3.5714, "step": 11300 }, { "epoch": 0.7681070797662726, "grad_norm": 2.102229356765747, "learning_rate": 0.0009040290800380487, "loss": 3.4685, "step": 11305 }, { "epoch": 0.7684467998369343, "grad_norm": 2.088855743408203, "learning_rate": 0.000903986615029216, "loss": 3.7554, "step": 11310 }, { "epoch": 0.7687865199075962, "grad_norm": 2.419267177581787, "learning_rate": 0.0009039441500203832, "loss": 3.5539, "step": 11315 }, { "epoch": 0.7691262399782579, "grad_norm": 1.7632737159729004, "learning_rate": 0.0009039016850115504, "loss": 3.7493, "step": 11320 }, { "epoch": 0.7694659600489197, "grad_norm": 2.1826207637786865, "learning_rate": 0.0009038592200027178, "loss": 3.5489, "step": 11325 }, { "epoch": 0.7698056801195815, "grad_norm": 2.2420310974121094, "learning_rate": 0.000903816754993885, "loss": 3.4704, "step": 11330 }, { "epoch": 0.7701454001902432, "grad_norm": 2.32153058052063, "learning_rate": 0.0009037742899850523, "loss": 3.5541, "step": 11335 }, { "epoch": 0.770485120260905, "grad_norm": 2.2202765941619873, "learning_rate": 0.0009037318249762197, "loss": 3.4315, "step": 11340 }, { "epoch": 0.7708248403315668, "grad_norm": 2.3718080520629883, "learning_rate": 0.0009036893599673869, "loss": 3.6683, "step": 11345 }, { "epoch": 0.7711645604022286, "grad_norm": 1.3896256685256958, "learning_rate": 0.0009036468949585541, "loss": 3.6106, "step": 11350 }, { "epoch": 0.7715042804728903, "grad_norm": 2.0195717811584473, "learning_rate": 0.0009036044299497215, "loss": 3.4329, "step": 11355 }, { "epoch": 0.7718440005435521, "grad_norm": 2.628326654434204, "learning_rate": 0.0009035619649408887, "loss": 3.8735, "step": 11360 }, { "epoch": 
0.7721837206142139, "grad_norm": 1.5161054134368896, "learning_rate": 0.0009035194999320559, "loss": 3.6593, "step": 11365 }, { "epoch": 0.7725234406848757, "grad_norm": 2.059781551361084, "learning_rate": 0.0009034770349232234, "loss": 3.8584, "step": 11370 }, { "epoch": 0.7728631607555374, "grad_norm": 2.4836363792419434, "learning_rate": 0.0009034345699143906, "loss": 3.5866, "step": 11375 }, { "epoch": 0.7732028808261993, "grad_norm": 2.5561530590057373, "learning_rate": 0.0009033921049055578, "loss": 3.4725, "step": 11380 }, { "epoch": 0.773542600896861, "grad_norm": 1.98677659034729, "learning_rate": 0.0009033496398967251, "loss": 3.6092, "step": 11385 }, { "epoch": 0.7738823209675227, "grad_norm": 2.0382020473480225, "learning_rate": 0.0009033071748878924, "loss": 3.6999, "step": 11390 }, { "epoch": 0.7742220410381845, "grad_norm": 2.533143997192383, "learning_rate": 0.0009032647098790596, "loss": 3.6595, "step": 11395 }, { "epoch": 0.7745617611088463, "grad_norm": 1.7000397443771362, "learning_rate": 0.0009032222448702269, "loss": 3.5663, "step": 11400 }, { "epoch": 0.7749014811795081, "grad_norm": 1.999706745147705, "learning_rate": 0.0009031797798613943, "loss": 3.7025, "step": 11405 }, { "epoch": 0.7752412012501698, "grad_norm": 1.691595435142517, "learning_rate": 0.0009031373148525615, "loss": 3.7676, "step": 11410 }, { "epoch": 0.7755809213208317, "grad_norm": 2.5046608448028564, "learning_rate": 0.0009030948498437288, "loss": 3.8336, "step": 11415 }, { "epoch": 0.7759206413914934, "grad_norm": 2.3163342475891113, "learning_rate": 0.000903052384834896, "loss": 3.734, "step": 11420 }, { "epoch": 0.7762603614621552, "grad_norm": 2.4621081352233887, "learning_rate": 0.0009030099198260634, "loss": 3.4122, "step": 11425 }, { "epoch": 0.776600081532817, "grad_norm": 1.9546284675598145, "learning_rate": 0.0009029674548172306, "loss": 3.3936, "step": 11430 }, { "epoch": 0.7769398016034788, "grad_norm": 1.9773472547531128, "learning_rate": 
0.0009029249898083978, "loss": 3.5308, "step": 11435 }, { "epoch": 0.7772795216741405, "grad_norm": 2.2461860179901123, "learning_rate": 0.0009028825247995653, "loss": 3.7, "step": 11440 }, { "epoch": 0.7776192417448022, "grad_norm": 1.8959810733795166, "learning_rate": 0.0009028400597907325, "loss": 3.4668, "step": 11445 }, { "epoch": 0.7779589618154641, "grad_norm": 2.336677074432373, "learning_rate": 0.0009027975947818997, "loss": 3.6577, "step": 11450 }, { "epoch": 0.7782986818861258, "grad_norm": 1.9661316871643066, "learning_rate": 0.0009027551297730671, "loss": 3.7557, "step": 11455 }, { "epoch": 0.7786384019567876, "grad_norm": 1.6758650541305542, "learning_rate": 0.0009027126647642343, "loss": 3.4371, "step": 11460 }, { "epoch": 0.7789781220274494, "grad_norm": 1.7062751054763794, "learning_rate": 0.0009026701997554015, "loss": 3.6621, "step": 11465 }, { "epoch": 0.7793178420981112, "grad_norm": 2.0651895999908447, "learning_rate": 0.0009026277347465688, "loss": 3.6554, "step": 11470 }, { "epoch": 0.7796575621687729, "grad_norm": 1.993229866027832, "learning_rate": 0.0009025852697377362, "loss": 3.694, "step": 11475 }, { "epoch": 0.7799972822394347, "grad_norm": 1.9070930480957031, "learning_rate": 0.0009025428047289034, "loss": 3.6737, "step": 11480 }, { "epoch": 0.7803370023100965, "grad_norm": 1.7999343872070312, "learning_rate": 0.0009025003397200707, "loss": 3.5913, "step": 11485 }, { "epoch": 0.7806767223807582, "grad_norm": 2.5319900512695312, "learning_rate": 0.000902457874711238, "loss": 3.53, "step": 11490 }, { "epoch": 0.78101644245142, "grad_norm": 1.6945593357086182, "learning_rate": 0.0009024154097024052, "loss": 3.6565, "step": 11495 }, { "epoch": 0.7813561625220818, "grad_norm": 2.3059866428375244, "learning_rate": 0.0009023729446935725, "loss": 3.2428, "step": 11500 }, { "epoch": 0.7816958825927436, "grad_norm": 1.6982353925704956, "learning_rate": 0.0009023304796847398, "loss": 3.5385, "step": 11505 }, { "epoch": 0.7820356026634053, 
"grad_norm": 1.6216310262680054, "learning_rate": 0.0009022880146759071, "loss": 3.8403, "step": 11510 }, { "epoch": 0.7823753227340672, "grad_norm": 1.6761631965637207, "learning_rate": 0.0009022455496670744, "loss": 3.5761, "step": 11515 }, { "epoch": 0.7827150428047289, "grad_norm": 1.8639732599258423, "learning_rate": 0.0009022030846582416, "loss": 3.7054, "step": 11520 }, { "epoch": 0.7830547628753907, "grad_norm": 1.9460583925247192, "learning_rate": 0.0009021606196494089, "loss": 3.8167, "step": 11525 }, { "epoch": 0.7833944829460524, "grad_norm": 1.8750625848770142, "learning_rate": 0.0009021181546405762, "loss": 3.5459, "step": 11530 }, { "epoch": 0.7837342030167143, "grad_norm": 1.8744643926620483, "learning_rate": 0.0009020756896317434, "loss": 3.4699, "step": 11535 }, { "epoch": 0.784073923087376, "grad_norm": 1.544968843460083, "learning_rate": 0.0009020332246229107, "loss": 3.836, "step": 11540 }, { "epoch": 0.7844136431580377, "grad_norm": 1.6526787281036377, "learning_rate": 0.0009019907596140781, "loss": 3.6203, "step": 11545 }, { "epoch": 0.7847533632286996, "grad_norm": 1.4729514122009277, "learning_rate": 0.0009019482946052453, "loss": 3.6703, "step": 11550 }, { "epoch": 0.7850930832993613, "grad_norm": 1.7165935039520264, "learning_rate": 0.0009019058295964126, "loss": 3.6412, "step": 11555 }, { "epoch": 0.7854328033700231, "grad_norm": 1.6337451934814453, "learning_rate": 0.0009018633645875799, "loss": 3.7164, "step": 11560 }, { "epoch": 0.7857725234406848, "grad_norm": 1.7865502834320068, "learning_rate": 0.0009018208995787471, "loss": 3.5676, "step": 11565 }, { "epoch": 0.7861122435113467, "grad_norm": 3.023987293243408, "learning_rate": 0.0009017784345699143, "loss": 3.4591, "step": 11570 }, { "epoch": 0.7864519635820084, "grad_norm": 2.1436991691589355, "learning_rate": 0.0009017359695610817, "loss": 3.5924, "step": 11575 }, { "epoch": 0.7867916836526702, "grad_norm": 2.0719311237335205, "learning_rate": 0.000901693504552249, "loss": 
3.9291, "step": 11580 }, { "epoch": 0.787131403723332, "grad_norm": 1.8154362440109253, "learning_rate": 0.0009016510395434162, "loss": 3.5121, "step": 11585 }, { "epoch": 0.7874711237939938, "grad_norm": 1.7528142929077148, "learning_rate": 0.0009016085745345836, "loss": 3.5241, "step": 11590 }, { "epoch": 0.7878108438646555, "grad_norm": 2.078572988510132, "learning_rate": 0.0009015661095257508, "loss": 3.4511, "step": 11595 }, { "epoch": 0.7881505639353173, "grad_norm": 1.8110109567642212, "learning_rate": 0.000901523644516918, "loss": 3.5846, "step": 11600 }, { "epoch": 0.7884902840059791, "grad_norm": 1.9590286016464233, "learning_rate": 0.0009014811795080854, "loss": 3.8224, "step": 11605 }, { "epoch": 0.7888300040766408, "grad_norm": 2.0140938758850098, "learning_rate": 0.0009014387144992526, "loss": 3.769, "step": 11610 }, { "epoch": 0.7891697241473026, "grad_norm": 2.0441534519195557, "learning_rate": 0.0009013962494904199, "loss": 3.4704, "step": 11615 }, { "epoch": 0.7895094442179644, "grad_norm": 2.1704530715942383, "learning_rate": 0.0009013537844815872, "loss": 3.6122, "step": 11620 }, { "epoch": 0.7898491642886262, "grad_norm": 1.8016612529754639, "learning_rate": 0.0009013113194727545, "loss": 3.4876, "step": 11625 }, { "epoch": 0.7901888843592879, "grad_norm": 2.000324249267578, "learning_rate": 0.0009012688544639217, "loss": 3.5597, "step": 11630 }, { "epoch": 0.7905286044299498, "grad_norm": 1.9233386516571045, "learning_rate": 0.000901226389455089, "loss": 3.568, "step": 11635 }, { "epoch": 0.7908683245006115, "grad_norm": 1.965821623802185, "learning_rate": 0.0009011839244462563, "loss": 3.7921, "step": 11640 }, { "epoch": 0.7912080445712733, "grad_norm": 3.5384466648101807, "learning_rate": 0.0009011414594374235, "loss": 3.5235, "step": 11645 }, { "epoch": 0.791547764641935, "grad_norm": 2.1392483711242676, "learning_rate": 0.0009010989944285909, "loss": 3.7282, "step": 11650 }, { "epoch": 0.7918874847125968, "grad_norm": 2.445974111557007, 
"learning_rate": 0.0009010565294197582, "loss": 3.7105, "step": 11655 }, { "epoch": 0.7922272047832586, "grad_norm": 2.304281711578369, "learning_rate": 0.0009010140644109254, "loss": 3.6816, "step": 11660 }, { "epoch": 0.7925669248539203, "grad_norm": 2.148695707321167, "learning_rate": 0.0009009715994020927, "loss": 3.5337, "step": 11665 }, { "epoch": 0.7929066449245822, "grad_norm": 1.5591646432876587, "learning_rate": 0.0009009291343932599, "loss": 3.6068, "step": 11670 }, { "epoch": 0.7932463649952439, "grad_norm": 1.9296822547912598, "learning_rate": 0.0009008866693844272, "loss": 3.5716, "step": 11675 }, { "epoch": 0.7935860850659057, "grad_norm": 1.747256875038147, "learning_rate": 0.0009008442043755946, "loss": 3.7159, "step": 11680 }, { "epoch": 0.7939258051365675, "grad_norm": 1.7918370962142944, "learning_rate": 0.0009008017393667618, "loss": 3.7968, "step": 11685 }, { "epoch": 0.7942655252072293, "grad_norm": 1.991623044013977, "learning_rate": 0.0009007592743579291, "loss": 3.4853, "step": 11690 }, { "epoch": 0.794605245277891, "grad_norm": 1.7981168031692505, "learning_rate": 0.0009007168093490964, "loss": 3.6659, "step": 11695 }, { "epoch": 0.7949449653485527, "grad_norm": 2.025810718536377, "learning_rate": 0.0009006743443402636, "loss": 3.624, "step": 11700 }, { "epoch": 0.7952846854192146, "grad_norm": 2.835524082183838, "learning_rate": 0.0009006318793314308, "loss": 3.4766, "step": 11705 }, { "epoch": 0.7956244054898763, "grad_norm": 1.8831655979156494, "learning_rate": 0.0009005894143225982, "loss": 3.4398, "step": 11710 }, { "epoch": 0.7959641255605381, "grad_norm": 1.734561800956726, "learning_rate": 0.0009005469493137655, "loss": 3.6986, "step": 11715 }, { "epoch": 0.7963038456311999, "grad_norm": 2.03184175491333, "learning_rate": 0.0009005044843049327, "loss": 3.7409, "step": 11720 }, { "epoch": 0.7966435657018617, "grad_norm": 1.8913283348083496, "learning_rate": 0.0009004620192961001, "loss": 3.5482, "step": 11725 }, { "epoch": 
0.7969832857725234, "grad_norm": 2.0057997703552246, "learning_rate": 0.0009004195542872673, "loss": 3.8052, "step": 11730 }, { "epoch": 0.7973230058431852, "grad_norm": 2.0463571548461914, "learning_rate": 0.0009003770892784345, "loss": 3.6448, "step": 11735 }, { "epoch": 0.797662725913847, "grad_norm": 2.1070733070373535, "learning_rate": 0.0009003346242696019, "loss": 3.5787, "step": 11740 }, { "epoch": 0.7980024459845088, "grad_norm": 2.099184513092041, "learning_rate": 0.0009002921592607691, "loss": 3.6908, "step": 11745 }, { "epoch": 0.7983421660551705, "grad_norm": 2.2858376502990723, "learning_rate": 0.0009002496942519364, "loss": 3.8707, "step": 11750 }, { "epoch": 0.7986818861258324, "grad_norm": 1.90115225315094, "learning_rate": 0.0009002072292431038, "loss": 3.3701, "step": 11755 }, { "epoch": 0.7990216061964941, "grad_norm": 2.7023792266845703, "learning_rate": 0.000900164764234271, "loss": 3.7096, "step": 11760 }, { "epoch": 0.7993613262671558, "grad_norm": 1.9805102348327637, "learning_rate": 0.0009001222992254383, "loss": 3.7318, "step": 11765 }, { "epoch": 0.7997010463378177, "grad_norm": 1.4424973726272583, "learning_rate": 0.0009000798342166055, "loss": 3.5794, "step": 11770 }, { "epoch": 0.8000407664084794, "grad_norm": 1.5489274263381958, "learning_rate": 0.0009000373692077728, "loss": 3.5139, "step": 11775 }, { "epoch": 0.8003804864791412, "grad_norm": 1.7774899005889893, "learning_rate": 0.0008999949041989401, "loss": 3.847, "step": 11780 }, { "epoch": 0.8007202065498029, "grad_norm": 2.0797324180603027, "learning_rate": 0.0008999524391901074, "loss": 3.7492, "step": 11785 }, { "epoch": 0.8010599266204648, "grad_norm": 1.5339552164077759, "learning_rate": 0.0008999099741812747, "loss": 3.297, "step": 11790 }, { "epoch": 0.8013996466911265, "grad_norm": 1.7367514371871948, "learning_rate": 0.000899867509172442, "loss": 3.5797, "step": 11795 }, { "epoch": 0.8017393667617883, "grad_norm": 1.9136229753494263, "learning_rate": 
0.0008998250441636092, "loss": 3.7253, "step": 11800 }, { "epoch": 0.8020790868324501, "grad_norm": 2.2528388500213623, "learning_rate": 0.0008997825791547764, "loss": 3.7591, "step": 11805 }, { "epoch": 0.8024188069031118, "grad_norm": 2.4774303436279297, "learning_rate": 0.0008997401141459438, "loss": 3.4959, "step": 11810 }, { "epoch": 0.8027585269737736, "grad_norm": 2.6450467109680176, "learning_rate": 0.000899697649137111, "loss": 3.4471, "step": 11815 }, { "epoch": 0.8030982470444353, "grad_norm": 1.835218071937561, "learning_rate": 0.0008996551841282783, "loss": 3.6009, "step": 11820 }, { "epoch": 0.8034379671150972, "grad_norm": 1.848799705505371, "learning_rate": 0.0008996127191194457, "loss": 3.8115, "step": 11825 }, { "epoch": 0.8037776871857589, "grad_norm": 1.7055764198303223, "learning_rate": 0.0008995702541106129, "loss": 3.6742, "step": 11830 }, { "epoch": 0.8041174072564207, "grad_norm": 1.4879423379898071, "learning_rate": 0.0008995277891017801, "loss": 3.6926, "step": 11835 }, { "epoch": 0.8044571273270825, "grad_norm": 2.1723389625549316, "learning_rate": 0.0008994853240929475, "loss": 3.6753, "step": 11840 }, { "epoch": 0.8047968473977443, "grad_norm": 1.9588642120361328, "learning_rate": 0.0008994428590841147, "loss": 3.5431, "step": 11845 }, { "epoch": 0.805136567468406, "grad_norm": 2.551112651824951, "learning_rate": 0.0008994003940752819, "loss": 3.634, "step": 11850 }, { "epoch": 0.8054762875390679, "grad_norm": 2.4570093154907227, "learning_rate": 0.0008993579290664494, "loss": 3.4693, "step": 11855 }, { "epoch": 0.8058160076097296, "grad_norm": 3.001985788345337, "learning_rate": 0.0008993154640576166, "loss": 3.382, "step": 11860 }, { "epoch": 0.8061557276803913, "grad_norm": 1.9961090087890625, "learning_rate": 0.0008992729990487838, "loss": 3.5699, "step": 11865 }, { "epoch": 0.8064954477510531, "grad_norm": 1.6222307682037354, "learning_rate": 0.0008992305340399511, "loss": 3.619, "step": 11870 }, { "epoch": 0.8068351678217149, 
"grad_norm": 1.7385674715042114, "learning_rate": 0.0008991880690311184, "loss": 3.713, "step": 11875 }, { "epoch": 0.8071748878923767, "grad_norm": 1.8533552885055542, "learning_rate": 0.0008991456040222856, "loss": 3.1989, "step": 11880 }, { "epoch": 0.8075146079630384, "grad_norm": 1.721778392791748, "learning_rate": 0.0008991031390134529, "loss": 3.7123, "step": 11885 }, { "epoch": 0.8078543280337003, "grad_norm": 1.7394253015518188, "learning_rate": 0.0008990606740046203, "loss": 3.3908, "step": 11890 }, { "epoch": 0.808194048104362, "grad_norm": 1.9676493406295776, "learning_rate": 0.0008990182089957875, "loss": 3.5208, "step": 11895 }, { "epoch": 0.8085337681750238, "grad_norm": 2.504173517227173, "learning_rate": 0.0008989757439869548, "loss": 3.5097, "step": 11900 }, { "epoch": 0.8088734882456855, "grad_norm": 2.252190113067627, "learning_rate": 0.000898933278978122, "loss": 3.5412, "step": 11905 }, { "epoch": 0.8092132083163474, "grad_norm": 1.7393004894256592, "learning_rate": 0.0008988908139692893, "loss": 3.6375, "step": 11910 }, { "epoch": 0.8095529283870091, "grad_norm": 1.7160481214523315, "learning_rate": 0.0008988483489604566, "loss": 3.667, "step": 11915 }, { "epoch": 0.8098926484576708, "grad_norm": 1.67184579372406, "learning_rate": 0.0008988058839516238, "loss": 3.3855, "step": 11920 }, { "epoch": 0.8102323685283327, "grad_norm": 1.585878610610962, "learning_rate": 0.0008987634189427912, "loss": 3.5643, "step": 11925 }, { "epoch": 0.8105720885989944, "grad_norm": 1.821559190750122, "learning_rate": 0.0008987209539339585, "loss": 3.5961, "step": 11930 }, { "epoch": 0.8109118086696562, "grad_norm": 1.5489000082015991, "learning_rate": 0.0008986784889251257, "loss": 3.7099, "step": 11935 }, { "epoch": 0.811251528740318, "grad_norm": 1.5754029750823975, "learning_rate": 0.000898636023916293, "loss": 3.4019, "step": 11940 }, { "epoch": 0.8115912488109798, "grad_norm": 1.7309927940368652, "learning_rate": 0.0008985935589074603, "loss": 3.3581, 
"step": 11945 }, { "epoch": 0.8119309688816415, "grad_norm": 1.8477342128753662, "learning_rate": 0.0008985510938986275, "loss": 3.6256, "step": 11950 }, { "epoch": 0.8122706889523033, "grad_norm": 3.0544886589050293, "learning_rate": 0.0008985086288897947, "loss": 3.6589, "step": 11955 }, { "epoch": 0.8126104090229651, "grad_norm": 2.0677266120910645, "learning_rate": 0.0008984661638809622, "loss": 3.585, "step": 11960 }, { "epoch": 0.8129501290936268, "grad_norm": 1.6077525615692139, "learning_rate": 0.0008984236988721294, "loss": 3.5773, "step": 11965 }, { "epoch": 0.8132898491642886, "grad_norm": 2.204925537109375, "learning_rate": 0.0008983812338632966, "loss": 3.6828, "step": 11970 }, { "epoch": 0.8136295692349504, "grad_norm": 2.3049333095550537, "learning_rate": 0.000898338768854464, "loss": 3.6755, "step": 11975 }, { "epoch": 0.8139692893056122, "grad_norm": 2.156456232070923, "learning_rate": 0.0008982963038456312, "loss": 3.4948, "step": 11980 }, { "epoch": 0.8143090093762739, "grad_norm": 1.6241933107376099, "learning_rate": 0.0008982538388367984, "loss": 3.4693, "step": 11985 }, { "epoch": 0.8146487294469357, "grad_norm": 2.424783945083618, "learning_rate": 0.0008982113738279658, "loss": 3.7133, "step": 11990 }, { "epoch": 0.8149884495175975, "grad_norm": 2.845808267593384, "learning_rate": 0.0008981774018208996, "loss": 3.5735, "step": 11995 }, { "epoch": 0.8153281695882593, "grad_norm": 1.960532546043396, "learning_rate": 0.0008981349368120669, "loss": 3.5112, "step": 12000 }, { "epoch": 0.815667889658921, "grad_norm": 2.4204962253570557, "learning_rate": 0.0008980924718032341, "loss": 3.5863, "step": 12005 }, { "epoch": 0.8160076097295829, "grad_norm": 1.9620435237884521, "learning_rate": 0.0008980500067944014, "loss": 3.6522, "step": 12010 }, { "epoch": 0.8163473298002446, "grad_norm": 1.986816167831421, "learning_rate": 0.0008980075417855688, "loss": 3.7423, "step": 12015 }, { "epoch": 0.8166870498709063, "grad_norm": 2.042957067489624, 
"learning_rate": 0.000897965076776736, "loss": 3.659, "step": 12020 }, { "epoch": 0.8170267699415682, "grad_norm": 1.8981882333755493, "learning_rate": 0.0008979226117679033, "loss": 3.179, "step": 12025 }, { "epoch": 0.8173664900122299, "grad_norm": 1.7098256349563599, "learning_rate": 0.0008978801467590706, "loss": 3.5903, "step": 12030 }, { "epoch": 0.8177062100828917, "grad_norm": 2.897338628768921, "learning_rate": 0.0008978376817502378, "loss": 3.583, "step": 12035 }, { "epoch": 0.8180459301535534, "grad_norm": 1.8201332092285156, "learning_rate": 0.000897795216741405, "loss": 3.4542, "step": 12040 }, { "epoch": 0.8183856502242153, "grad_norm": 1.3689368963241577, "learning_rate": 0.0008977527517325724, "loss": 3.8197, "step": 12045 }, { "epoch": 0.818725370294877, "grad_norm": 1.5806472301483154, "learning_rate": 0.0008977102867237397, "loss": 3.6638, "step": 12050 }, { "epoch": 0.8190650903655388, "grad_norm": 2.3186018466949463, "learning_rate": 0.0008976678217149069, "loss": 3.6159, "step": 12055 }, { "epoch": 0.8194048104362006, "grad_norm": 1.8880550861358643, "learning_rate": 0.0008976253567060743, "loss": 3.5779, "step": 12060 }, { "epoch": 0.8197445305068624, "grad_norm": 1.8241523504257202, "learning_rate": 0.0008975828916972415, "loss": 3.7282, "step": 12065 }, { "epoch": 0.8200842505775241, "grad_norm": 2.2253315448760986, "learning_rate": 0.0008975404266884087, "loss": 3.5834, "step": 12070 }, { "epoch": 0.8204239706481858, "grad_norm": 1.6829156875610352, "learning_rate": 0.0008974979616795761, "loss": 3.7737, "step": 12075 }, { "epoch": 0.8207636907188477, "grad_norm": 2.0937933921813965, "learning_rate": 0.0008974554966707433, "loss": 3.3254, "step": 12080 }, { "epoch": 0.8211034107895094, "grad_norm": 2.0498037338256836, "learning_rate": 0.0008974130316619106, "loss": 3.6872, "step": 12085 }, { "epoch": 0.8214431308601712, "grad_norm": 2.0770113468170166, "learning_rate": 0.000897370566653078, "loss": 3.7224, "step": 12090 }, { "epoch": 
0.821782850930833, "grad_norm": 1.882180094718933, "learning_rate": 0.0008973281016442452, "loss": 3.698, "step": 12095 }, { "epoch": 0.8221225710014948, "grad_norm": 1.9945948123931885, "learning_rate": 0.0008972856366354124, "loss": 3.625, "step": 12100 }, { "epoch": 0.8224622910721565, "grad_norm": 1.926364541053772, "learning_rate": 0.0008972431716265797, "loss": 3.4819, "step": 12105 }, { "epoch": 0.8228020111428184, "grad_norm": 1.8780598640441895, "learning_rate": 0.000897200706617747, "loss": 3.6855, "step": 12110 }, { "epoch": 0.8231417312134801, "grad_norm": 1.9880369901657104, "learning_rate": 0.0008971582416089142, "loss": 3.5897, "step": 12115 }, { "epoch": 0.8234814512841419, "grad_norm": 1.9417974948883057, "learning_rate": 0.0008971157766000816, "loss": 3.6595, "step": 12120 }, { "epoch": 0.8238211713548036, "grad_norm": 2.56170392036438, "learning_rate": 0.0008970733115912489, "loss": 3.6468, "step": 12125 }, { "epoch": 0.8241608914254654, "grad_norm": 1.7900923490524292, "learning_rate": 0.0008970308465824161, "loss": 3.7406, "step": 12130 }, { "epoch": 0.8245006114961272, "grad_norm": 2.372579336166382, "learning_rate": 0.0008969883815735834, "loss": 3.3412, "step": 12135 }, { "epoch": 0.8248403315667889, "grad_norm": 1.6829882860183716, "learning_rate": 0.0008969459165647507, "loss": 3.586, "step": 12140 }, { "epoch": 0.8251800516374508, "grad_norm": 1.715254545211792, "learning_rate": 0.0008969034515559179, "loss": 3.6787, "step": 12145 }, { "epoch": 0.8255197717081125, "grad_norm": 1.9338326454162598, "learning_rate": 0.0008968609865470852, "loss": 3.5044, "step": 12150 }, { "epoch": 0.8258594917787743, "grad_norm": 1.692610502243042, "learning_rate": 0.0008968185215382525, "loss": 3.6999, "step": 12155 }, { "epoch": 0.826199211849436, "grad_norm": 2.1445670127868652, "learning_rate": 0.0008967760565294198, "loss": 3.678, "step": 12160 }, { "epoch": 0.8265389319200979, "grad_norm": 2.07627534866333, "learning_rate": 0.0008967335915205871, 
"loss": 3.5121, "step": 12165 }, { "epoch": 0.8268786519907596, "grad_norm": 2.3995726108551025, "learning_rate": 0.0008966911265117543, "loss": 3.6672, "step": 12170 }, { "epoch": 0.8272183720614213, "grad_norm": 2.7170708179473877, "learning_rate": 0.0008966486615029216, "loss": 3.4531, "step": 12175 }, { "epoch": 0.8275580921320832, "grad_norm": 1.6846497058868408, "learning_rate": 0.0008966061964940889, "loss": 3.665, "step": 12180 }, { "epoch": 0.8278978122027449, "grad_norm": 1.622281551361084, "learning_rate": 0.0008965637314852561, "loss": 3.5512, "step": 12185 }, { "epoch": 0.8282375322734067, "grad_norm": 2.0641210079193115, "learning_rate": 0.0008965212664764235, "loss": 3.7465, "step": 12190 }, { "epoch": 0.8285772523440685, "grad_norm": 1.587550401687622, "learning_rate": 0.0008964788014675908, "loss": 3.7658, "step": 12195 }, { "epoch": 0.8289169724147303, "grad_norm": 1.8464587926864624, "learning_rate": 0.000896436336458758, "loss": 3.7262, "step": 12200 }, { "epoch": 0.829256692485392, "grad_norm": 1.92472243309021, "learning_rate": 0.0008963938714499252, "loss": 3.7798, "step": 12205 }, { "epoch": 0.8295964125560538, "grad_norm": 1.8753942251205444, "learning_rate": 0.0008963514064410926, "loss": 3.4896, "step": 12210 }, { "epoch": 0.8299361326267156, "grad_norm": 2.2637898921966553, "learning_rate": 0.0008963089414322598, "loss": 3.6206, "step": 12215 }, { "epoch": 0.8302758526973774, "grad_norm": 1.7811702489852905, "learning_rate": 0.000896266476423427, "loss": 3.5026, "step": 12220 }, { "epoch": 0.8306155727680391, "grad_norm": 2.1178855895996094, "learning_rate": 0.0008962240114145945, "loss": 3.6893, "step": 12225 }, { "epoch": 0.830955292838701, "grad_norm": 1.8202636241912842, "learning_rate": 0.0008961815464057617, "loss": 3.4674, "step": 12230 }, { "epoch": 0.8312950129093627, "grad_norm": 2.082820415496826, "learning_rate": 0.0008961390813969289, "loss": 3.2138, "step": 12235 }, { "epoch": 0.8316347329800244, "grad_norm": 
2.049116373062134, "learning_rate": 0.0008960966163880963, "loss": 3.5718, "step": 12240 }, { "epoch": 0.8319744530506862, "grad_norm": 1.7934966087341309, "learning_rate": 0.0008960541513792635, "loss": 3.2235, "step": 12245 }, { "epoch": 0.832314173121348, "grad_norm": 1.7493064403533936, "learning_rate": 0.0008960116863704307, "loss": 3.5795, "step": 12250 }, { "epoch": 0.8326538931920098, "grad_norm": 2.156275749206543, "learning_rate": 0.000895969221361598, "loss": 3.4048, "step": 12255 }, { "epoch": 0.8329936132626715, "grad_norm": 2.5682051181793213, "learning_rate": 0.0008959267563527654, "loss": 3.5626, "step": 12260 }, { "epoch": 0.8333333333333334, "grad_norm": 2.1631669998168945, "learning_rate": 0.0008958842913439326, "loss": 3.7353, "step": 12265 }, { "epoch": 0.8336730534039951, "grad_norm": 2.6062042713165283, "learning_rate": 0.0008958418263350999, "loss": 3.268, "step": 12270 }, { "epoch": 0.8340127734746569, "grad_norm": 1.6560136079788208, "learning_rate": 0.0008957993613262672, "loss": 3.7281, "step": 12275 }, { "epoch": 0.8343524935453187, "grad_norm": 2.089534282684326, "learning_rate": 0.0008957568963174344, "loss": 3.7214, "step": 12280 }, { "epoch": 0.8346922136159804, "grad_norm": 1.9766087532043457, "learning_rate": 0.0008957144313086017, "loss": 3.5015, "step": 12285 }, { "epoch": 0.8350319336866422, "grad_norm": 2.2247838973999023, "learning_rate": 0.0008956719662997689, "loss": 3.5792, "step": 12290 }, { "epoch": 0.8353716537573039, "grad_norm": 2.0758254528045654, "learning_rate": 0.0008956295012909363, "loss": 3.4632, "step": 12295 }, { "epoch": 0.8357113738279658, "grad_norm": 2.117910146713257, "learning_rate": 0.0008955870362821036, "loss": 3.7143, "step": 12300 }, { "epoch": 0.8360510938986275, "grad_norm": 2.7755537033081055, "learning_rate": 0.0008955445712732708, "loss": 3.3929, "step": 12305 }, { "epoch": 0.8363908139692893, "grad_norm": 2.1989541053771973, "learning_rate": 0.0008955021062644382, "loss": 3.8346, "step": 
12310 }, { "epoch": 0.8367305340399511, "grad_norm": 2.0456106662750244, "learning_rate": 0.0008954596412556054, "loss": 3.5764, "step": 12315 }, { "epoch": 0.8370702541106129, "grad_norm": 2.08349609375, "learning_rate": 0.0008954171762467726, "loss": 3.7042, "step": 12320 }, { "epoch": 0.8374099741812746, "grad_norm": 1.8422882556915283, "learning_rate": 0.00089537471123794, "loss": 3.6729, "step": 12325 }, { "epoch": 0.8377496942519363, "grad_norm": 1.8565173149108887, "learning_rate": 0.0008953322462291073, "loss": 3.259, "step": 12330 }, { "epoch": 0.8380894143225982, "grad_norm": 1.8854093551635742, "learning_rate": 0.0008952897812202745, "loss": 3.8031, "step": 12335 }, { "epoch": 0.83842913439326, "grad_norm": 2.027522087097168, "learning_rate": 0.0008952473162114419, "loss": 3.6371, "step": 12340 }, { "epoch": 0.8387688544639217, "grad_norm": 1.8896504640579224, "learning_rate": 0.0008952048512026091, "loss": 3.5888, "step": 12345 }, { "epoch": 0.8391085745345835, "grad_norm": 1.6029467582702637, "learning_rate": 0.0008951623861937763, "loss": 3.5474, "step": 12350 }, { "epoch": 0.8394482946052453, "grad_norm": 1.6205421686172485, "learning_rate": 0.0008951199211849436, "loss": 3.6201, "step": 12355 }, { "epoch": 0.839788014675907, "grad_norm": 2.2841012477874756, "learning_rate": 0.0008950774561761109, "loss": 3.5875, "step": 12360 }, { "epoch": 0.8401277347465689, "grad_norm": 2.080737829208374, "learning_rate": 0.0008950349911672782, "loss": 3.7879, "step": 12365 }, { "epoch": 0.8404674548172306, "grad_norm": 2.5985307693481445, "learning_rate": 0.0008949925261584455, "loss": 3.8284, "step": 12370 }, { "epoch": 0.8408071748878924, "grad_norm": 1.960466742515564, "learning_rate": 0.0008949500611496128, "loss": 3.5148, "step": 12375 }, { "epoch": 0.8411468949585541, "grad_norm": 2.22343111038208, "learning_rate": 0.00089490759614078, "loss": 3.7388, "step": 12380 }, { "epoch": 0.841486615029216, "grad_norm": 2.230656385421753, "learning_rate": 
0.0008948651311319473, "loss": 3.8039, "step": 12385 }, { "epoch": 0.8418263350998777, "grad_norm": 1.971765160560608, "learning_rate": 0.0008948226661231145, "loss": 3.4994, "step": 12390 }, { "epoch": 0.8421660551705394, "grad_norm": 2.013716459274292, "learning_rate": 0.0008947802011142818, "loss": 3.5454, "step": 12395 }, { "epoch": 0.8425057752412013, "grad_norm": 2.0413787364959717, "learning_rate": 0.0008947377361054492, "loss": 3.7188, "step": 12400 }, { "epoch": 0.842845495311863, "grad_norm": 2.014756441116333, "learning_rate": 0.0008946952710966164, "loss": 3.565, "step": 12405 }, { "epoch": 0.8431852153825248, "grad_norm": 2.1521565914154053, "learning_rate": 0.0008946528060877837, "loss": 3.8291, "step": 12410 }, { "epoch": 0.8435249354531865, "grad_norm": 2.0649235248565674, "learning_rate": 0.000894610341078951, "loss": 3.7589, "step": 12415 }, { "epoch": 0.8438646555238484, "grad_norm": 2.074471950531006, "learning_rate": 0.0008945678760701182, "loss": 3.886, "step": 12420 }, { "epoch": 0.8442043755945101, "grad_norm": 2.2603542804718018, "learning_rate": 0.0008945254110612855, "loss": 3.7657, "step": 12425 }, { "epoch": 0.8445440956651719, "grad_norm": 1.7593920230865479, "learning_rate": 0.0008944829460524528, "loss": 3.5995, "step": 12430 }, { "epoch": 0.8448838157358337, "grad_norm": 1.9014972448349, "learning_rate": 0.0008944404810436201, "loss": 3.7255, "step": 12435 }, { "epoch": 0.8452235358064955, "grad_norm": 2.561136484146118, "learning_rate": 0.0008943980160347873, "loss": 3.5631, "step": 12440 }, { "epoch": 0.8455632558771572, "grad_norm": 1.9853166341781616, "learning_rate": 0.0008943555510259547, "loss": 3.5563, "step": 12445 }, { "epoch": 0.845902975947819, "grad_norm": 1.7387324571609497, "learning_rate": 0.0008943130860171219, "loss": 3.5657, "step": 12450 }, { "epoch": 0.8462426960184808, "grad_norm": 2.1655447483062744, "learning_rate": 0.0008942706210082891, "loss": 3.8737, "step": 12455 }, { "epoch": 0.8465824160891425, 
"grad_norm": 2.074890375137329, "learning_rate": 0.0008942281559994565, "loss": 3.5512, "step": 12460 }, { "epoch": 0.8469221361598043, "grad_norm": 1.599816918373108, "learning_rate": 0.0008941856909906237, "loss": 3.7427, "step": 12465 }, { "epoch": 0.8472618562304661, "grad_norm": 2.098097324371338, "learning_rate": 0.000894143225981791, "loss": 3.5888, "step": 12470 }, { "epoch": 0.8476015763011279, "grad_norm": 1.6239452362060547, "learning_rate": 0.0008941007609729584, "loss": 3.2262, "step": 12475 }, { "epoch": 0.8479412963717896, "grad_norm": 2.3388900756835938, "learning_rate": 0.0008940582959641256, "loss": 3.5159, "step": 12480 }, { "epoch": 0.8482810164424515, "grad_norm": 2.094869375228882, "learning_rate": 0.0008940158309552928, "loss": 3.5587, "step": 12485 }, { "epoch": 0.8486207365131132, "grad_norm": 1.8293715715408325, "learning_rate": 0.0008939733659464601, "loss": 3.6512, "step": 12490 }, { "epoch": 0.848960456583775, "grad_norm": 1.900874137878418, "learning_rate": 0.0008939309009376274, "loss": 3.6132, "step": 12495 }, { "epoch": 0.8493001766544367, "grad_norm": 2.343280076980591, "learning_rate": 0.0008938884359287946, "loss": 3.6121, "step": 12500 }, { "epoch": 0.8496398967250985, "grad_norm": 1.9107239246368408, "learning_rate": 0.000893845970919962, "loss": 3.6776, "step": 12505 }, { "epoch": 0.8499796167957603, "grad_norm": 2.4147043228149414, "learning_rate": 0.0008938035059111293, "loss": 3.5085, "step": 12510 }, { "epoch": 0.850319336866422, "grad_norm": 1.7407586574554443, "learning_rate": 0.0008937610409022965, "loss": 3.4379, "step": 12515 }, { "epoch": 0.8506590569370839, "grad_norm": 1.7786179780960083, "learning_rate": 0.0008937185758934638, "loss": 3.6989, "step": 12520 }, { "epoch": 0.8509987770077456, "grad_norm": 2.388113498687744, "learning_rate": 0.000893676110884631, "loss": 3.6327, "step": 12525 }, { "epoch": 0.8513384970784074, "grad_norm": 2.158088445663452, "learning_rate": 0.0008936336458757983, "loss": 3.7886, 
"step": 12530 }, { "epoch": 0.8516782171490692, "grad_norm": 2.044100761413574, "learning_rate": 0.0008935911808669657, "loss": 3.7911, "step": 12535 }, { "epoch": 0.852017937219731, "grad_norm": 2.059258460998535, "learning_rate": 0.000893548715858133, "loss": 3.6803, "step": 12540 }, { "epoch": 0.8523576572903927, "grad_norm": 1.734035849571228, "learning_rate": 0.0008935062508493002, "loss": 3.6993, "step": 12545 }, { "epoch": 0.8526973773610544, "grad_norm": 1.9774596691131592, "learning_rate": 0.0008934637858404675, "loss": 3.7344, "step": 12550 }, { "epoch": 0.8530370974317163, "grad_norm": 2.2034218311309814, "learning_rate": 0.0008934213208316347, "loss": 3.6211, "step": 12555 }, { "epoch": 0.853376817502378, "grad_norm": 2.322540044784546, "learning_rate": 0.000893378855822802, "loss": 3.4733, "step": 12560 }, { "epoch": 0.8537165375730398, "grad_norm": 1.5776159763336182, "learning_rate": 0.0008933363908139693, "loss": 3.8672, "step": 12565 }, { "epoch": 0.8540562576437016, "grad_norm": 2.712397813796997, "learning_rate": 0.0008932939258051366, "loss": 3.4735, "step": 12570 }, { "epoch": 0.8543959777143634, "grad_norm": 1.9089651107788086, "learning_rate": 0.0008932514607963039, "loss": 3.6521, "step": 12575 }, { "epoch": 0.8547356977850251, "grad_norm": 1.970173954963684, "learning_rate": 0.0008932089957874712, "loss": 3.6303, "step": 12580 }, { "epoch": 0.8550754178556869, "grad_norm": 1.86710786819458, "learning_rate": 0.0008931665307786384, "loss": 3.3521, "step": 12585 }, { "epoch": 0.8554151379263487, "grad_norm": 1.856620192527771, "learning_rate": 0.0008931240657698056, "loss": 3.663, "step": 12590 }, { "epoch": 0.8557548579970105, "grad_norm": 1.7893024682998657, "learning_rate": 0.000893081600760973, "loss": 3.7552, "step": 12595 }, { "epoch": 0.8560945780676722, "grad_norm": 1.8403986692428589, "learning_rate": 0.0008930391357521402, "loss": 3.7246, "step": 12600 }, { "epoch": 0.856434298138334, "grad_norm": 2.2717044353485107, "learning_rate": 
0.0008929966707433075, "loss": 3.3736, "step": 12605 }, { "epoch": 0.8567740182089958, "grad_norm": 1.604888677597046, "learning_rate": 0.0008929542057344749, "loss": 3.5811, "step": 12610 }, { "epoch": 0.8571137382796575, "grad_norm": 1.9019547700881958, "learning_rate": 0.0008929117407256421, "loss": 3.5501, "step": 12615 }, { "epoch": 0.8574534583503194, "grad_norm": 2.2787601947784424, "learning_rate": 0.0008928692757168093, "loss": 3.8557, "step": 12620 }, { "epoch": 0.8577931784209811, "grad_norm": 2.0447704792022705, "learning_rate": 0.0008928268107079767, "loss": 3.7556, "step": 12625 }, { "epoch": 0.8581328984916429, "grad_norm": 2.0168004035949707, "learning_rate": 0.0008927843456991439, "loss": 3.7493, "step": 12630 }, { "epoch": 0.8584726185623046, "grad_norm": 2.1855499744415283, "learning_rate": 0.0008927418806903111, "loss": 3.3912, "step": 12635 }, { "epoch": 0.8588123386329665, "grad_norm": 3.349332094192505, "learning_rate": 0.0008926994156814785, "loss": 3.6998, "step": 12640 }, { "epoch": 0.8591520587036282, "grad_norm": 1.6372334957122803, "learning_rate": 0.0008926569506726458, "loss": 3.6903, "step": 12645 }, { "epoch": 0.85949177877429, "grad_norm": 2.0103511810302734, "learning_rate": 0.0008926144856638131, "loss": 3.5996, "step": 12650 }, { "epoch": 0.8598314988449518, "grad_norm": 1.7262036800384521, "learning_rate": 0.0008925720206549803, "loss": 3.6479, "step": 12655 }, { "epoch": 0.8601712189156135, "grad_norm": 1.8902812004089355, "learning_rate": 0.0008925295556461476, "loss": 3.5648, "step": 12660 }, { "epoch": 0.8605109389862753, "grad_norm": 1.9974135160446167, "learning_rate": 0.0008924870906373149, "loss": 3.6146, "step": 12665 }, { "epoch": 0.860850659056937, "grad_norm": 2.0448684692382812, "learning_rate": 0.0008924446256284821, "loss": 3.4887, "step": 12670 }, { "epoch": 0.8611903791275989, "grad_norm": 1.5912675857543945, "learning_rate": 0.0008924021606196495, "loss": 3.5158, "step": 12675 }, { "epoch": 0.8615300991982606, 
"grad_norm": 2.382538318634033, "learning_rate": 0.0008923596956108168, "loss": 3.6074, "step": 12680 }, { "epoch": 0.8618698192689224, "grad_norm": 1.9596750736236572, "learning_rate": 0.000892317230601984, "loss": 3.661, "step": 12685 }, { "epoch": 0.8622095393395842, "grad_norm": 2.2694332599639893, "learning_rate": 0.0008922747655931512, "loss": 3.408, "step": 12690 }, { "epoch": 0.862549259410246, "grad_norm": 2.059685230255127, "learning_rate": 0.0008922323005843186, "loss": 3.7297, "step": 12695 }, { "epoch": 0.8628889794809077, "grad_norm": 2.316754102706909, "learning_rate": 0.0008921898355754858, "loss": 3.6415, "step": 12700 }, { "epoch": 0.8632286995515696, "grad_norm": 1.6933711767196655, "learning_rate": 0.000892147370566653, "loss": 3.451, "step": 12705 }, { "epoch": 0.8635684196222313, "grad_norm": 2.0257375240325928, "learning_rate": 0.0008921049055578205, "loss": 3.4419, "step": 12710 }, { "epoch": 0.863908139692893, "grad_norm": 2.5091633796691895, "learning_rate": 0.0008920624405489877, "loss": 3.7822, "step": 12715 }, { "epoch": 0.8642478597635548, "grad_norm": 1.7555491924285889, "learning_rate": 0.0008920199755401549, "loss": 3.7103, "step": 12720 }, { "epoch": 0.8645875798342166, "grad_norm": 2.0097007751464844, "learning_rate": 0.0008919775105313223, "loss": 3.79, "step": 12725 }, { "epoch": 0.8649272999048784, "grad_norm": 1.6482596397399902, "learning_rate": 0.0008919350455224895, "loss": 3.5067, "step": 12730 }, { "epoch": 0.8652670199755401, "grad_norm": 2.1771347522735596, "learning_rate": 0.0008918925805136567, "loss": 3.8857, "step": 12735 }, { "epoch": 0.865606740046202, "grad_norm": 3.130981206893921, "learning_rate": 0.000891850115504824, "loss": 3.7715, "step": 12740 }, { "epoch": 0.8659464601168637, "grad_norm": 2.1097888946533203, "learning_rate": 0.0008918076504959914, "loss": 3.7157, "step": 12745 }, { "epoch": 0.8662861801875255, "grad_norm": 1.800281286239624, "learning_rate": 0.0008917651854871586, "loss": 3.6878, "step": 
12750 }, { "epoch": 0.8666259002581872, "grad_norm": 1.9135576486587524, "learning_rate": 0.0008917227204783259, "loss": 3.5771, "step": 12755 }, { "epoch": 0.866965620328849, "grad_norm": 1.881426453590393, "learning_rate": 0.0008916802554694932, "loss": 3.5048, "step": 12760 }, { "epoch": 0.8673053403995108, "grad_norm": 1.7525192499160767, "learning_rate": 0.0008916377904606604, "loss": 3.695, "step": 12765 }, { "epoch": 0.8676450604701725, "grad_norm": 1.954271912574768, "learning_rate": 0.0008915953254518277, "loss": 3.6824, "step": 12770 }, { "epoch": 0.8679847805408344, "grad_norm": 1.7567439079284668, "learning_rate": 0.000891552860442995, "loss": 3.7214, "step": 12775 }, { "epoch": 0.8683245006114961, "grad_norm": 1.6763626337051392, "learning_rate": 0.0008915103954341623, "loss": 3.576, "step": 12780 }, { "epoch": 0.8686642206821579, "grad_norm": 2.2172772884368896, "learning_rate": 0.0008914679304253296, "loss": 3.796, "step": 12785 }, { "epoch": 0.8690039407528197, "grad_norm": 2.0513710975646973, "learning_rate": 0.0008914254654164968, "loss": 3.5597, "step": 12790 }, { "epoch": 0.8693436608234815, "grad_norm": 1.6775264739990234, "learning_rate": 0.0008913830004076641, "loss": 3.589, "step": 12795 }, { "epoch": 0.8696833808941432, "grad_norm": 2.0315704345703125, "learning_rate": 0.0008913405353988314, "loss": 3.7984, "step": 12800 }, { "epoch": 0.870023100964805, "grad_norm": 1.735235571861267, "learning_rate": 0.0008912980703899986, "loss": 3.8377, "step": 12805 }, { "epoch": 0.8703628210354668, "grad_norm": 2.2356374263763428, "learning_rate": 0.0008912556053811659, "loss": 3.8381, "step": 12810 }, { "epoch": 0.8707025411061285, "grad_norm": 2.05106520652771, "learning_rate": 0.0008912131403723333, "loss": 3.5928, "step": 12815 }, { "epoch": 0.8710422611767903, "grad_norm": 2.0711517333984375, "learning_rate": 0.0008911706753635005, "loss": 3.6527, "step": 12820 }, { "epoch": 0.8713819812474521, "grad_norm": 2.4229321479797363, "learning_rate": 
0.0008911282103546677, "loss": 3.8407, "step": 12825 }, { "epoch": 0.8717217013181139, "grad_norm": 1.6217141151428223, "learning_rate": 0.0008910857453458351, "loss": 3.5363, "step": 12830 }, { "epoch": 0.8720614213887756, "grad_norm": 1.5838496685028076, "learning_rate": 0.0008910432803370023, "loss": 3.4761, "step": 12835 }, { "epoch": 0.8724011414594374, "grad_norm": 2.1080658435821533, "learning_rate": 0.0008910008153281695, "loss": 3.6173, "step": 12840 }, { "epoch": 0.8727408615300992, "grad_norm": 1.7606642246246338, "learning_rate": 0.0008909583503193369, "loss": 3.5789, "step": 12845 }, { "epoch": 0.873080581600761, "grad_norm": 1.935593843460083, "learning_rate": 0.0008909158853105042, "loss": 3.8111, "step": 12850 }, { "epoch": 0.8734203016714227, "grad_norm": 1.8260825872421265, "learning_rate": 0.0008908734203016714, "loss": 3.7379, "step": 12855 }, { "epoch": 0.8737600217420846, "grad_norm": 1.692189335823059, "learning_rate": 0.0008908309552928388, "loss": 3.6197, "step": 12860 }, { "epoch": 0.8740997418127463, "grad_norm": 1.7181512117385864, "learning_rate": 0.000890788490284006, "loss": 3.5631, "step": 12865 }, { "epoch": 0.874439461883408, "grad_norm": 3.152313232421875, "learning_rate": 0.0008907460252751732, "loss": 3.4384, "step": 12870 }, { "epoch": 0.8747791819540699, "grad_norm": 1.895484209060669, "learning_rate": 0.0008907035602663406, "loss": 3.6534, "step": 12875 }, { "epoch": 0.8751189020247316, "grad_norm": 1.9974424839019775, "learning_rate": 0.0008906610952575078, "loss": 3.4106, "step": 12880 }, { "epoch": 0.8754586220953934, "grad_norm": 2.089371919631958, "learning_rate": 0.0008906186302486751, "loss": 3.6975, "step": 12885 }, { "epoch": 0.8757983421660551, "grad_norm": 1.6298573017120361, "learning_rate": 0.0008905761652398424, "loss": 3.5296, "step": 12890 }, { "epoch": 0.876138062236717, "grad_norm": 1.863574743270874, "learning_rate": 0.0008905337002310097, "loss": 3.7053, "step": 12895 }, { "epoch": 0.8764777823073787, 
"grad_norm": 1.767784833908081, "learning_rate": 0.0008904912352221769, "loss": 3.7454, "step": 12900 }, { "epoch": 0.8768175023780405, "grad_norm": 2.257453203201294, "learning_rate": 0.0008904487702133442, "loss": 3.5558, "step": 12905 }, { "epoch": 0.8771572224487023, "grad_norm": 2.7063040733337402, "learning_rate": 0.0008904063052045115, "loss": 3.6359, "step": 12910 }, { "epoch": 0.877496942519364, "grad_norm": 1.9771069288253784, "learning_rate": 0.0008903638401956787, "loss": 3.4325, "step": 12915 }, { "epoch": 0.8778366625900258, "grad_norm": 2.1461758613586426, "learning_rate": 0.0008903213751868461, "loss": 3.6169, "step": 12920 }, { "epoch": 0.8781763826606876, "grad_norm": 2.177166700363159, "learning_rate": 0.0008902789101780134, "loss": 3.3599, "step": 12925 }, { "epoch": 0.8785161027313494, "grad_norm": 2.281083822250366, "learning_rate": 0.0008902364451691806, "loss": 3.604, "step": 12930 }, { "epoch": 0.8788558228020111, "grad_norm": 2.23660945892334, "learning_rate": 0.0008901939801603479, "loss": 3.6128, "step": 12935 }, { "epoch": 0.8791955428726729, "grad_norm": 2.1961147785186768, "learning_rate": 0.0008901515151515151, "loss": 3.666, "step": 12940 }, { "epoch": 0.8795352629433347, "grad_norm": 1.673201560974121, "learning_rate": 0.0008901090501426824, "loss": 3.5916, "step": 12945 }, { "epoch": 0.8798749830139965, "grad_norm": 1.7850279808044434, "learning_rate": 0.0008900665851338497, "loss": 3.3692, "step": 12950 }, { "epoch": 0.8802147030846582, "grad_norm": 2.77082896232605, "learning_rate": 0.000890024120125017, "loss": 3.7647, "step": 12955 }, { "epoch": 0.8805544231553201, "grad_norm": 1.673248052597046, "learning_rate": 0.0008899816551161843, "loss": 3.574, "step": 12960 }, { "epoch": 0.8808941432259818, "grad_norm": 1.8578646183013916, "learning_rate": 0.0008899391901073516, "loss": 3.6923, "step": 12965 }, { "epoch": 0.8812338632966435, "grad_norm": 1.952796459197998, "learning_rate": 0.0008898967250985188, "loss": 3.7056, "step": 
12970 }, { "epoch": 0.8815735833673053, "grad_norm": 2.0288853645324707, "learning_rate": 0.000889854260089686, "loss": 3.57, "step": 12975 }, { "epoch": 0.8819133034379671, "grad_norm": 2.0192108154296875, "learning_rate": 0.0008898117950808534, "loss": 3.7623, "step": 12980 }, { "epoch": 0.8822530235086289, "grad_norm": 2.3906116485595703, "learning_rate": 0.0008897693300720206, "loss": 3.5793, "step": 12985 }, { "epoch": 0.8825927435792906, "grad_norm": 2.6349093914031982, "learning_rate": 0.000889726865063188, "loss": 3.7004, "step": 12990 }, { "epoch": 0.8829324636499525, "grad_norm": 1.9593007564544678, "learning_rate": 0.0008896844000543553, "loss": 3.6818, "step": 12995 }, { "epoch": 0.8832721837206142, "grad_norm": 1.9457873106002808, "learning_rate": 0.0008896419350455225, "loss": 3.624, "step": 13000 }, { "epoch": 0.883611903791276, "grad_norm": 2.895799398422241, "learning_rate": 0.0008895994700366898, "loss": 3.7145, "step": 13005 }, { "epoch": 0.8839516238619378, "grad_norm": 1.746244192123413, "learning_rate": 0.0008895570050278571, "loss": 3.3976, "step": 13010 }, { "epoch": 0.8842913439325996, "grad_norm": 1.7711468935012817, "learning_rate": 0.0008895145400190243, "loss": 3.4794, "step": 13015 }, { "epoch": 0.8846310640032613, "grad_norm": 1.6826128959655762, "learning_rate": 0.0008894720750101916, "loss": 3.4179, "step": 13020 }, { "epoch": 0.884970784073923, "grad_norm": 2.317542791366577, "learning_rate": 0.000889429610001359, "loss": 3.5052, "step": 13025 }, { "epoch": 0.8853105041445849, "grad_norm": 2.3453357219696045, "learning_rate": 0.0008893871449925262, "loss": 3.5418, "step": 13030 }, { "epoch": 0.8856502242152466, "grad_norm": 2.4135615825653076, "learning_rate": 0.0008893446799836935, "loss": 3.7063, "step": 13035 }, { "epoch": 0.8859899442859084, "grad_norm": 1.9135335683822632, "learning_rate": 0.0008893022149748607, "loss": 3.5, "step": 13040 }, { "epoch": 0.8863296643565702, "grad_norm": 1.4988834857940674, "learning_rate": 
0.000889259749966028, "loss": 3.5791, "step": 13045 }, { "epoch": 0.886669384427232, "grad_norm": 2.0893256664276123, "learning_rate": 0.0008892172849571953, "loss": 3.7341, "step": 13050 }, { "epoch": 0.8870091044978937, "grad_norm": 1.8149819374084473, "learning_rate": 0.0008891748199483625, "loss": 3.5194, "step": 13055 }, { "epoch": 0.8873488245685555, "grad_norm": 2.6282777786254883, "learning_rate": 0.0008891323549395299, "loss": 3.6027, "step": 13060 }, { "epoch": 0.8876885446392173, "grad_norm": 1.7614283561706543, "learning_rate": 0.0008890898899306972, "loss": 3.6226, "step": 13065 }, { "epoch": 0.8880282647098791, "grad_norm": 2.4501185417175293, "learning_rate": 0.0008890474249218644, "loss": 3.6031, "step": 13070 }, { "epoch": 0.8883679847805408, "grad_norm": 1.8967763185501099, "learning_rate": 0.0008890049599130316, "loss": 3.6431, "step": 13075 }, { "epoch": 0.8887077048512027, "grad_norm": 1.788118839263916, "learning_rate": 0.000888962494904199, "loss": 3.65, "step": 13080 }, { "epoch": 0.8890474249218644, "grad_norm": 1.9833227396011353, "learning_rate": 0.0008889200298953662, "loss": 3.5522, "step": 13085 }, { "epoch": 0.8893871449925261, "grad_norm": 1.875222086906433, "learning_rate": 0.0008888775648865334, "loss": 3.5911, "step": 13090 }, { "epoch": 0.889726865063188, "grad_norm": 1.9749059677124023, "learning_rate": 0.0008888350998777009, "loss": 3.6239, "step": 13095 }, { "epoch": 0.8900665851338497, "grad_norm": 1.8679710626602173, "learning_rate": 0.0008887926348688681, "loss": 3.616, "step": 13100 }, { "epoch": 0.8904063052045115, "grad_norm": 2.487046241760254, "learning_rate": 0.0008887501698600353, "loss": 3.6453, "step": 13105 }, { "epoch": 0.8907460252751732, "grad_norm": 2.309375286102295, "learning_rate": 0.0008887077048512027, "loss": 3.7545, "step": 13110 }, { "epoch": 0.8910857453458351, "grad_norm": 1.9856433868408203, "learning_rate": 0.0008886652398423699, "loss": 3.5933, "step": 13115 }, { "epoch": 0.8914254654164968, 
"grad_norm": 1.9581245183944702, "learning_rate": 0.0008886227748335371, "loss": 3.7285, "step": 13120 }, { "epoch": 0.8917651854871586, "grad_norm": 1.8128758668899536, "learning_rate": 0.0008885803098247046, "loss": 3.3889, "step": 13125 }, { "epoch": 0.8921049055578204, "grad_norm": 1.9973536729812622, "learning_rate": 0.0008885378448158718, "loss": 3.4362, "step": 13130 }, { "epoch": 0.8924446256284821, "grad_norm": 2.3604419231414795, "learning_rate": 0.000888495379807039, "loss": 3.5922, "step": 13135 }, { "epoch": 0.8927843456991439, "grad_norm": 1.7855613231658936, "learning_rate": 0.0008884529147982063, "loss": 3.6132, "step": 13140 }, { "epoch": 0.8931240657698056, "grad_norm": 1.8239879608154297, "learning_rate": 0.0008884104497893736, "loss": 3.7838, "step": 13145 }, { "epoch": 0.8934637858404675, "grad_norm": 2.4772226810455322, "learning_rate": 0.0008883679847805408, "loss": 3.6169, "step": 13150 }, { "epoch": 0.8938035059111292, "grad_norm": 1.649658203125, "learning_rate": 0.0008883255197717081, "loss": 3.8701, "step": 13155 }, { "epoch": 0.894143225981791, "grad_norm": 2.0211119651794434, "learning_rate": 0.0008882830547628755, "loss": 3.3109, "step": 13160 }, { "epoch": 0.8944829460524528, "grad_norm": 1.7986657619476318, "learning_rate": 0.0008882405897540427, "loss": 3.461, "step": 13165 }, { "epoch": 0.8948226661231146, "grad_norm": 1.7878721952438354, "learning_rate": 0.00088819812474521, "loss": 3.4357, "step": 13170 }, { "epoch": 0.8951623861937763, "grad_norm": 1.5010910034179688, "learning_rate": 0.0008881556597363772, "loss": 3.6631, "step": 13175 }, { "epoch": 0.8955021062644382, "grad_norm": 2.320896625518799, "learning_rate": 0.0008881131947275445, "loss": 3.6416, "step": 13180 }, { "epoch": 0.8958418263350999, "grad_norm": 1.9796395301818848, "learning_rate": 0.0008880707297187118, "loss": 3.9352, "step": 13185 }, { "epoch": 0.8961815464057616, "grad_norm": 1.7291181087493896, "learning_rate": 0.000888028264709879, "loss": 3.5246, 
"step": 13190 }, { "epoch": 0.8965212664764234, "grad_norm": 2.1038169860839844, "learning_rate": 0.0008879857997010464, "loss": 3.7854, "step": 13195 }, { "epoch": 0.8968609865470852, "grad_norm": 1.9450832605361938, "learning_rate": 0.0008879433346922137, "loss": 3.5557, "step": 13200 }, { "epoch": 0.897200706617747, "grad_norm": 1.8850667476654053, "learning_rate": 0.0008879008696833809, "loss": 3.5236, "step": 13205 }, { "epoch": 0.8975404266884087, "grad_norm": 1.8404009342193604, "learning_rate": 0.0008878584046745482, "loss": 3.6168, "step": 13210 }, { "epoch": 0.8978801467590706, "grad_norm": 2.182366371154785, "learning_rate": 0.0008878159396657155, "loss": 3.4688, "step": 13215 }, { "epoch": 0.8982198668297323, "grad_norm": 2.1269540786743164, "learning_rate": 0.0008877734746568827, "loss": 3.6284, "step": 13220 }, { "epoch": 0.8985595869003941, "grad_norm": 2.3296244144439697, "learning_rate": 0.0008877310096480499, "loss": 3.8449, "step": 13225 }, { "epoch": 0.8988993069710558, "grad_norm": 2.330920934677124, "learning_rate": 0.0008876885446392174, "loss": 3.8402, "step": 13230 }, { "epoch": 0.8992390270417177, "grad_norm": 2.0745015144348145, "learning_rate": 0.0008876460796303846, "loss": 3.5973, "step": 13235 }, { "epoch": 0.8995787471123794, "grad_norm": 1.8211861848831177, "learning_rate": 0.0008876036146215518, "loss": 3.732, "step": 13240 }, { "epoch": 0.8999184671830411, "grad_norm": 1.8286044597625732, "learning_rate": 0.0008875611496127192, "loss": 3.3653, "step": 13245 }, { "epoch": 0.900258187253703, "grad_norm": 2.009091377258301, "learning_rate": 0.0008875186846038864, "loss": 3.597, "step": 13250 }, { "epoch": 0.9005979073243647, "grad_norm": 1.828031301498413, "learning_rate": 0.0008874762195950536, "loss": 3.5598, "step": 13255 }, { "epoch": 0.9009376273950265, "grad_norm": 1.9664216041564941, "learning_rate": 0.000887433754586221, "loss": 3.5382, "step": 13260 }, { "epoch": 0.9012773474656883, "grad_norm": 1.7617771625518799, 
"learning_rate": 0.0008873912895773883, "loss": 3.5937, "step": 13265 }, { "epoch": 0.9016170675363501, "grad_norm": 2.1937079429626465, "learning_rate": 0.0008873488245685555, "loss": 3.6631, "step": 13270 }, { "epoch": 0.9019567876070118, "grad_norm": 2.5829665660858154, "learning_rate": 0.0008873063595597228, "loss": 3.7494, "step": 13275 }, { "epoch": 0.9022965076776736, "grad_norm": 2.451505422592163, "learning_rate": 0.0008872638945508901, "loss": 3.9759, "step": 13280 }, { "epoch": 0.9026362277483354, "grad_norm": 1.595932126045227, "learning_rate": 0.0008872214295420573, "loss": 3.672, "step": 13285 }, { "epoch": 0.9029759478189971, "grad_norm": 2.4399776458740234, "learning_rate": 0.0008871789645332246, "loss": 3.6195, "step": 13290 }, { "epoch": 0.9033156678896589, "grad_norm": 1.7161459922790527, "learning_rate": 0.0008871364995243919, "loss": 3.8535, "step": 13295 }, { "epoch": 0.9036553879603207, "grad_norm": 1.872959017753601, "learning_rate": 0.0008870940345155592, "loss": 3.6398, "step": 13300 }, { "epoch": 0.9039951080309825, "grad_norm": 2.0971107482910156, "learning_rate": 0.0008870515695067265, "loss": 3.461, "step": 13305 }, { "epoch": 0.9043348281016442, "grad_norm": 1.760083794593811, "learning_rate": 0.0008870091044978938, "loss": 3.7604, "step": 13310 }, { "epoch": 0.904674548172306, "grad_norm": 1.6464509963989258, "learning_rate": 0.000886966639489061, "loss": 3.8467, "step": 13315 }, { "epoch": 0.9050142682429678, "grad_norm": 2.098384141921997, "learning_rate": 0.0008869241744802283, "loss": 3.3838, "step": 13320 }, { "epoch": 0.9053539883136296, "grad_norm": 1.803875207901001, "learning_rate": 0.0008868817094713955, "loss": 3.3902, "step": 13325 }, { "epoch": 0.9056937083842913, "grad_norm": 2.0576374530792236, "learning_rate": 0.0008868392444625629, "loss": 3.5681, "step": 13330 }, { "epoch": 0.9060334284549532, "grad_norm": 2.0640268325805664, "learning_rate": 0.0008867967794537302, "loss": 3.7863, "step": 13335 }, { "epoch": 
0.9063731485256149, "grad_norm": 1.8478448390960693, "learning_rate": 0.0008867543144448974, "loss": 3.5296, "step": 13340 }, { "epoch": 0.9067128685962766, "grad_norm": 1.6195385456085205, "learning_rate": 0.0008867118494360648, "loss": 3.655, "step": 13345 }, { "epoch": 0.9070525886669385, "grad_norm": 2.291335105895996, "learning_rate": 0.000886669384427232, "loss": 3.712, "step": 13350 }, { "epoch": 0.9073923087376002, "grad_norm": 1.8644088506698608, "learning_rate": 0.0008866269194183992, "loss": 3.5681, "step": 13355 }, { "epoch": 0.907732028808262, "grad_norm": 2.253929376602173, "learning_rate": 0.0008865844544095666, "loss": 3.5847, "step": 13360 }, { "epoch": 0.9080717488789237, "grad_norm": 2.9184274673461914, "learning_rate": 0.0008865419894007338, "loss": 3.5244, "step": 13365 }, { "epoch": 0.9084114689495856, "grad_norm": 2.0109927654266357, "learning_rate": 0.0008864995243919011, "loss": 3.6125, "step": 13370 }, { "epoch": 0.9087511890202473, "grad_norm": 1.830309271812439, "learning_rate": 0.0008864570593830684, "loss": 3.2517, "step": 13375 }, { "epoch": 0.9090909090909091, "grad_norm": 1.7639456987380981, "learning_rate": 0.0008864145943742357, "loss": 3.7856, "step": 13380 }, { "epoch": 0.9094306291615709, "grad_norm": 1.7286063432693481, "learning_rate": 0.0008863721293654029, "loss": 3.7827, "step": 13385 }, { "epoch": 0.9097703492322327, "grad_norm": 2.5229785442352295, "learning_rate": 0.0008863296643565702, "loss": 3.835, "step": 13390 }, { "epoch": 0.9101100693028944, "grad_norm": 1.9982335567474365, "learning_rate": 0.0008862871993477375, "loss": 3.4588, "step": 13395 }, { "epoch": 0.9104497893735561, "grad_norm": 2.5190393924713135, "learning_rate": 0.0008862447343389047, "loss": 3.3754, "step": 13400 }, { "epoch": 0.910789509444218, "grad_norm": 2.167062759399414, "learning_rate": 0.0008862022693300721, "loss": 3.9816, "step": 13405 }, { "epoch": 0.9111292295148797, "grad_norm": 1.836427092552185, "learning_rate": 0.0008861598043212394, 
"loss": 3.3927, "step": 13410 }, { "epoch": 0.9114689495855415, "grad_norm": 1.4368730783462524, "learning_rate": 0.0008861173393124066, "loss": 3.6236, "step": 13415 }, { "epoch": 0.9118086696562033, "grad_norm": 1.9384061098098755, "learning_rate": 0.0008860748743035739, "loss": 3.8511, "step": 13420 }, { "epoch": 0.9121483897268651, "grad_norm": 2.0531792640686035, "learning_rate": 0.0008860324092947411, "loss": 3.6016, "step": 13425 }, { "epoch": 0.9124881097975268, "grad_norm": 1.8253854513168335, "learning_rate": 0.0008859899442859084, "loss": 3.3942, "step": 13430 }, { "epoch": 0.9128278298681887, "grad_norm": 2.3163602352142334, "learning_rate": 0.0008859474792770757, "loss": 3.5973, "step": 13435 }, { "epoch": 0.9131675499388504, "grad_norm": 2.0344631671905518, "learning_rate": 0.000885905014268243, "loss": 3.4546, "step": 13440 }, { "epoch": 0.9135072700095122, "grad_norm": 2.0698132514953613, "learning_rate": 0.0008858625492594103, "loss": 3.5167, "step": 13445 }, { "epoch": 0.9138469900801739, "grad_norm": 1.8803751468658447, "learning_rate": 0.0008858200842505776, "loss": 3.399, "step": 13450 }, { "epoch": 0.9141867101508357, "grad_norm": 1.7588750123977661, "learning_rate": 0.0008857776192417448, "loss": 3.6008, "step": 13455 }, { "epoch": 0.9145264302214975, "grad_norm": 2.131573438644409, "learning_rate": 0.000885735154232912, "loss": 3.8067, "step": 13460 }, { "epoch": 0.9148661502921592, "grad_norm": 1.9264240264892578, "learning_rate": 0.0008856926892240794, "loss": 3.6389, "step": 13465 }, { "epoch": 0.9152058703628211, "grad_norm": 2.0972774028778076, "learning_rate": 0.0008856502242152466, "loss": 3.8029, "step": 13470 }, { "epoch": 0.9155455904334828, "grad_norm": 1.8691558837890625, "learning_rate": 0.0008856077592064139, "loss": 3.6081, "step": 13475 }, { "epoch": 0.9158853105041446, "grad_norm": 2.1729166507720947, "learning_rate": 0.0008855652941975813, "loss": 3.4545, "step": 13480 }, { "epoch": 0.9162250305748063, "grad_norm": 
1.5930920839309692, "learning_rate": 0.0008855228291887485, "loss": 3.5169, "step": 13485 }, { "epoch": 0.9165647506454682, "grad_norm": 2.209080696105957, "learning_rate": 0.0008854803641799157, "loss": 3.5972, "step": 13490 }, { "epoch": 0.9169044707161299, "grad_norm": 2.3173346519470215, "learning_rate": 0.0008854378991710831, "loss": 3.6472, "step": 13495 }, { "epoch": 0.9172441907867916, "grad_norm": 2.049778938293457, "learning_rate": 0.0008853954341622503, "loss": 3.5649, "step": 13500 }, { "epoch": 0.9175839108574535, "grad_norm": 1.9413201808929443, "learning_rate": 0.0008853529691534175, "loss": 3.7105, "step": 13505 }, { "epoch": 0.9179236309281152, "grad_norm": 1.5752571821212769, "learning_rate": 0.000885310504144585, "loss": 3.8598, "step": 13510 }, { "epoch": 0.918263350998777, "grad_norm": 1.834558367729187, "learning_rate": 0.0008852680391357522, "loss": 3.1855, "step": 13515 }, { "epoch": 0.9186030710694388, "grad_norm": 3.086698532104492, "learning_rate": 0.0008852255741269194, "loss": 3.7406, "step": 13520 }, { "epoch": 0.9189427911401006, "grad_norm": 1.7497920989990234, "learning_rate": 0.0008851831091180867, "loss": 3.5703, "step": 13525 }, { "epoch": 0.9192825112107623, "grad_norm": 1.8758424520492554, "learning_rate": 0.000885140644109254, "loss": 3.3615, "step": 13530 }, { "epoch": 0.9196222312814241, "grad_norm": 2.0696873664855957, "learning_rate": 0.0008850981791004212, "loss": 3.848, "step": 13535 }, { "epoch": 0.9199619513520859, "grad_norm": 1.8828682899475098, "learning_rate": 0.0008850557140915885, "loss": 3.8616, "step": 13540 }, { "epoch": 0.9203016714227477, "grad_norm": 2.353149652481079, "learning_rate": 0.0008850132490827559, "loss": 3.4926, "step": 13545 }, { "epoch": 0.9206413914934094, "grad_norm": 1.9026312828063965, "learning_rate": 0.0008849707840739231, "loss": 3.7334, "step": 13550 }, { "epoch": 0.9209811115640713, "grad_norm": 2.1533608436584473, "learning_rate": 0.0008849283190650904, "loss": 3.7242, "step": 13555 
}, { "epoch": 0.921320831634733, "grad_norm": 2.1563963890075684, "learning_rate": 0.0008848858540562576, "loss": 3.6438, "step": 13560 }, { "epoch": 0.9216605517053947, "grad_norm": 2.0614726543426514, "learning_rate": 0.0008848433890474249, "loss": 3.6835, "step": 13565 }, { "epoch": 0.9220002717760565, "grad_norm": 1.8247355222702026, "learning_rate": 0.0008848009240385922, "loss": 3.647, "step": 13570 }, { "epoch": 0.9223399918467183, "grad_norm": 1.702374815940857, "learning_rate": 0.0008847584590297594, "loss": 3.5815, "step": 13575 }, { "epoch": 0.9226797119173801, "grad_norm": 1.9625810384750366, "learning_rate": 0.0008847159940209268, "loss": 3.494, "step": 13580 }, { "epoch": 0.9230194319880418, "grad_norm": 2.6261403560638428, "learning_rate": 0.0008846735290120941, "loss": 3.3677, "step": 13585 }, { "epoch": 0.9233591520587037, "grad_norm": 1.7307296991348267, "learning_rate": 0.0008846310640032613, "loss": 3.5304, "step": 13590 }, { "epoch": 0.9236988721293654, "grad_norm": 2.2721478939056396, "learning_rate": 0.0008845885989944286, "loss": 3.5677, "step": 13595 }, { "epoch": 0.9240385922000272, "grad_norm": 1.7997431755065918, "learning_rate": 0.0008845461339855959, "loss": 3.4735, "step": 13600 }, { "epoch": 0.924378312270689, "grad_norm": 2.0148088932037354, "learning_rate": 0.0008845036689767631, "loss": 3.6919, "step": 13605 }, { "epoch": 0.9247180323413507, "grad_norm": 1.8164029121398926, "learning_rate": 0.0008844612039679303, "loss": 3.71, "step": 13610 }, { "epoch": 0.9250577524120125, "grad_norm": 2.0906689167022705, "learning_rate": 0.0008844187389590978, "loss": 3.7201, "step": 13615 }, { "epoch": 0.9253974724826742, "grad_norm": 1.8715256452560425, "learning_rate": 0.000884376273950265, "loss": 3.6176, "step": 13620 }, { "epoch": 0.9257371925533361, "grad_norm": 1.4819517135620117, "learning_rate": 0.0008843338089414322, "loss": 3.6952, "step": 13625 }, { "epoch": 0.9260769126239978, "grad_norm": 1.3904067277908325, "learning_rate": 
0.0008842913439325996, "loss": 3.4914, "step": 13630 }, { "epoch": 0.9264166326946596, "grad_norm": 1.764521837234497, "learning_rate": 0.0008842488789237668, "loss": 3.657, "step": 13635 }, { "epoch": 0.9267563527653214, "grad_norm": 2.368117332458496, "learning_rate": 0.000884206413914934, "loss": 3.5315, "step": 13640 }, { "epoch": 0.9270960728359832, "grad_norm": 1.9231480360031128, "learning_rate": 0.0008841639489061015, "loss": 3.3636, "step": 13645 }, { "epoch": 0.9274357929066449, "grad_norm": 2.0033533573150635, "learning_rate": 0.0008841214838972687, "loss": 3.7539, "step": 13650 }, { "epoch": 0.9277755129773066, "grad_norm": 2.0089175701141357, "learning_rate": 0.0008840790188884359, "loss": 3.6775, "step": 13655 }, { "epoch": 0.9281152330479685, "grad_norm": 1.6453220844268799, "learning_rate": 0.0008840365538796033, "loss": 3.6644, "step": 13660 }, { "epoch": 0.9284549531186302, "grad_norm": 2.2118403911590576, "learning_rate": 0.0008839940888707705, "loss": 3.586, "step": 13665 }, { "epoch": 0.928794673189292, "grad_norm": 1.7816455364227295, "learning_rate": 0.0008839516238619378, "loss": 3.7074, "step": 13670 }, { "epoch": 0.9291343932599538, "grad_norm": 2.1607799530029297, "learning_rate": 0.000883909158853105, "loss": 3.7808, "step": 13675 }, { "epoch": 0.9294741133306156, "grad_norm": 2.2218565940856934, "learning_rate": 0.0008838666938442724, "loss": 3.6413, "step": 13680 }, { "epoch": 0.9298138334012773, "grad_norm": 2.1127982139587402, "learning_rate": 0.0008838242288354397, "loss": 3.4484, "step": 13685 }, { "epoch": 0.9301535534719392, "grad_norm": 2.521768808364868, "learning_rate": 0.0008837817638266069, "loss": 3.4473, "step": 13690 }, { "epoch": 0.9304932735426009, "grad_norm": 2.6389975547790527, "learning_rate": 0.0008837392988177742, "loss": 3.5942, "step": 13695 }, { "epoch": 0.9308329936132627, "grad_norm": 1.8438689708709717, "learning_rate": 0.0008836968338089415, "loss": 3.5393, "step": 13700 }, { "epoch": 0.9311727136839244, 
"grad_norm": 2.036860466003418, "learning_rate": 0.0008836543688001087, "loss": 3.6127, "step": 13705 }, { "epoch": 0.9315124337545863, "grad_norm": 1.7041594982147217, "learning_rate": 0.0008836119037912759, "loss": 3.6143, "step": 13710 }, { "epoch": 0.931852153825248, "grad_norm": 1.6788290739059448, "learning_rate": 0.0008835694387824434, "loss": 3.633, "step": 13715 }, { "epoch": 0.9321918738959097, "grad_norm": 2.0691516399383545, "learning_rate": 0.0008835269737736106, "loss": 3.5797, "step": 13720 }, { "epoch": 0.9325315939665716, "grad_norm": 1.8358869552612305, "learning_rate": 0.0008834845087647778, "loss": 3.4009, "step": 13725 }, { "epoch": 0.9328713140372333, "grad_norm": 1.8275401592254639, "learning_rate": 0.0008834420437559452, "loss": 3.5539, "step": 13730 }, { "epoch": 0.9332110341078951, "grad_norm": 2.001465320587158, "learning_rate": 0.0008833995787471124, "loss": 3.8054, "step": 13735 }, { "epoch": 0.9335507541785568, "grad_norm": 1.7882152795791626, "learning_rate": 0.0008833571137382796, "loss": 3.5335, "step": 13740 }, { "epoch": 0.9338904742492187, "grad_norm": 2.1471352577209473, "learning_rate": 0.000883314648729447, "loss": 3.8168, "step": 13745 }, { "epoch": 0.9342301943198804, "grad_norm": 2.06244158744812, "learning_rate": 0.0008832721837206143, "loss": 3.3181, "step": 13750 }, { "epoch": 0.9345699143905422, "grad_norm": 1.6277120113372803, "learning_rate": 0.0008832297187117815, "loss": 3.7896, "step": 13755 }, { "epoch": 0.934909634461204, "grad_norm": 2.338618755340576, "learning_rate": 0.0008831872537029489, "loss": 3.6393, "step": 13760 }, { "epoch": 0.9352493545318658, "grad_norm": 1.994065284729004, "learning_rate": 0.0008831447886941161, "loss": 3.5837, "step": 13765 }, { "epoch": 0.9355890746025275, "grad_norm": 1.5918954610824585, "learning_rate": 0.0008831023236852833, "loss": 3.5934, "step": 13770 }, { "epoch": 0.9359287946731893, "grad_norm": 2.4178426265716553, "learning_rate": 0.0008830598586764506, "loss": 3.4583, 
"step": 13775 }, { "epoch": 0.9362685147438511, "grad_norm": 2.1999034881591797, "learning_rate": 0.0008830173936676179, "loss": 3.4973, "step": 13780 }, { "epoch": 0.9366082348145128, "grad_norm": 1.544102430343628, "learning_rate": 0.0008829749286587852, "loss": 3.5908, "step": 13785 }, { "epoch": 0.9369479548851746, "grad_norm": 3.9452357292175293, "learning_rate": 0.0008829324636499525, "loss": 3.544, "step": 13790 }, { "epoch": 0.9372876749558364, "grad_norm": 2.3335533142089844, "learning_rate": 0.0008828899986411198, "loss": 3.6404, "step": 13795 }, { "epoch": 0.9376273950264982, "grad_norm": 1.682096004486084, "learning_rate": 0.000882847533632287, "loss": 3.3652, "step": 13800 }, { "epoch": 0.9379671150971599, "grad_norm": 1.6954643726348877, "learning_rate": 0.0008828050686234543, "loss": 3.6375, "step": 13805 }, { "epoch": 0.9383068351678218, "grad_norm": 1.9315797090530396, "learning_rate": 0.0008827626036146215, "loss": 3.3316, "step": 13810 }, { "epoch": 0.9386465552384835, "grad_norm": 2.085124969482422, "learning_rate": 0.0008827201386057888, "loss": 3.5518, "step": 13815 }, { "epoch": 0.9389862753091452, "grad_norm": 1.5642156600952148, "learning_rate": 0.0008826776735969562, "loss": 3.7192, "step": 13820 }, { "epoch": 0.939325995379807, "grad_norm": 2.5864579677581787, "learning_rate": 0.0008826352085881234, "loss": 3.3588, "step": 13825 }, { "epoch": 0.9396657154504688, "grad_norm": 2.2585256099700928, "learning_rate": 0.0008825927435792907, "loss": 3.7506, "step": 13830 }, { "epoch": 0.9400054355211306, "grad_norm": 2.0215156078338623, "learning_rate": 0.000882550278570458, "loss": 3.4568, "step": 13835 }, { "epoch": 0.9403451555917923, "grad_norm": 1.9118220806121826, "learning_rate": 0.0008825078135616252, "loss": 3.5339, "step": 13840 }, { "epoch": 0.9406848756624542, "grad_norm": 2.175325870513916, "learning_rate": 0.0008824653485527925, "loss": 3.4403, "step": 13845 }, { "epoch": 0.9410245957331159, "grad_norm": 1.548458456993103, 
"learning_rate": 0.0008824228835439598, "loss": 3.6614, "step": 13850 }, { "epoch": 0.9413643158037777, "grad_norm": 1.5792229175567627, "learning_rate": 0.0008823804185351271, "loss": 3.7154, "step": 13855 }, { "epoch": 0.9417040358744395, "grad_norm": 1.9252049922943115, "learning_rate": 0.0008823379535262943, "loss": 3.5996, "step": 13860 }, { "epoch": 0.9420437559451013, "grad_norm": 2.762803554534912, "learning_rate": 0.0008822954885174617, "loss": 3.3641, "step": 13865 }, { "epoch": 0.942383476015763, "grad_norm": 2.3896851539611816, "learning_rate": 0.0008822530235086289, "loss": 3.5844, "step": 13870 }, { "epoch": 0.9427231960864247, "grad_norm": 2.280703067779541, "learning_rate": 0.0008822105584997961, "loss": 3.6131, "step": 13875 }, { "epoch": 0.9430629161570866, "grad_norm": 2.2477965354919434, "learning_rate": 0.0008821680934909635, "loss": 3.5332, "step": 13880 }, { "epoch": 0.9434026362277483, "grad_norm": 2.2133896350860596, "learning_rate": 0.0008821256284821307, "loss": 3.3947, "step": 13885 }, { "epoch": 0.9437423562984101, "grad_norm": 1.9748222827911377, "learning_rate": 0.000882083163473298, "loss": 3.7542, "step": 13890 }, { "epoch": 0.9440820763690719, "grad_norm": 1.9047337770462036, "learning_rate": 0.0008820406984644654, "loss": 3.8037, "step": 13895 }, { "epoch": 0.9444217964397337, "grad_norm": 2.3941590785980225, "learning_rate": 0.0008819982334556326, "loss": 3.5029, "step": 13900 }, { "epoch": 0.9447615165103954, "grad_norm": 1.8878490924835205, "learning_rate": 0.0008819557684467998, "loss": 3.6068, "step": 13905 }, { "epoch": 0.9451012365810572, "grad_norm": 2.1575706005096436, "learning_rate": 0.0008819133034379671, "loss": 3.8305, "step": 13910 }, { "epoch": 0.945440956651719, "grad_norm": 2.066653251647949, "learning_rate": 0.0008818708384291344, "loss": 3.5682, "step": 13915 }, { "epoch": 0.9457806767223808, "grad_norm": 2.023790121078491, "learning_rate": 0.0008818283734203016, "loss": 3.4755, "step": 13920 }, { "epoch": 
0.9461203967930425, "grad_norm": 1.7797702550888062, "learning_rate": 0.000881785908411469, "loss": 3.7667, "step": 13925 }, { "epoch": 0.9464601168637043, "grad_norm": 2.212890625, "learning_rate": 0.0008817434434026363, "loss": 3.5606, "step": 13930 }, { "epoch": 0.9467998369343661, "grad_norm": 2.6029257774353027, "learning_rate": 0.0008817009783938035, "loss": 3.6025, "step": 13935 }, { "epoch": 0.9471395570050278, "grad_norm": 2.448770523071289, "learning_rate": 0.0008816585133849708, "loss": 3.851, "step": 13940 }, { "epoch": 0.9474792770756897, "grad_norm": 2.0460829734802246, "learning_rate": 0.000881616048376138, "loss": 3.6029, "step": 13945 }, { "epoch": 0.9478189971463514, "grad_norm": 2.0398192405700684, "learning_rate": 0.0008815735833673053, "loss": 3.6155, "step": 13950 }, { "epoch": 0.9481587172170132, "grad_norm": 1.6487442255020142, "learning_rate": 0.0008815311183584726, "loss": 3.4849, "step": 13955 }, { "epoch": 0.9484984372876749, "grad_norm": 1.8178051710128784, "learning_rate": 0.00088148865334964, "loss": 3.4801, "step": 13960 }, { "epoch": 0.9488381573583368, "grad_norm": 2.1223785877227783, "learning_rate": 0.0008814461883408072, "loss": 3.4083, "step": 13965 }, { "epoch": 0.9491778774289985, "grad_norm": 2.7028403282165527, "learning_rate": 0.0008814037233319745, "loss": 3.8339, "step": 13970 }, { "epoch": 0.9495175974996602, "grad_norm": 1.934079647064209, "learning_rate": 0.0008813612583231417, "loss": 3.5958, "step": 13975 }, { "epoch": 0.9498573175703221, "grad_norm": 1.8248600959777832, "learning_rate": 0.000881318793314309, "loss": 3.7736, "step": 13980 }, { "epoch": 0.9501970376409838, "grad_norm": 2.5768723487854004, "learning_rate": 0.0008812763283054763, "loss": 3.5166, "step": 13985 }, { "epoch": 0.9505367577116456, "grad_norm": 1.6575852632522583, "learning_rate": 0.0008812338632966435, "loss": 3.5899, "step": 13990 }, { "epoch": 0.9508764777823073, "grad_norm": 2.0672733783721924, "learning_rate": 0.0008811913982878109, 
"loss": 3.6608, "step": 13995 }, { "epoch": 0.9512161978529692, "grad_norm": 1.7439597845077515, "learning_rate": 0.0008811489332789782, "loss": 3.6286, "step": 14000 }, { "epoch": 0.9515559179236309, "grad_norm": 1.6866226196289062, "learning_rate": 0.0008811064682701454, "loss": 3.2907, "step": 14005 }, { "epoch": 0.9518956379942927, "grad_norm": 1.9626355171203613, "learning_rate": 0.0008810640032613127, "loss": 3.4484, "step": 14010 }, { "epoch": 0.9522353580649545, "grad_norm": 1.9160981178283691, "learning_rate": 0.00088102153825248, "loss": 3.632, "step": 14015 }, { "epoch": 0.9525750781356163, "grad_norm": 1.65157151222229, "learning_rate": 0.0008809790732436472, "loss": 3.6366, "step": 14020 }, { "epoch": 0.952914798206278, "grad_norm": 1.9315438270568848, "learning_rate": 0.0008809366082348145, "loss": 3.6753, "step": 14025 }, { "epoch": 0.9532545182769399, "grad_norm": 1.5610301494598389, "learning_rate": 0.0008808941432259819, "loss": 3.5799, "step": 14030 }, { "epoch": 0.9535942383476016, "grad_norm": 1.9898490905761719, "learning_rate": 0.0008808516782171491, "loss": 3.6517, "step": 14035 }, { "epoch": 0.9539339584182633, "grad_norm": 1.8152655363082886, "learning_rate": 0.0008808092132083164, "loss": 3.5779, "step": 14040 }, { "epoch": 0.9542736784889251, "grad_norm": 2.615921974182129, "learning_rate": 0.0008807667481994837, "loss": 3.7027, "step": 14045 }, { "epoch": 0.9546133985595869, "grad_norm": 1.7086725234985352, "learning_rate": 0.0008807242831906509, "loss": 3.8549, "step": 14050 }, { "epoch": 0.9549531186302487, "grad_norm": 1.8458327054977417, "learning_rate": 0.0008806818181818182, "loss": 3.6241, "step": 14055 }, { "epoch": 0.9552928387009104, "grad_norm": 2.273630142211914, "learning_rate": 0.0008806393531729854, "loss": 3.4424, "step": 14060 }, { "epoch": 0.9556325587715723, "grad_norm": 1.9808132648468018, "learning_rate": 0.0008805968881641528, "loss": 3.6167, "step": 14065 }, { "epoch": 0.955972278842234, "grad_norm": 
1.8966611623764038, "learning_rate": 0.0008805544231553201, "loss": 3.5397, "step": 14070 }, { "epoch": 0.9563119989128958, "grad_norm": 2.110846757888794, "learning_rate": 0.0008805119581464873, "loss": 3.6744, "step": 14075 }, { "epoch": 0.9566517189835575, "grad_norm": 2.3147354125976562, "learning_rate": 0.0008804694931376546, "loss": 3.662, "step": 14080 }, { "epoch": 0.9569914390542194, "grad_norm": 1.8781262636184692, "learning_rate": 0.0008804270281288219, "loss": 3.5623, "step": 14085 }, { "epoch": 0.9573311591248811, "grad_norm": 2.008286952972412, "learning_rate": 0.0008803845631199891, "loss": 3.6289, "step": 14090 }, { "epoch": 0.9576708791955428, "grad_norm": 1.8256287574768066, "learning_rate": 0.0008803420981111563, "loss": 3.709, "step": 14095 }, { "epoch": 0.9580105992662047, "grad_norm": 3.7941160202026367, "learning_rate": 0.0008802996331023238, "loss": 3.545, "step": 14100 }, { "epoch": 0.9583503193368664, "grad_norm": 2.4283041954040527, "learning_rate": 0.000880257168093491, "loss": 3.4283, "step": 14105 }, { "epoch": 0.9586900394075282, "grad_norm": 2.161839008331299, "learning_rate": 0.0008802147030846582, "loss": 3.5019, "step": 14110 }, { "epoch": 0.95902975947819, "grad_norm": 2.018364191055298, "learning_rate": 0.0008801722380758256, "loss": 3.5628, "step": 14115 }, { "epoch": 0.9593694795488518, "grad_norm": 1.9626718759536743, "learning_rate": 0.0008801297730669928, "loss": 3.9578, "step": 14120 }, { "epoch": 0.9597091996195135, "grad_norm": 2.057328701019287, "learning_rate": 0.00088008730805816, "loss": 3.8239, "step": 14125 }, { "epoch": 0.9600489196901753, "grad_norm": 1.9675297737121582, "learning_rate": 0.0008800448430493274, "loss": 3.637, "step": 14130 }, { "epoch": 0.9603886397608371, "grad_norm": 2.380305051803589, "learning_rate": 0.0008800023780404947, "loss": 3.5777, "step": 14135 }, { "epoch": 0.9607283598314988, "grad_norm": 2.0745511054992676, "learning_rate": 0.0008799599130316619, "loss": 3.7031, "step": 14140 }, { 
"epoch": 0.9610680799021606, "grad_norm": 1.6235716342926025, "learning_rate": 0.0008799174480228293, "loss": 3.5325, "step": 14145 }, { "epoch": 0.9614077999728224, "grad_norm": 1.8329471349716187, "learning_rate": 0.0008798749830139965, "loss": 3.7797, "step": 14150 }, { "epoch": 0.9617475200434842, "grad_norm": 2.326047658920288, "learning_rate": 0.0008798325180051637, "loss": 3.6245, "step": 14155 }, { "epoch": 0.9620872401141459, "grad_norm": 1.9152110815048218, "learning_rate": 0.000879790052996331, "loss": 3.6645, "step": 14160 }, { "epoch": 0.9624269601848077, "grad_norm": 1.8912233114242554, "learning_rate": 0.0008797475879874983, "loss": 3.6758, "step": 14165 }, { "epoch": 0.9627666802554695, "grad_norm": 1.8569263219833374, "learning_rate": 0.0008797051229786656, "loss": 3.6981, "step": 14170 }, { "epoch": 0.9631064003261313, "grad_norm": 2.0785610675811768, "learning_rate": 0.0008796626579698329, "loss": 3.6404, "step": 14175 }, { "epoch": 0.963446120396793, "grad_norm": 1.9475377798080444, "learning_rate": 0.0008796201929610002, "loss": 3.5752, "step": 14180 }, { "epoch": 0.9637858404674549, "grad_norm": 1.8719618320465088, "learning_rate": 0.0008795777279521674, "loss": 3.7545, "step": 14185 }, { "epoch": 0.9641255605381166, "grad_norm": 1.9271003007888794, "learning_rate": 0.0008795352629433347, "loss": 3.5401, "step": 14190 }, { "epoch": 0.9644652806087783, "grad_norm": 1.9113030433654785, "learning_rate": 0.000879492797934502, "loss": 3.7205, "step": 14195 }, { "epoch": 0.9648050006794402, "grad_norm": 1.6084150075912476, "learning_rate": 0.0008794503329256692, "loss": 3.4979, "step": 14200 }, { "epoch": 0.9651447207501019, "grad_norm": 2.2500150203704834, "learning_rate": 0.0008794078679168366, "loss": 3.5227, "step": 14205 }, { "epoch": 0.9654844408207637, "grad_norm": 1.657189965248108, "learning_rate": 0.0008793654029080038, "loss": 3.4983, "step": 14210 }, { "epoch": 0.9658241608914254, "grad_norm": 1.9175148010253906, "learning_rate": 
0.0008793229378991711, "loss": 3.6557, "step": 14215 }, { "epoch": 0.9661638809620873, "grad_norm": 2.099168062210083, "learning_rate": 0.0008792804728903384, "loss": 3.7074, "step": 14220 }, { "epoch": 0.966503601032749, "grad_norm": 2.2602407932281494, "learning_rate": 0.0008792380078815056, "loss": 3.6237, "step": 14225 }, { "epoch": 0.9668433211034108, "grad_norm": 2.050058364868164, "learning_rate": 0.0008791955428726729, "loss": 3.6235, "step": 14230 }, { "epoch": 0.9671830411740726, "grad_norm": 1.6563403606414795, "learning_rate": 0.0008791615708656068, "loss": 3.7381, "step": 14235 }, { "epoch": 0.9675227612447344, "grad_norm": 2.1692469120025635, "learning_rate": 0.000879119105856774, "loss": 3.4072, "step": 14240 }, { "epoch": 0.9678624813153961, "grad_norm": 1.9360750913619995, "learning_rate": 0.0008790766408479413, "loss": 3.3908, "step": 14245 }, { "epoch": 0.9682022013860578, "grad_norm": 1.7334635257720947, "learning_rate": 0.0008790341758391086, "loss": 3.7509, "step": 14250 }, { "epoch": 0.9685419214567197, "grad_norm": 1.6793569326400757, "learning_rate": 0.0008789917108302758, "loss": 3.683, "step": 14255 }, { "epoch": 0.9688816415273814, "grad_norm": 1.6525400876998901, "learning_rate": 0.0008789492458214432, "loss": 3.5852, "step": 14260 }, { "epoch": 0.9692213615980432, "grad_norm": 1.758606195449829, "learning_rate": 0.0008789067808126105, "loss": 3.5142, "step": 14265 }, { "epoch": 0.969561081668705, "grad_norm": 1.8550235033035278, "learning_rate": 0.0008788643158037777, "loss": 3.5759, "step": 14270 }, { "epoch": 0.9699008017393668, "grad_norm": 2.188565969467163, "learning_rate": 0.000878821850794945, "loss": 3.6337, "step": 14275 }, { "epoch": 0.9702405218100285, "grad_norm": 2.3674464225769043, "learning_rate": 0.0008787793857861123, "loss": 3.5564, "step": 14280 }, { "epoch": 0.9705802418806904, "grad_norm": 2.6420071125030518, "learning_rate": 0.0008787369207772795, "loss": 3.6826, "step": 14285 }, { "epoch": 0.9709199619513521, 
"grad_norm": 1.5170254707336426, "learning_rate": 0.0008786944557684468, "loss": 3.5796, "step": 14290 }, { "epoch": 0.9712596820220138, "grad_norm": 1.9671584367752075, "learning_rate": 0.0008786519907596141, "loss": 3.5884, "step": 14295 }, { "epoch": 0.9715994020926756, "grad_norm": 1.958282709121704, "learning_rate": 0.0008786095257507814, "loss": 3.5791, "step": 14300 }, { "epoch": 0.9719391221633374, "grad_norm": 1.9872221946716309, "learning_rate": 0.0008785670607419487, "loss": 3.7319, "step": 14305 }, { "epoch": 0.9722788422339992, "grad_norm": 2.4104702472686768, "learning_rate": 0.0008785245957331159, "loss": 3.5443, "step": 14310 }, { "epoch": 0.9726185623046609, "grad_norm": 1.8595340251922607, "learning_rate": 0.0008784821307242832, "loss": 3.4087, "step": 14315 }, { "epoch": 0.9729582823753228, "grad_norm": 1.7947633266448975, "learning_rate": 0.0008784396657154505, "loss": 3.5584, "step": 14320 }, { "epoch": 0.9732980024459845, "grad_norm": 2.2689006328582764, "learning_rate": 0.0008783972007066177, "loss": 3.1246, "step": 14325 }, { "epoch": 0.9736377225166463, "grad_norm": 1.8650412559509277, "learning_rate": 0.0008783547356977851, "loss": 3.6036, "step": 14330 }, { "epoch": 0.973977442587308, "grad_norm": 1.6404372453689575, "learning_rate": 0.0008783122706889524, "loss": 3.6964, "step": 14335 }, { "epoch": 0.9743171626579699, "grad_norm": 1.3603976964950562, "learning_rate": 0.0008782698056801196, "loss": 3.5951, "step": 14340 }, { "epoch": 0.9746568827286316, "grad_norm": 1.675060749053955, "learning_rate": 0.0008782273406712868, "loss": 3.6999, "step": 14345 }, { "epoch": 0.9749966027992933, "grad_norm": 2.030258893966675, "learning_rate": 0.0008781848756624542, "loss": 3.6385, "step": 14350 }, { "epoch": 0.9753363228699552, "grad_norm": 1.77107572555542, "learning_rate": 0.0008781424106536214, "loss": 3.2339, "step": 14355 }, { "epoch": 0.9756760429406169, "grad_norm": 1.883176326751709, "learning_rate": 0.0008780999456447886, "loss": 3.7962, 
"step": 14360 }, { "epoch": 0.9760157630112787, "grad_norm": 1.5501704216003418, "learning_rate": 0.0008780574806359561, "loss": 3.5637, "step": 14365 }, { "epoch": 0.9763554830819405, "grad_norm": 1.8720595836639404, "learning_rate": 0.0008780150156271233, "loss": 3.7562, "step": 14370 }, { "epoch": 0.9766952031526023, "grad_norm": 1.886540412902832, "learning_rate": 0.0008779725506182905, "loss": 3.3532, "step": 14375 }, { "epoch": 0.977034923223264, "grad_norm": 2.0105247497558594, "learning_rate": 0.0008779300856094579, "loss": 3.6961, "step": 14380 }, { "epoch": 0.9773746432939258, "grad_norm": 1.5189409255981445, "learning_rate": 0.0008778876206006251, "loss": 3.3253, "step": 14385 }, { "epoch": 0.9777143633645876, "grad_norm": 2.114211082458496, "learning_rate": 0.0008778451555917923, "loss": 3.4583, "step": 14390 }, { "epoch": 0.9780540834352494, "grad_norm": 1.846476674079895, "learning_rate": 0.0008778026905829596, "loss": 3.7544, "step": 14395 }, { "epoch": 0.9783938035059111, "grad_norm": 2.0927884578704834, "learning_rate": 0.000877760225574127, "loss": 3.5852, "step": 14400 }, { "epoch": 0.978733523576573, "grad_norm": 2.1687724590301514, "learning_rate": 0.0008777177605652942, "loss": 3.4618, "step": 14405 }, { "epoch": 0.9790732436472347, "grad_norm": 1.9131320714950562, "learning_rate": 0.0008776752955564615, "loss": 3.4809, "step": 14410 }, { "epoch": 0.9794129637178964, "grad_norm": 1.8330297470092773, "learning_rate": 0.0008776328305476288, "loss": 3.7457, "step": 14415 }, { "epoch": 0.9797526837885582, "grad_norm": 2.344428062438965, "learning_rate": 0.000877590365538796, "loss": 3.3617, "step": 14420 }, { "epoch": 0.98009240385922, "grad_norm": 1.9969631433486938, "learning_rate": 0.0008775479005299633, "loss": 3.3865, "step": 14425 }, { "epoch": 0.9804321239298818, "grad_norm": 2.392282247543335, "learning_rate": 0.0008775054355211305, "loss": 3.6711, "step": 14430 }, { "epoch": 0.9807718440005435, "grad_norm": 3.7935218811035156, 
"learning_rate": 0.0008774629705122979, "loss": 3.2586, "step": 14435 }, { "epoch": 0.9811115640712054, "grad_norm": 2.1364338397979736, "learning_rate": 0.0008774205055034652, "loss": 3.6085, "step": 14440 }, { "epoch": 0.9814512841418671, "grad_norm": 1.9110225439071655, "learning_rate": 0.0008773780404946324, "loss": 3.665, "step": 14445 }, { "epoch": 0.9817910042125289, "grad_norm": 2.356152057647705, "learning_rate": 0.0008773355754857997, "loss": 3.3551, "step": 14450 }, { "epoch": 0.9821307242831907, "grad_norm": 1.4923828840255737, "learning_rate": 0.000877293110476967, "loss": 3.6514, "step": 14455 }, { "epoch": 0.9824704443538524, "grad_norm": 2.226295232772827, "learning_rate": 0.0008772506454681342, "loss": 3.4791, "step": 14460 }, { "epoch": 0.9828101644245142, "grad_norm": 1.6158342361450195, "learning_rate": 0.0008772081804593015, "loss": 3.6852, "step": 14465 }, { "epoch": 0.9831498844951759, "grad_norm": 2.2143540382385254, "learning_rate": 0.0008771657154504689, "loss": 3.6588, "step": 14470 }, { "epoch": 0.9834896045658378, "grad_norm": 2.1933815479278564, "learning_rate": 0.0008771232504416361, "loss": 3.3371, "step": 14475 }, { "epoch": 0.9838293246364995, "grad_norm": 2.3256137371063232, "learning_rate": 0.0008770807854328033, "loss": 3.4863, "step": 14480 }, { "epoch": 0.9841690447071613, "grad_norm": 1.8497118949890137, "learning_rate": 0.0008770383204239707, "loss": 3.6753, "step": 14485 }, { "epoch": 0.9845087647778231, "grad_norm": 1.907450795173645, "learning_rate": 0.0008769958554151379, "loss": 3.6065, "step": 14490 }, { "epoch": 0.9848484848484849, "grad_norm": 2.623647689819336, "learning_rate": 0.0008769533904063051, "loss": 3.8286, "step": 14495 }, { "epoch": 0.9851882049191466, "grad_norm": 2.073270320892334, "learning_rate": 0.0008769109253974726, "loss": 3.6561, "step": 14500 }, { "epoch": 0.9855279249898083, "grad_norm": 2.070570230484009, "learning_rate": 0.0008768684603886398, "loss": 3.5716, "step": 14505 }, { "epoch": 
0.9858676450604702, "grad_norm": 2.3070898056030273, "learning_rate": 0.000876825995379807, "loss": 3.4499, "step": 14510 }, { "epoch": 0.9862073651311319, "grad_norm": 1.9124088287353516, "learning_rate": 0.0008767835303709744, "loss": 3.689, "step": 14515 }, { "epoch": 0.9865470852017937, "grad_norm": 1.8679999113082886, "learning_rate": 0.0008767410653621416, "loss": 3.4075, "step": 14520 }, { "epoch": 0.9868868052724555, "grad_norm": 1.5048828125, "learning_rate": 0.0008766986003533088, "loss": 3.3953, "step": 14525 }, { "epoch": 0.9872265253431173, "grad_norm": 1.691528558731079, "learning_rate": 0.0008766561353444761, "loss": 3.5979, "step": 14530 }, { "epoch": 0.987566245413779, "grad_norm": 1.9970310926437378, "learning_rate": 0.0008766136703356435, "loss": 3.5496, "step": 14535 }, { "epoch": 0.9879059654844409, "grad_norm": 2.662597894668579, "learning_rate": 0.0008765712053268107, "loss": 3.7332, "step": 14540 }, { "epoch": 0.9882456855551026, "grad_norm": 1.4763269424438477, "learning_rate": 0.000876528740317978, "loss": 3.479, "step": 14545 }, { "epoch": 0.9885854056257644, "grad_norm": 2.085873603820801, "learning_rate": 0.0008764862753091453, "loss": 3.5741, "step": 14550 }, { "epoch": 0.9889251256964261, "grad_norm": 2.804462432861328, "learning_rate": 0.0008764438103003126, "loss": 3.474, "step": 14555 }, { "epoch": 0.989264845767088, "grad_norm": 1.8467350006103516, "learning_rate": 0.0008764013452914798, "loss": 3.6018, "step": 14560 }, { "epoch": 0.9896045658377497, "grad_norm": 1.8074089288711548, "learning_rate": 0.0008763588802826471, "loss": 3.6293, "step": 14565 }, { "epoch": 0.9899442859084114, "grad_norm": 1.3856440782546997, "learning_rate": 0.0008763164152738145, "loss": 3.4326, "step": 14570 }, { "epoch": 0.9902840059790733, "grad_norm": 2.499509572982788, "learning_rate": 0.0008762739502649817, "loss": 3.7619, "step": 14575 }, { "epoch": 0.990623726049735, "grad_norm": 2.293208360671997, "learning_rate": 0.000876231485256149, "loss": 
3.5048, "step": 14580 }, { "epoch": 0.9909634461203968, "grad_norm": 1.8815727233886719, "learning_rate": 0.0008761890202473163, "loss": 3.5133, "step": 14585 }, { "epoch": 0.9913031661910585, "grad_norm": 1.5546807050704956, "learning_rate": 0.0008761465552384835, "loss": 3.7322, "step": 14590 }, { "epoch": 0.9916428862617204, "grad_norm": 1.9158265590667725, "learning_rate": 0.0008761040902296507, "loss": 3.4699, "step": 14595 }, { "epoch": 0.9919826063323821, "grad_norm": 1.696599006652832, "learning_rate": 0.0008760616252208181, "loss": 3.7198, "step": 14600 }, { "epoch": 0.9923223264030439, "grad_norm": 2.623621702194214, "learning_rate": 0.0008760191602119854, "loss": 3.7944, "step": 14605 }, { "epoch": 0.9926620464737057, "grad_norm": 2.0798606872558594, "learning_rate": 0.0008759766952031526, "loss": 3.4825, "step": 14610 }, { "epoch": 0.9930017665443674, "grad_norm": 1.950784683227539, "learning_rate": 0.00087593423019432, "loss": 3.6423, "step": 14615 }, { "epoch": 0.9933414866150292, "grad_norm": 1.9024003744125366, "learning_rate": 0.0008758917651854872, "loss": 3.4939, "step": 14620 }, { "epoch": 0.993681206685691, "grad_norm": 1.971192717552185, "learning_rate": 0.0008758493001766544, "loss": 3.5999, "step": 14625 }, { "epoch": 0.9940209267563528, "grad_norm": 1.7604292631149292, "learning_rate": 0.0008758068351678218, "loss": 3.8051, "step": 14630 }, { "epoch": 0.9943606468270145, "grad_norm": 2.355717658996582, "learning_rate": 0.000875764370158989, "loss": 3.4761, "step": 14635 }, { "epoch": 0.9947003668976763, "grad_norm": 1.68112051486969, "learning_rate": 0.0008757219051501563, "loss": 3.6887, "step": 14640 }, { "epoch": 0.9950400869683381, "grad_norm": 2.5398108959198, "learning_rate": 0.0008756794401413236, "loss": 3.522, "step": 14645 }, { "epoch": 0.9953798070389999, "grad_norm": 2.0221140384674072, "learning_rate": 0.0008756369751324909, "loss": 3.5504, "step": 14650 }, { "epoch": 0.9957195271096616, "grad_norm": 2.4508180618286133, 
"learning_rate": 0.0008755945101236581, "loss": 3.502, "step": 14655 }, { "epoch": 0.9960592471803235, "grad_norm": 1.9425421953201294, "learning_rate": 0.0008755520451148254, "loss": 3.534, "step": 14660 }, { "epoch": 0.9963989672509852, "grad_norm": 2.107790470123291, "learning_rate": 0.0008755095801059927, "loss": 3.7692, "step": 14665 }, { "epoch": 0.9967386873216469, "grad_norm": 1.9684144258499146, "learning_rate": 0.0008754671150971599, "loss": 3.7525, "step": 14670 }, { "epoch": 0.9970784073923087, "grad_norm": 1.72555673122406, "learning_rate": 0.0008754246500883273, "loss": 3.6211, "step": 14675 }, { "epoch": 0.9974181274629705, "grad_norm": 1.8420687913894653, "learning_rate": 0.0008753821850794946, "loss": 3.6951, "step": 14680 }, { "epoch": 0.9977578475336323, "grad_norm": 2.2355220317840576, "learning_rate": 0.0008753397200706618, "loss": 3.5973, "step": 14685 }, { "epoch": 0.998097567604294, "grad_norm": 1.5263309478759766, "learning_rate": 0.0008752972550618291, "loss": 3.5815, "step": 14690 }, { "epoch": 0.9984372876749559, "grad_norm": 2.000595808029175, "learning_rate": 0.0008752547900529963, "loss": 3.8705, "step": 14695 }, { "epoch": 0.9987770077456176, "grad_norm": 1.916430950164795, "learning_rate": 0.0008752123250441636, "loss": 3.7767, "step": 14700 }, { "epoch": 0.9991167278162794, "grad_norm": 2.352318286895752, "learning_rate": 0.0008751698600353309, "loss": 3.4905, "step": 14705 }, { "epoch": 0.9994564478869412, "grad_norm": 2.6229474544525146, "learning_rate": 0.0008751273950264982, "loss": 3.6125, "step": 14710 }, { "epoch": 0.999796167957603, "grad_norm": 2.0234603881835938, "learning_rate": 0.0008750849300176655, "loss": 3.8394, "step": 14715 }, { "epoch": 1.0, "eval_bertscore": { "f1": 0.8271505057216788, "precision": 0.823843175992449, "recall": 0.8317866168958825 }, "eval_bleu_4": 0.008253589206003952, "eval_exact_match": 0.0, "eval_loss": 3.523176670074463, "eval_meteor": 0.09454361981482114, "eval_rouge": { "rouge1": 
0.11048743266061209, "rouge2": 0.012000958608259478, "rougeL": 0.09103457598412869, "rougeLsum": 0.09105801089781293 }, "eval_runtime": 1847.1084, "eval_samples_per_second": 5.587, "eval_steps_per_second": 0.698, "step": 14718 }, { "epoch": 1.0001358880282647, "grad_norm": 1.9600398540496826, "learning_rate": 0.0008750424650088328, "loss": 3.5062, "step": 14720 }, { "epoch": 1.0004756080989265, "grad_norm": 1.756846308708191, "learning_rate": 0.000875, "loss": 3.5062, "step": 14725 }, { "epoch": 1.0008153281695882, "grad_norm": 2.492629289627075, "learning_rate": 0.0008749575349911672, "loss": 3.5044, "step": 14730 }, { "epoch": 1.00115504824025, "grad_norm": 1.8805475234985352, "learning_rate": 0.0008749150699823346, "loss": 3.5549, "step": 14735 }, { "epoch": 1.0014947683109119, "grad_norm": 1.9759495258331299, "learning_rate": 0.0008748726049735018, "loss": 3.5416, "step": 14740 }, { "epoch": 1.0018344883815735, "grad_norm": 2.5048298835754395, "learning_rate": 0.0008748301399646691, "loss": 3.652, "step": 14745 }, { "epoch": 1.0021742084522354, "grad_norm": 2.251340866088867, "learning_rate": 0.0008747876749558365, "loss": 3.3788, "step": 14750 }, { "epoch": 1.0025139285228972, "grad_norm": 1.9815601110458374, "learning_rate": 0.0008747452099470037, "loss": 3.329, "step": 14755 }, { "epoch": 1.0028536485935589, "grad_norm": 2.839458703994751, "learning_rate": 0.0008747027449381709, "loss": 3.6593, "step": 14760 }, { "epoch": 1.0031933686642207, "grad_norm": 1.9316380023956299, "learning_rate": 0.0008746602799293383, "loss": 3.5893, "step": 14765 }, { "epoch": 1.0035330887348826, "grad_norm": 1.9875068664550781, "learning_rate": 0.0008746178149205055, "loss": 3.4399, "step": 14770 }, { "epoch": 1.0038728088055442, "grad_norm": 1.9922810792922974, "learning_rate": 0.0008745753499116727, "loss": 3.5616, "step": 14775 }, { "epoch": 1.004212528876206, "grad_norm": 2.1677908897399902, "learning_rate": 0.0008745328849028402, "loss": 3.6393, "step": 14780 }, { "epoch": 
1.0045522489468677, "grad_norm": 2.23287034034729, "learning_rate": 0.0008744904198940074, "loss": 3.6354, "step": 14785 }, { "epoch": 1.0048919690175295, "grad_norm": 2.6216776371002197, "learning_rate": 0.0008744479548851746, "loss": 3.6812, "step": 14790 }, { "epoch": 1.0052316890881914, "grad_norm": 2.2527360916137695, "learning_rate": 0.0008744054898763419, "loss": 3.4842, "step": 14795 }, { "epoch": 1.005571409158853, "grad_norm": 1.9554387331008911, "learning_rate": 0.0008743630248675092, "loss": 3.5281, "step": 14800 }, { "epoch": 1.0059111292295149, "grad_norm": 2.0110440254211426, "learning_rate": 0.0008743205598586764, "loss": 3.549, "step": 14805 }, { "epoch": 1.0062508493001767, "grad_norm": 2.156892776489258, "learning_rate": 0.0008742780948498437, "loss": 3.4568, "step": 14810 }, { "epoch": 1.0065905693708384, "grad_norm": 1.768970012664795, "learning_rate": 0.0008742356298410111, "loss": 3.3049, "step": 14815 }, { "epoch": 1.0069302894415002, "grad_norm": 2.2894952297210693, "learning_rate": 0.0008741931648321783, "loss": 3.8342, "step": 14820 }, { "epoch": 1.007270009512162, "grad_norm": 2.709646463394165, "learning_rate": 0.0008741506998233456, "loss": 3.6389, "step": 14825 }, { "epoch": 1.0076097295828237, "grad_norm": 2.283017873764038, "learning_rate": 0.0008741082348145128, "loss": 3.5976, "step": 14830 }, { "epoch": 1.0079494496534855, "grad_norm": 2.2111623287200928, "learning_rate": 0.0008740657698056801, "loss": 3.5878, "step": 14835 }, { "epoch": 1.0082891697241474, "grad_norm": 2.0433878898620605, "learning_rate": 0.0008740233047968474, "loss": 3.561, "step": 14840 }, { "epoch": 1.008628889794809, "grad_norm": 1.9061963558197021, "learning_rate": 0.0008739808397880146, "loss": 3.9203, "step": 14845 }, { "epoch": 1.0089686098654709, "grad_norm": 2.3698482513427734, "learning_rate": 0.000873938374779182, "loss": 3.8258, "step": 14850 }, { "epoch": 1.0093083299361327, "grad_norm": 1.6374750137329102, "learning_rate": 0.0008738959097703493, 
"loss": 3.8592, "step": 14855 }, { "epoch": 1.0096480500067944, "grad_norm": 2.2485101222991943, "learning_rate": 0.0008738534447615165, "loss": 3.4668, "step": 14860 }, { "epoch": 1.0099877700774562, "grad_norm": 1.947045922279358, "learning_rate": 0.0008738109797526838, "loss": 3.4477, "step": 14865 }, { "epoch": 1.0103274901481178, "grad_norm": 2.0801782608032227, "learning_rate": 0.0008737685147438511, "loss": 3.5998, "step": 14870 }, { "epoch": 1.0106672102187797, "grad_norm": 1.9803661108016968, "learning_rate": 0.0008737260497350183, "loss": 3.5518, "step": 14875 }, { "epoch": 1.0110069302894416, "grad_norm": 2.632214069366455, "learning_rate": 0.0008736835847261855, "loss": 3.5243, "step": 14880 }, { "epoch": 1.0113466503601032, "grad_norm": 1.9873261451721191, "learning_rate": 0.000873641119717353, "loss": 3.6721, "step": 14885 }, { "epoch": 1.011686370430765, "grad_norm": 2.197680950164795, "learning_rate": 0.0008735986547085202, "loss": 3.8562, "step": 14890 }, { "epoch": 1.012026090501427, "grad_norm": 1.628758430480957, "learning_rate": 0.0008735561896996874, "loss": 3.4536, "step": 14895 }, { "epoch": 1.0123658105720885, "grad_norm": 1.9390413761138916, "learning_rate": 0.0008735137246908548, "loss": 3.3752, "step": 14900 }, { "epoch": 1.0127055306427504, "grad_norm": 1.823377251625061, "learning_rate": 0.000873471259682022, "loss": 3.8354, "step": 14905 }, { "epoch": 1.0130452507134122, "grad_norm": 1.616235375404358, "learning_rate": 0.0008734287946731893, "loss": 3.7407, "step": 14910 }, { "epoch": 1.0133849707840739, "grad_norm": 2.321716070175171, "learning_rate": 0.0008733863296643566, "loss": 3.4965, "step": 14915 }, { "epoch": 1.0137246908547357, "grad_norm": 2.0638267993927, "learning_rate": 0.0008733438646555239, "loss": 3.4007, "step": 14920 }, { "epoch": 1.0140644109253976, "grad_norm": 1.7602989673614502, "learning_rate": 0.0008733013996466912, "loss": 3.57, "step": 14925 }, { "epoch": 1.0144041309960592, "grad_norm": 2.356074333190918, 
"learning_rate": 0.0008732589346378584, "loss": 3.4928, "step": 14930 }, { "epoch": 1.014743851066721, "grad_norm": 2.090263843536377, "learning_rate": 0.0008732164696290257, "loss": 3.4219, "step": 14935 }, { "epoch": 1.015083571137383, "grad_norm": 1.6300550699234009, "learning_rate": 0.000873174004620193, "loss": 3.402, "step": 14940 }, { "epoch": 1.0154232912080445, "grad_norm": 1.843530297279358, "learning_rate": 0.0008731315396113602, "loss": 3.7725, "step": 14945 }, { "epoch": 1.0157630112787064, "grad_norm": 1.8249263763427734, "learning_rate": 0.0008730890746025275, "loss": 3.7284, "step": 14950 }, { "epoch": 1.016102731349368, "grad_norm": 2.520519495010376, "learning_rate": 0.0008730466095936949, "loss": 3.5471, "step": 14955 }, { "epoch": 1.0164424514200299, "grad_norm": 1.759166955947876, "learning_rate": 0.0008730041445848621, "loss": 3.5697, "step": 14960 }, { "epoch": 1.0167821714906917, "grad_norm": 2.322253942489624, "learning_rate": 0.0008729616795760294, "loss": 3.5371, "step": 14965 }, { "epoch": 1.0171218915613534, "grad_norm": 1.9714688062667847, "learning_rate": 0.0008729192145671967, "loss": 3.2812, "step": 14970 }, { "epoch": 1.0174616116320152, "grad_norm": 1.999167561531067, "learning_rate": 0.0008728767495583639, "loss": 3.5298, "step": 14975 }, { "epoch": 1.017801331702677, "grad_norm": 2.0062918663024902, "learning_rate": 0.0008728342845495311, "loss": 3.4366, "step": 14980 }, { "epoch": 1.0181410517733387, "grad_norm": 2.038877010345459, "learning_rate": 0.0008727918195406986, "loss": 3.7467, "step": 14985 }, { "epoch": 1.0184807718440005, "grad_norm": 2.078812837600708, "learning_rate": 0.0008727493545318658, "loss": 3.4735, "step": 14990 }, { "epoch": 1.0188204919146624, "grad_norm": 2.5759191513061523, "learning_rate": 0.000872706889523033, "loss": 3.6205, "step": 14995 }, { "epoch": 1.019160211985324, "grad_norm": 1.8168561458587646, "learning_rate": 0.0008726644245142004, "loss": 3.5415, "step": 15000 }, { "epoch": 
1.0194999320559859, "grad_norm": 2.0758819580078125, "learning_rate": 0.0008726219595053676, "loss": 3.4031, "step": 15005 }, { "epoch": 1.0198396521266477, "grad_norm": 1.6748212575912476, "learning_rate": 0.0008725794944965348, "loss": 3.6025, "step": 15010 }, { "epoch": 1.0201793721973094, "grad_norm": 1.604222059249878, "learning_rate": 0.0008725370294877022, "loss": 3.565, "step": 15015 }, { "epoch": 1.0205190922679712, "grad_norm": 2.3513643741607666, "learning_rate": 0.0008724945644788695, "loss": 3.2855, "step": 15020 }, { "epoch": 1.020858812338633, "grad_norm": 2.285306453704834, "learning_rate": 0.0008724520994700367, "loss": 3.5821, "step": 15025 }, { "epoch": 1.0211985324092947, "grad_norm": 2.343316078186035, "learning_rate": 0.000872409634461204, "loss": 3.3621, "step": 15030 }, { "epoch": 1.0215382524799566, "grad_norm": 1.6394914388656616, "learning_rate": 0.0008723671694523713, "loss": 3.7695, "step": 15035 }, { "epoch": 1.0218779725506182, "grad_norm": 1.8996260166168213, "learning_rate": 0.0008723247044435385, "loss": 3.6098, "step": 15040 }, { "epoch": 1.02221769262128, "grad_norm": 2.0852506160736084, "learning_rate": 0.0008722822394347058, "loss": 3.5591, "step": 15045 }, { "epoch": 1.022557412691942, "grad_norm": 2.556098699569702, "learning_rate": 0.0008722397744258731, "loss": 3.5948, "step": 15050 }, { "epoch": 1.0228971327626035, "grad_norm": 1.9550580978393555, "learning_rate": 0.0008721973094170404, "loss": 3.5474, "step": 15055 }, { "epoch": 1.0232368528332654, "grad_norm": 1.8626058101654053, "learning_rate": 0.0008721548444082077, "loss": 3.5819, "step": 15060 }, { "epoch": 1.0235765729039272, "grad_norm": 2.115236282348633, "learning_rate": 0.000872112379399375, "loss": 3.4233, "step": 15065 }, { "epoch": 1.0239162929745889, "grad_norm": 2.4447622299194336, "learning_rate": 0.0008720699143905422, "loss": 3.4425, "step": 15070 }, { "epoch": 1.0242560130452507, "grad_norm": 2.153507947921753, "learning_rate": 0.0008720274493817095, 
"loss": 3.5896, "step": 15075 }, { "epoch": 1.0245957331159126, "grad_norm": 1.8670068979263306, "learning_rate": 0.0008719849843728767, "loss": 3.7338, "step": 15080 }, { "epoch": 1.0249354531865742, "grad_norm": 2.0701053142547607, "learning_rate": 0.000871942519364044, "loss": 3.5364, "step": 15085 }, { "epoch": 1.025275173257236, "grad_norm": 1.9039487838745117, "learning_rate": 0.0008719000543552114, "loss": 3.5956, "step": 15090 }, { "epoch": 1.025614893327898, "grad_norm": 2.009763479232788, "learning_rate": 0.0008718575893463786, "loss": 3.6745, "step": 15095 }, { "epoch": 1.0259546133985595, "grad_norm": 1.8109617233276367, "learning_rate": 0.0008718151243375459, "loss": 3.5595, "step": 15100 }, { "epoch": 1.0262943334692214, "grad_norm": 2.0305261611938477, "learning_rate": 0.0008717726593287132, "loss": 3.567, "step": 15105 }, { "epoch": 1.0266340535398832, "grad_norm": 2.6199662685394287, "learning_rate": 0.0008717301943198804, "loss": 3.5314, "step": 15110 }, { "epoch": 1.0269737736105449, "grad_norm": 2.6551475524902344, "learning_rate": 0.0008716877293110476, "loss": 3.5626, "step": 15115 }, { "epoch": 1.0273134936812067, "grad_norm": 1.8297673463821411, "learning_rate": 0.000871645264302215, "loss": 3.7037, "step": 15120 }, { "epoch": 1.0276532137518684, "grad_norm": 1.9987664222717285, "learning_rate": 0.0008716027992933823, "loss": 3.4725, "step": 15125 }, { "epoch": 1.0279929338225302, "grad_norm": 2.242072343826294, "learning_rate": 0.0008715603342845495, "loss": 3.5399, "step": 15130 }, { "epoch": 1.028332653893192, "grad_norm": 2.3132078647613525, "learning_rate": 0.0008715178692757169, "loss": 3.5877, "step": 15135 }, { "epoch": 1.0286723739638537, "grad_norm": 1.6324824094772339, "learning_rate": 0.0008714754042668841, "loss": 3.6744, "step": 15140 }, { "epoch": 1.0290120940345155, "grad_norm": 1.9013210535049438, "learning_rate": 0.0008714329392580513, "loss": 3.4664, "step": 15145 }, { "epoch": 1.0293518141051774, "grad_norm": 
2.1483397483825684, "learning_rate": 0.0008713904742492187, "loss": 3.6681, "step": 15150 }, { "epoch": 1.029691534175839, "grad_norm": 1.8703644275665283, "learning_rate": 0.0008713480092403859, "loss": 3.5567, "step": 15155 }, { "epoch": 1.0300312542465009, "grad_norm": 1.7782245874404907, "learning_rate": 0.0008713055442315532, "loss": 3.6556, "step": 15160 }, { "epoch": 1.0303709743171627, "grad_norm": 1.784519076347351, "learning_rate": 0.0008712630792227206, "loss": 3.5137, "step": 15165 }, { "epoch": 1.0307106943878244, "grad_norm": 1.48995041847229, "learning_rate": 0.0008712206142138878, "loss": 3.7038, "step": 15170 }, { "epoch": 1.0310504144584862, "grad_norm": 2.2664825916290283, "learning_rate": 0.000871178149205055, "loss": 3.357, "step": 15175 }, { "epoch": 1.031390134529148, "grad_norm": 2.355320453643799, "learning_rate": 0.0008711356841962223, "loss": 3.7325, "step": 15180 }, { "epoch": 1.0317298545998097, "grad_norm": 1.6314455270767212, "learning_rate": 0.0008710932191873896, "loss": 3.599, "step": 15185 }, { "epoch": 1.0320695746704716, "grad_norm": 2.141585111618042, "learning_rate": 0.0008710507541785568, "loss": 3.72, "step": 15190 }, { "epoch": 1.0324092947411334, "grad_norm": 1.7741777896881104, "learning_rate": 0.0008710082891697242, "loss": 3.565, "step": 15195 }, { "epoch": 1.032749014811795, "grad_norm": 1.7357232570648193, "learning_rate": 0.0008709658241608915, "loss": 3.5243, "step": 15200 }, { "epoch": 1.033088734882457, "grad_norm": 2.0762720108032227, "learning_rate": 0.0008709233591520587, "loss": 3.5961, "step": 15205 }, { "epoch": 1.0334284549531185, "grad_norm": 2.4996390342712402, "learning_rate": 0.000870880894143226, "loss": 3.6297, "step": 15210 }, { "epoch": 1.0337681750237804, "grad_norm": 1.5952856540679932, "learning_rate": 0.0008708384291343932, "loss": 3.6805, "step": 15215 }, { "epoch": 1.0341078950944422, "grad_norm": 1.809901475906372, "learning_rate": 0.0008707959641255605, "loss": 3.6638, "step": 15220 }, { 
"epoch": 1.0344476151651039, "grad_norm": 2.638607978820801, "learning_rate": 0.0008707534991167278, "loss": 3.3801, "step": 15225 }, { "epoch": 1.0347873352357657, "grad_norm": 2.0857088565826416, "learning_rate": 0.0008707110341078951, "loss": 3.5528, "step": 15230 }, { "epoch": 1.0351270553064276, "grad_norm": 1.6545789241790771, "learning_rate": 0.0008706685690990624, "loss": 3.7786, "step": 15235 }, { "epoch": 1.0354667753770892, "grad_norm": 1.9468508958816528, "learning_rate": 0.0008706261040902297, "loss": 3.5281, "step": 15240 }, { "epoch": 1.035806495447751, "grad_norm": 2.2829411029815674, "learning_rate": 0.0008705836390813969, "loss": 3.6717, "step": 15245 }, { "epoch": 1.036146215518413, "grad_norm": 1.6794816255569458, "learning_rate": 0.0008705411740725643, "loss": 3.6235, "step": 15250 }, { "epoch": 1.0364859355890745, "grad_norm": 2.0991859436035156, "learning_rate": 0.0008704987090637315, "loss": 3.6279, "step": 15255 }, { "epoch": 1.0368256556597364, "grad_norm": 1.9176138639450073, "learning_rate": 0.0008704562440548987, "loss": 3.6579, "step": 15260 }, { "epoch": 1.0371653757303982, "grad_norm": 1.7494940757751465, "learning_rate": 0.0008704137790460662, "loss": 3.5441, "step": 15265 }, { "epoch": 1.0375050958010599, "grad_norm": 1.6687918901443481, "learning_rate": 0.0008703713140372334, "loss": 3.582, "step": 15270 }, { "epoch": 1.0378448158717217, "grad_norm": 2.347456693649292, "learning_rate": 0.0008703288490284006, "loss": 3.3869, "step": 15275 }, { "epoch": 1.0381845359423836, "grad_norm": 2.175917387008667, "learning_rate": 0.0008702863840195679, "loss": 3.6213, "step": 15280 }, { "epoch": 1.0385242560130452, "grad_norm": 1.9970771074295044, "learning_rate": 0.0008702439190107352, "loss": 3.6866, "step": 15285 }, { "epoch": 1.038863976083707, "grad_norm": 1.4655038118362427, "learning_rate": 0.0008702014540019024, "loss": 3.9259, "step": 15290 }, { "epoch": 1.0392036961543687, "grad_norm": 2.450071334838867, "learning_rate": 
0.0008701589889930697, "loss": 3.5409, "step": 15295 }, { "epoch": 1.0395434162250305, "grad_norm": 2.096477746963501, "learning_rate": 0.0008701165239842371, "loss": 3.5731, "step": 15300 }, { "epoch": 1.0398831362956924, "grad_norm": 2.287121295928955, "learning_rate": 0.0008700740589754043, "loss": 3.613, "step": 15305 }, { "epoch": 1.040222856366354, "grad_norm": 2.4249353408813477, "learning_rate": 0.0008700315939665716, "loss": 3.2185, "step": 15310 }, { "epoch": 1.0405625764370159, "grad_norm": 2.1367290019989014, "learning_rate": 0.0008699891289577389, "loss": 3.6703, "step": 15315 }, { "epoch": 1.0409022965076777, "grad_norm": 2.4468767642974854, "learning_rate": 0.0008699466639489061, "loss": 3.506, "step": 15320 }, { "epoch": 1.0412420165783394, "grad_norm": 1.8760778903961182, "learning_rate": 0.0008699041989400734, "loss": 3.5118, "step": 15325 }, { "epoch": 1.0415817366490012, "grad_norm": 2.0411217212677, "learning_rate": 0.0008698617339312406, "loss": 3.5707, "step": 15330 }, { "epoch": 1.041921456719663, "grad_norm": 2.0136330127716064, "learning_rate": 0.000869819268922408, "loss": 3.6508, "step": 15335 }, { "epoch": 1.0422611767903247, "grad_norm": 1.820474624633789, "learning_rate": 0.0008697768039135753, "loss": 3.8672, "step": 15340 }, { "epoch": 1.0426008968609866, "grad_norm": 1.8654916286468506, "learning_rate": 0.0008697343389047425, "loss": 3.5876, "step": 15345 }, { "epoch": 1.0429406169316484, "grad_norm": 1.8968696594238281, "learning_rate": 0.0008696918738959098, "loss": 3.4002, "step": 15350 }, { "epoch": 1.04328033700231, "grad_norm": 2.029388904571533, "learning_rate": 0.0008696494088870771, "loss": 3.3272, "step": 15355 }, { "epoch": 1.043620057072972, "grad_norm": 1.8764584064483643, "learning_rate": 0.0008696069438782443, "loss": 3.4903, "step": 15360 }, { "epoch": 1.0439597771436337, "grad_norm": 2.4578495025634766, "learning_rate": 0.0008695644788694115, "loss": 3.2984, "step": 15365 }, { "epoch": 1.0442994972142954, 
"grad_norm": 2.0979020595550537, "learning_rate": 0.000869522013860579, "loss": 3.6668, "step": 15370 }, { "epoch": 1.0446392172849572, "grad_norm": 1.9527559280395508, "learning_rate": 0.0008694795488517462, "loss": 3.5358, "step": 15375 }, { "epoch": 1.0449789373556189, "grad_norm": 1.7633733749389648, "learning_rate": 0.0008694370838429134, "loss": 3.4379, "step": 15380 }, { "epoch": 1.0453186574262807, "grad_norm": 2.5454318523406982, "learning_rate": 0.0008693946188340808, "loss": 3.3903, "step": 15385 }, { "epoch": 1.0456583774969426, "grad_norm": 2.3909389972686768, "learning_rate": 0.000869352153825248, "loss": 3.5498, "step": 15390 }, { "epoch": 1.0459980975676042, "grad_norm": 1.8208327293395996, "learning_rate": 0.0008693096888164152, "loss": 3.3316, "step": 15395 }, { "epoch": 1.046337817638266, "grad_norm": 2.533268451690674, "learning_rate": 0.0008692672238075826, "loss": 3.5597, "step": 15400 }, { "epoch": 1.046677537708928, "grad_norm": 2.4828500747680664, "learning_rate": 0.0008692247587987499, "loss": 3.474, "step": 15405 }, { "epoch": 1.0470172577795895, "grad_norm": 3.240659713745117, "learning_rate": 0.0008691822937899171, "loss": 3.524, "step": 15410 }, { "epoch": 1.0473569778502514, "grad_norm": 1.857590675354004, "learning_rate": 0.0008691398287810845, "loss": 3.8186, "step": 15415 }, { "epoch": 1.0476966979209132, "grad_norm": 2.2207260131835938, "learning_rate": 0.0008690973637722517, "loss": 3.4047, "step": 15420 }, { "epoch": 1.0480364179915749, "grad_norm": 1.586639404296875, "learning_rate": 0.0008690548987634189, "loss": 3.4785, "step": 15425 }, { "epoch": 1.0483761380622367, "grad_norm": 2.046123504638672, "learning_rate": 0.0008690124337545862, "loss": 3.3664, "step": 15430 }, { "epoch": 1.0487158581328986, "grad_norm": 1.8126897811889648, "learning_rate": 0.0008689699687457535, "loss": 3.5381, "step": 15435 }, { "epoch": 1.0490555782035602, "grad_norm": 1.416024923324585, "learning_rate": 0.0008689275037369208, "loss": 3.4921, 
"step": 15440 }, { "epoch": 1.049395298274222, "grad_norm": 2.1456315517425537, "learning_rate": 0.0008688850387280881, "loss": 3.3959, "step": 15445 }, { "epoch": 1.049735018344884, "grad_norm": 1.827132225036621, "learning_rate": 0.0008688425737192554, "loss": 3.7361, "step": 15450 }, { "epoch": 1.0500747384155455, "grad_norm": 2.0488195419311523, "learning_rate": 0.0008688001087104226, "loss": 3.5727, "step": 15455 }, { "epoch": 1.0504144584862074, "grad_norm": 2.3981051445007324, "learning_rate": 0.0008687576437015899, "loss": 3.3932, "step": 15460 }, { "epoch": 1.050754178556869, "grad_norm": 2.026653289794922, "learning_rate": 0.0008687151786927571, "loss": 3.7218, "step": 15465 }, { "epoch": 1.0510938986275309, "grad_norm": 1.9881700277328491, "learning_rate": 0.0008686727136839244, "loss": 3.6331, "step": 15470 }, { "epoch": 1.0514336186981927, "grad_norm": 1.9678088426589966, "learning_rate": 0.0008686302486750918, "loss": 3.4781, "step": 15475 }, { "epoch": 1.0517733387688544, "grad_norm": 1.9893254041671753, "learning_rate": 0.000868587783666259, "loss": 3.5866, "step": 15480 }, { "epoch": 1.0521130588395162, "grad_norm": 1.9281665086746216, "learning_rate": 0.0008685453186574263, "loss": 3.7246, "step": 15485 }, { "epoch": 1.052452778910178, "grad_norm": 1.5418881177902222, "learning_rate": 0.0008685028536485936, "loss": 3.6757, "step": 15490 }, { "epoch": 1.0527924989808397, "grad_norm": 2.2038962841033936, "learning_rate": 0.0008684603886397608, "loss": 3.4261, "step": 15495 }, { "epoch": 1.0531322190515016, "grad_norm": 1.754628300666809, "learning_rate": 0.000868417923630928, "loss": 3.4025, "step": 15500 }, { "epoch": 1.0534719391221634, "grad_norm": 2.4018056392669678, "learning_rate": 0.0008683754586220954, "loss": 3.5933, "step": 15505 }, { "epoch": 1.053811659192825, "grad_norm": 1.7070473432540894, "learning_rate": 0.0008683329936132627, "loss": 3.7592, "step": 15510 }, { "epoch": 1.054151379263487, "grad_norm": 2.0088589191436768, 
"learning_rate": 0.0008682905286044299, "loss": 3.4735, "step": 15515 }, { "epoch": 1.0544910993341488, "grad_norm": 2.0332865715026855, "learning_rate": 0.0008682480635955973, "loss": 3.7883, "step": 15520 }, { "epoch": 1.0548308194048104, "grad_norm": 2.103991985321045, "learning_rate": 0.0008682055985867645, "loss": 3.4735, "step": 15525 }, { "epoch": 1.0551705394754722, "grad_norm": 1.6045656204223633, "learning_rate": 0.0008681631335779317, "loss": 3.4547, "step": 15530 }, { "epoch": 1.055510259546134, "grad_norm": 2.5759077072143555, "learning_rate": 0.0008681206685690991, "loss": 3.7696, "step": 15535 }, { "epoch": 1.0558499796167957, "grad_norm": 2.1601669788360596, "learning_rate": 0.0008680782035602663, "loss": 3.6595, "step": 15540 }, { "epoch": 1.0561896996874576, "grad_norm": 1.4498013257980347, "learning_rate": 0.0008680357385514336, "loss": 3.7154, "step": 15545 }, { "epoch": 1.0565294197581192, "grad_norm": 1.7745308876037598, "learning_rate": 0.000867993273542601, "loss": 3.5854, "step": 15550 }, { "epoch": 1.056869139828781, "grad_norm": 1.8627346754074097, "learning_rate": 0.0008679508085337682, "loss": 3.4384, "step": 15555 }, { "epoch": 1.057208859899443, "grad_norm": 2.3237106800079346, "learning_rate": 0.0008679083435249354, "loss": 3.2486, "step": 15560 }, { "epoch": 1.0575485799701045, "grad_norm": 2.271146059036255, "learning_rate": 0.0008678658785161027, "loss": 3.7041, "step": 15565 }, { "epoch": 1.0578883000407664, "grad_norm": 2.633436441421509, "learning_rate": 0.00086782341350727, "loss": 3.6272, "step": 15570 }, { "epoch": 1.0582280201114282, "grad_norm": 2.3100695610046387, "learning_rate": 0.0008677809484984372, "loss": 3.4221, "step": 15575 }, { "epoch": 1.0585677401820899, "grad_norm": 1.8984709978103638, "learning_rate": 0.0008677384834896046, "loss": 3.5768, "step": 15580 }, { "epoch": 1.0589074602527517, "grad_norm": 2.0379981994628906, "learning_rate": 0.0008676960184807719, "loss": 3.559, "step": 15585 }, { "epoch": 
1.0592471803234136, "grad_norm": 1.6773051023483276, "learning_rate": 0.0008676535534719392, "loss": 3.6958, "step": 15590 }, { "epoch": 1.0595869003940752, "grad_norm": 2.0571136474609375, "learning_rate": 0.0008676110884631064, "loss": 3.637, "step": 15595 }, { "epoch": 1.059926620464737, "grad_norm": 1.3990516662597656, "learning_rate": 0.0008675686234542737, "loss": 3.6613, "step": 15600 }, { "epoch": 1.060266340535399, "grad_norm": 1.8167678117752075, "learning_rate": 0.000867526158445441, "loss": 3.4752, "step": 15605 }, { "epoch": 1.0606060606060606, "grad_norm": 2.368833541870117, "learning_rate": 0.0008674836934366083, "loss": 3.6369, "step": 15610 }, { "epoch": 1.0609457806767224, "grad_norm": 2.1944382190704346, "learning_rate": 0.0008674412284277755, "loss": 3.5315, "step": 15615 }, { "epoch": 1.0612855007473843, "grad_norm": 1.6389080286026, "learning_rate": 0.0008673987634189429, "loss": 3.8969, "step": 15620 }, { "epoch": 1.061625220818046, "grad_norm": 1.9517157077789307, "learning_rate": 0.0008673562984101101, "loss": 3.6466, "step": 15625 }, { "epoch": 1.0619649408887077, "grad_norm": 1.5386991500854492, "learning_rate": 0.0008673138334012773, "loss": 3.5417, "step": 15630 }, { "epoch": 1.0623046609593694, "grad_norm": 2.0921502113342285, "learning_rate": 0.0008672713683924447, "loss": 3.7859, "step": 15635 }, { "epoch": 1.0626443810300312, "grad_norm": 2.4617464542388916, "learning_rate": 0.0008672289033836119, "loss": 3.4978, "step": 15640 }, { "epoch": 1.062984101100693, "grad_norm": 1.884922981262207, "learning_rate": 0.0008671864383747792, "loss": 3.6694, "step": 15645 }, { "epoch": 1.0633238211713547, "grad_norm": 1.8664369583129883, "learning_rate": 0.0008671439733659466, "loss": 3.5231, "step": 15650 }, { "epoch": 1.0636635412420166, "grad_norm": 2.418272018432617, "learning_rate": 0.0008671015083571138, "loss": 3.6491, "step": 15655 }, { "epoch": 1.0640032613126784, "grad_norm": 2.553699254989624, "learning_rate": 0.000867059043348281, 
"loss": 3.564, "step": 15660 }, { "epoch": 1.06434298138334, "grad_norm": 1.6845943927764893, "learning_rate": 0.0008670165783394483, "loss": 3.4714, "step": 15665 }, { "epoch": 1.064682701454002, "grad_norm": 1.6850422620773315, "learning_rate": 0.0008669741133306156, "loss": 3.3928, "step": 15670 }, { "epoch": 1.0650224215246638, "grad_norm": 2.9347074031829834, "learning_rate": 0.0008669316483217828, "loss": 3.4863, "step": 15675 }, { "epoch": 1.0653621415953254, "grad_norm": 2.0347790718078613, "learning_rate": 0.0008668891833129502, "loss": 3.7232, "step": 15680 }, { "epoch": 1.0657018616659872, "grad_norm": 2.2526915073394775, "learning_rate": 0.0008668467183041175, "loss": 3.492, "step": 15685 }, { "epoch": 1.066041581736649, "grad_norm": 1.7703250646591187, "learning_rate": 0.0008668042532952847, "loss": 3.6226, "step": 15690 }, { "epoch": 1.0663813018073107, "grad_norm": 2.1231343746185303, "learning_rate": 0.000866761788286452, "loss": 3.5949, "step": 15695 }, { "epoch": 1.0667210218779726, "grad_norm": 2.006420612335205, "learning_rate": 0.0008667193232776193, "loss": 3.5912, "step": 15700 }, { "epoch": 1.0670607419486344, "grad_norm": 1.9479620456695557, "learning_rate": 0.0008666768582687865, "loss": 3.2354, "step": 15705 }, { "epoch": 1.067400462019296, "grad_norm": 1.7835853099822998, "learning_rate": 0.0008666343932599538, "loss": 3.5438, "step": 15710 }, { "epoch": 1.067740182089958, "grad_norm": 2.339121103286743, "learning_rate": 0.0008665919282511211, "loss": 3.5416, "step": 15715 }, { "epoch": 1.0680799021606195, "grad_norm": 1.9069892168045044, "learning_rate": 0.0008665494632422884, "loss": 3.5035, "step": 15720 }, { "epoch": 1.0684196222312814, "grad_norm": 2.2043237686157227, "learning_rate": 0.0008665069982334557, "loss": 3.5178, "step": 15725 }, { "epoch": 1.0687593423019432, "grad_norm": 2.0666072368621826, "learning_rate": 0.0008664645332246229, "loss": 3.549, "step": 15730 }, { "epoch": 1.0690990623726049, "grad_norm": 
1.8315895795822144, "learning_rate": 0.0008664220682157902, "loss": 3.6859, "step": 15735 }, { "epoch": 1.0694387824432667, "grad_norm": 1.8976649045944214, "learning_rate": 0.0008663796032069575, "loss": 3.6534, "step": 15740 }, { "epoch": 1.0697785025139286, "grad_norm": 1.844499111175537, "learning_rate": 0.0008663371381981247, "loss": 3.443, "step": 15745 }, { "epoch": 1.0701182225845902, "grad_norm": 1.7392860651016235, "learning_rate": 0.0008662946731892921, "loss": 3.264, "step": 15750 }, { "epoch": 1.070457942655252, "grad_norm": 2.1205906867980957, "learning_rate": 0.0008662522081804594, "loss": 3.9139, "step": 15755 }, { "epoch": 1.070797662725914, "grad_norm": 2.1438543796539307, "learning_rate": 0.0008662097431716266, "loss": 3.5752, "step": 15760 }, { "epoch": 1.0711373827965756, "grad_norm": 1.8642868995666504, "learning_rate": 0.0008661672781627938, "loss": 3.7175, "step": 15765 }, { "epoch": 1.0714771028672374, "grad_norm": 2.0892772674560547, "learning_rate": 0.0008661248131539612, "loss": 3.3044, "step": 15770 }, { "epoch": 1.0718168229378993, "grad_norm": 2.0792412757873535, "learning_rate": 0.0008660823481451284, "loss": 3.5736, "step": 15775 }, { "epoch": 1.072156543008561, "grad_norm": 2.2761149406433105, "learning_rate": 0.0008660398831362956, "loss": 3.6444, "step": 15780 }, { "epoch": 1.0724962630792227, "grad_norm": 1.9193285703659058, "learning_rate": 0.0008659974181274631, "loss": 3.3575, "step": 15785 }, { "epoch": 1.0728359831498846, "grad_norm": 2.0862627029418945, "learning_rate": 0.0008659549531186303, "loss": 3.6009, "step": 15790 }, { "epoch": 1.0731757032205462, "grad_norm": 1.9217522144317627, "learning_rate": 0.0008659124881097975, "loss": 3.7689, "step": 15795 }, { "epoch": 1.073515423291208, "grad_norm": 2.4093847274780273, "learning_rate": 0.0008658700231009649, "loss": 3.601, "step": 15800 }, { "epoch": 1.07385514336187, "grad_norm": 1.8487608432769775, "learning_rate": 0.0008658275580921321, "loss": 3.6824, "step": 15805 
}, { "epoch": 1.0741948634325316, "grad_norm": 1.6492232084274292, "learning_rate": 0.0008657850930832993, "loss": 3.4888, "step": 15810 }, { "epoch": 1.0745345835031934, "grad_norm": 2.0533668994903564, "learning_rate": 0.0008657426280744666, "loss": 3.6855, "step": 15815 }, { "epoch": 1.074874303573855, "grad_norm": 1.6125171184539795, "learning_rate": 0.000865700163065634, "loss": 3.5687, "step": 15820 }, { "epoch": 1.075214023644517, "grad_norm": 2.028865098953247, "learning_rate": 0.0008656576980568012, "loss": 3.6854, "step": 15825 }, { "epoch": 1.0755537437151788, "grad_norm": 1.7530596256256104, "learning_rate": 0.0008656152330479685, "loss": 3.6495, "step": 15830 }, { "epoch": 1.0758934637858404, "grad_norm": 1.9018452167510986, "learning_rate": 0.0008655727680391358, "loss": 3.2926, "step": 15835 }, { "epoch": 1.0762331838565022, "grad_norm": 1.9561002254486084, "learning_rate": 0.000865530303030303, "loss": 3.5591, "step": 15840 }, { "epoch": 1.076572903927164, "grad_norm": 2.1373164653778076, "learning_rate": 0.0008654878380214703, "loss": 3.182, "step": 15845 }, { "epoch": 1.0769126239978257, "grad_norm": 1.8760206699371338, "learning_rate": 0.0008654453730126375, "loss": 3.3009, "step": 15850 }, { "epoch": 1.0772523440684876, "grad_norm": 1.378880500793457, "learning_rate": 0.0008654029080038049, "loss": 3.5605, "step": 15855 }, { "epoch": 1.0775920641391494, "grad_norm": 2.3526089191436768, "learning_rate": 0.0008653604429949722, "loss": 3.7934, "step": 15860 }, { "epoch": 1.077931784209811, "grad_norm": 2.0193917751312256, "learning_rate": 0.0008653179779861394, "loss": 3.4234, "step": 15865 }, { "epoch": 1.078271504280473, "grad_norm": 1.8427447080612183, "learning_rate": 0.0008652755129773067, "loss": 3.6185, "step": 15870 }, { "epoch": 1.0786112243511348, "grad_norm": 1.835538387298584, "learning_rate": 0.000865233047968474, "loss": 3.4581, "step": 15875 }, { "epoch": 1.0789509444217964, "grad_norm": 2.565955400466919, "learning_rate": 
0.0008651905829596412, "loss": 3.5414, "step": 15880 }, { "epoch": 1.0792906644924583, "grad_norm": 1.865757703781128, "learning_rate": 0.0008651481179508085, "loss": 3.6417, "step": 15885 }, { "epoch": 1.0796303845631199, "grad_norm": 1.5222692489624023, "learning_rate": 0.0008651056529419759, "loss": 3.4502, "step": 15890 }, { "epoch": 1.0799701046337817, "grad_norm": 2.07380747795105, "learning_rate": 0.0008650631879331431, "loss": 3.5609, "step": 15895 }, { "epoch": 1.0803098247044436, "grad_norm": 2.2390685081481934, "learning_rate": 0.0008650207229243103, "loss": 3.2208, "step": 15900 }, { "epoch": 1.0806495447751052, "grad_norm": 1.8292839527130127, "learning_rate": 0.0008649782579154777, "loss": 3.7142, "step": 15905 }, { "epoch": 1.080989264845767, "grad_norm": 2.2788493633270264, "learning_rate": 0.0008649357929066449, "loss": 3.8378, "step": 15910 }, { "epoch": 1.081328984916429, "grad_norm": 1.9648842811584473, "learning_rate": 0.0008648933278978121, "loss": 3.6226, "step": 15915 }, { "epoch": 1.0816687049870906, "grad_norm": 2.8702352046966553, "learning_rate": 0.0008648508628889795, "loss": 3.5583, "step": 15920 }, { "epoch": 1.0820084250577524, "grad_norm": 1.9765013456344604, "learning_rate": 0.0008648083978801468, "loss": 3.494, "step": 15925 }, { "epoch": 1.0823481451284143, "grad_norm": 1.7596725225448608, "learning_rate": 0.0008647659328713141, "loss": 3.5749, "step": 15930 }, { "epoch": 1.082687865199076, "grad_norm": 1.8924535512924194, "learning_rate": 0.0008647234678624814, "loss": 3.4861, "step": 15935 }, { "epoch": 1.0830275852697377, "grad_norm": 2.6077306270599365, "learning_rate": 0.0008646810028536486, "loss": 3.6566, "step": 15940 }, { "epoch": 1.0833673053403996, "grad_norm": 1.928897500038147, "learning_rate": 0.0008646385378448159, "loss": 3.6148, "step": 15945 }, { "epoch": 1.0837070254110612, "grad_norm": 2.0044965744018555, "learning_rate": 0.0008645960728359831, "loss": 3.4335, "step": 15950 }, { "epoch": 1.084046745481723, 
"grad_norm": 1.9666606187820435, "learning_rate": 0.0008645536078271504, "loss": 3.6707, "step": 15955 }, { "epoch": 1.084386465552385, "grad_norm": 1.9615682363510132, "learning_rate": 0.0008645111428183178, "loss": 3.4904, "step": 15960 }, { "epoch": 1.0847261856230466, "grad_norm": 2.003560781478882, "learning_rate": 0.000864468677809485, "loss": 3.68, "step": 15965 }, { "epoch": 1.0850659056937084, "grad_norm": 1.61203932762146, "learning_rate": 0.0008644262128006523, "loss": 3.5265, "step": 15970 }, { "epoch": 1.0854056257643703, "grad_norm": 1.8621315956115723, "learning_rate": 0.0008643837477918196, "loss": 3.7975, "step": 15975 }, { "epoch": 1.085745345835032, "grad_norm": 1.8880733251571655, "learning_rate": 0.0008643412827829868, "loss": 3.7325, "step": 15980 }, { "epoch": 1.0860850659056938, "grad_norm": 2.024794578552246, "learning_rate": 0.0008642988177741541, "loss": 3.5924, "step": 15985 }, { "epoch": 1.0864247859763554, "grad_norm": 1.8948320150375366, "learning_rate": 0.0008642563527653214, "loss": 3.4822, "step": 15990 }, { "epoch": 1.0867645060470172, "grad_norm": 1.9864780902862549, "learning_rate": 0.0008642138877564887, "loss": 3.6084, "step": 15995 }, { "epoch": 1.087104226117679, "grad_norm": 2.3227431774139404, "learning_rate": 0.000864171422747656, "loss": 3.6918, "step": 16000 }, { "epoch": 1.0874439461883407, "grad_norm": 2.0684146881103516, "learning_rate": 0.0008641289577388233, "loss": 3.655, "step": 16005 }, { "epoch": 1.0877836662590026, "grad_norm": 1.9417333602905273, "learning_rate": 0.0008640864927299905, "loss": 3.8362, "step": 16010 }, { "epoch": 1.0881233863296644, "grad_norm": 1.4274019002914429, "learning_rate": 0.0008640440277211577, "loss": 3.5103, "step": 16015 }, { "epoch": 1.088463106400326, "grad_norm": 1.58189857006073, "learning_rate": 0.0008640015627123251, "loss": 3.5067, "step": 16020 }, { "epoch": 1.088802826470988, "grad_norm": 1.7206321954727173, "learning_rate": 0.0008639590977034923, "loss": 3.5185, "step": 
16025 }, { "epoch": 1.0891425465416498, "grad_norm": 1.9601035118103027, "learning_rate": 0.0008639166326946596, "loss": 3.5603, "step": 16030 }, { "epoch": 1.0894822666123114, "grad_norm": 1.9219088554382324, "learning_rate": 0.000863874167685827, "loss": 3.422, "step": 16035 }, { "epoch": 1.0898219866829733, "grad_norm": 1.971696138381958, "learning_rate": 0.0008638317026769942, "loss": 3.3615, "step": 16040 }, { "epoch": 1.090161706753635, "grad_norm": 1.9799219369888306, "learning_rate": 0.0008637892376681614, "loss": 3.372, "step": 16045 }, { "epoch": 1.0905014268242967, "grad_norm": 1.4053491353988647, "learning_rate": 0.0008637467726593288, "loss": 3.8126, "step": 16050 }, { "epoch": 1.0908411468949586, "grad_norm": 1.4465314149856567, "learning_rate": 0.000863704307650496, "loss": 3.5412, "step": 16055 }, { "epoch": 1.0911808669656202, "grad_norm": 2.14005184173584, "learning_rate": 0.0008636618426416632, "loss": 3.3308, "step": 16060 }, { "epoch": 1.091520587036282, "grad_norm": 2.1572248935699463, "learning_rate": 0.0008636193776328306, "loss": 3.586, "step": 16065 }, { "epoch": 1.091860307106944, "grad_norm": 2.4956021308898926, "learning_rate": 0.0008635769126239979, "loss": 3.5507, "step": 16070 }, { "epoch": 1.0922000271776056, "grad_norm": 1.8780879974365234, "learning_rate": 0.0008635344476151651, "loss": 3.594, "step": 16075 }, { "epoch": 1.0925397472482674, "grad_norm": 2.074657440185547, "learning_rate": 0.0008634919826063324, "loss": 3.6859, "step": 16080 }, { "epoch": 1.0928794673189293, "grad_norm": 2.0733330249786377, "learning_rate": 0.0008634495175974997, "loss": 3.5097, "step": 16085 }, { "epoch": 1.093219187389591, "grad_norm": 2.1306140422821045, "learning_rate": 0.0008634070525886669, "loss": 3.3473, "step": 16090 }, { "epoch": 1.0935589074602527, "grad_norm": 2.308112382888794, "learning_rate": 0.0008633645875798342, "loss": 3.7759, "step": 16095 }, { "epoch": 1.0938986275309146, "grad_norm": 1.8661099672317505, "learning_rate": 
0.0008633221225710016, "loss": 3.6552, "step": 16100 }, { "epoch": 1.0942383476015762, "grad_norm": 2.15556001663208, "learning_rate": 0.0008632796575621688, "loss": 3.4828, "step": 16105 }, { "epoch": 1.094578067672238, "grad_norm": 2.203812837600708, "learning_rate": 0.0008632371925533361, "loss": 3.5589, "step": 16110 }, { "epoch": 1.0949177877429, "grad_norm": 1.9986944198608398, "learning_rate": 0.0008631947275445033, "loss": 3.6535, "step": 16115 }, { "epoch": 1.0952575078135616, "grad_norm": 1.745473861694336, "learning_rate": 0.0008631522625356706, "loss": 3.6383, "step": 16120 }, { "epoch": 1.0955972278842234, "grad_norm": 2.56156587600708, "learning_rate": 0.0008631097975268379, "loss": 3.5287, "step": 16125 }, { "epoch": 1.0959369479548853, "grad_norm": 1.7441692352294922, "learning_rate": 0.0008630673325180051, "loss": 3.7815, "step": 16130 }, { "epoch": 1.096276668025547, "grad_norm": 2.026888847351074, "learning_rate": 0.0008630248675091725, "loss": 3.3615, "step": 16135 }, { "epoch": 1.0966163880962088, "grad_norm": 1.5127131938934326, "learning_rate": 0.0008629824025003398, "loss": 3.6671, "step": 16140 }, { "epoch": 1.0969561081668706, "grad_norm": 1.7027591466903687, "learning_rate": 0.000862939937491507, "loss": 3.4789, "step": 16145 }, { "epoch": 1.0972958282375322, "grad_norm": 1.687286376953125, "learning_rate": 0.0008628974724826742, "loss": 3.5572, "step": 16150 }, { "epoch": 1.097635548308194, "grad_norm": 1.882132887840271, "learning_rate": 0.0008628550074738416, "loss": 3.5602, "step": 16155 }, { "epoch": 1.0979752683788557, "grad_norm": 2.09370756149292, "learning_rate": 0.0008628125424650088, "loss": 3.4978, "step": 16160 }, { "epoch": 1.0983149884495176, "grad_norm": 2.3138935565948486, "learning_rate": 0.000862770077456176, "loss": 3.4771, "step": 16165 }, { "epoch": 1.0986547085201794, "grad_norm": 2.2920382022857666, "learning_rate": 0.0008627276124473435, "loss": 3.5249, "step": 16170 }, { "epoch": 1.098994428590841, "grad_norm": 
2.384439706802368, "learning_rate": 0.0008626851474385107, "loss": 3.648, "step": 16175 }, { "epoch": 1.099334148661503, "grad_norm": 2.173184633255005, "learning_rate": 0.0008626426824296779, "loss": 3.5159, "step": 16180 }, { "epoch": 1.0996738687321648, "grad_norm": 1.5977014303207397, "learning_rate": 0.0008626002174208453, "loss": 3.6086, "step": 16185 }, { "epoch": 1.1000135888028264, "grad_norm": 2.18552827835083, "learning_rate": 0.0008625577524120125, "loss": 3.2639, "step": 16190 }, { "epoch": 1.1003533088734883, "grad_norm": 2.2805747985839844, "learning_rate": 0.0008625152874031797, "loss": 3.619, "step": 16195 }, { "epoch": 1.10069302894415, "grad_norm": 1.8263763189315796, "learning_rate": 0.0008624728223943472, "loss": 3.5204, "step": 16200 }, { "epoch": 1.1010327490148117, "grad_norm": 1.6699109077453613, "learning_rate": 0.0008624303573855144, "loss": 3.6139, "step": 16205 }, { "epoch": 1.1013724690854736, "grad_norm": 1.7257508039474487, "learning_rate": 0.0008623878923766816, "loss": 3.3538, "step": 16210 }, { "epoch": 1.1017121891561354, "grad_norm": 1.5938912630081177, "learning_rate": 0.0008623454273678489, "loss": 3.3287, "step": 16215 }, { "epoch": 1.102051909226797, "grad_norm": 1.5540988445281982, "learning_rate": 0.0008623029623590162, "loss": 3.5213, "step": 16220 }, { "epoch": 1.102391629297459, "grad_norm": 1.9862416982650757, "learning_rate": 0.0008622604973501834, "loss": 3.7443, "step": 16225 }, { "epoch": 1.1027313493681206, "grad_norm": 2.1623878479003906, "learning_rate": 0.0008622180323413507, "loss": 3.6252, "step": 16230 }, { "epoch": 1.1030710694387824, "grad_norm": 2.3364694118499756, "learning_rate": 0.0008621755673325181, "loss": 3.6784, "step": 16235 }, { "epoch": 1.1034107895094443, "grad_norm": 1.5754084587097168, "learning_rate": 0.0008621331023236853, "loss": 3.7019, "step": 16240 }, { "epoch": 1.103750509580106, "grad_norm": 1.8303090333938599, "learning_rate": 0.0008620906373148526, "loss": 3.7127, "step": 16245 }, 
{ "epoch": 1.1040902296507678, "grad_norm": 1.9630625247955322, "learning_rate": 0.0008620481723060198, "loss": 3.6085, "step": 16250 }, { "epoch": 1.1044299497214296, "grad_norm": 1.8564234972000122, "learning_rate": 0.0008620057072971871, "loss": 3.5842, "step": 16255 }, { "epoch": 1.1047696697920912, "grad_norm": 1.7101085186004639, "learning_rate": 0.0008619632422883544, "loss": 3.3995, "step": 16260 }, { "epoch": 1.105109389862753, "grad_norm": 2.180288791656494, "learning_rate": 0.0008619207772795216, "loss": 3.4662, "step": 16265 }, { "epoch": 1.105449109933415, "grad_norm": 2.4715542793273926, "learning_rate": 0.0008618783122706891, "loss": 3.3959, "step": 16270 }, { "epoch": 1.1057888300040766, "grad_norm": 1.930604338645935, "learning_rate": 0.0008618358472618563, "loss": 3.7626, "step": 16275 }, { "epoch": 1.1061285500747384, "grad_norm": 2.1035962104797363, "learning_rate": 0.0008617933822530235, "loss": 3.5331, "step": 16280 }, { "epoch": 1.1064682701454003, "grad_norm": 1.9946281909942627, "learning_rate": 0.0008617509172441909, "loss": 3.5328, "step": 16285 }, { "epoch": 1.106807990216062, "grad_norm": 1.9954321384429932, "learning_rate": 0.0008617084522353581, "loss": 3.5246, "step": 16290 }, { "epoch": 1.1071477102867238, "grad_norm": 2.182764768600464, "learning_rate": 0.0008616659872265253, "loss": 3.3154, "step": 16295 }, { "epoch": 1.1074874303573856, "grad_norm": 3.4203033447265625, "learning_rate": 0.0008616235222176926, "loss": 3.2834, "step": 16300 }, { "epoch": 1.1078271504280472, "grad_norm": 1.563283085823059, "learning_rate": 0.00086158105720886, "loss": 3.4776, "step": 16305 }, { "epoch": 1.108166870498709, "grad_norm": 1.7186145782470703, "learning_rate": 0.0008615385922000272, "loss": 3.6808, "step": 16310 }, { "epoch": 1.108506590569371, "grad_norm": 1.9685771465301514, "learning_rate": 0.0008614961271911945, "loss": 3.3539, "step": 16315 }, { "epoch": 1.1088463106400326, "grad_norm": 1.9809551239013672, "learning_rate": 
0.0008614536621823618, "loss": 3.6892, "step": 16320 }, { "epoch": 1.1091860307106944, "grad_norm": 1.69011652469635, "learning_rate": 0.000861411197173529, "loss": 3.5759, "step": 16325 }, { "epoch": 1.109525750781356, "grad_norm": 2.4004383087158203, "learning_rate": 0.0008613687321646963, "loss": 3.4646, "step": 16330 }, { "epoch": 1.109865470852018, "grad_norm": 1.599149227142334, "learning_rate": 0.0008613262671558636, "loss": 3.711, "step": 16335 }, { "epoch": 1.1102051909226798, "grad_norm": 1.6666784286499023, "learning_rate": 0.0008612838021470309, "loss": 3.4258, "step": 16340 }, { "epoch": 1.1105449109933414, "grad_norm": 2.0106520652770996, "learning_rate": 0.0008612413371381982, "loss": 3.7733, "step": 16345 }, { "epoch": 1.1108846310640033, "grad_norm": 1.5830994844436646, "learning_rate": 0.0008611988721293654, "loss": 3.5345, "step": 16350 }, { "epoch": 1.1112243511346651, "grad_norm": 2.430173873901367, "learning_rate": 0.0008611564071205327, "loss": 3.6077, "step": 16355 }, { "epoch": 1.1115640712053267, "grad_norm": 1.9125194549560547, "learning_rate": 0.0008611139421117, "loss": 3.3394, "step": 16360 }, { "epoch": 1.1119037912759886, "grad_norm": 1.6719861030578613, "learning_rate": 0.0008610714771028672, "loss": 3.5228, "step": 16365 }, { "epoch": 1.1122435113466504, "grad_norm": 1.8944880962371826, "learning_rate": 0.0008610290120940345, "loss": 3.5292, "step": 16370 }, { "epoch": 1.112583231417312, "grad_norm": 1.903696060180664, "learning_rate": 0.0008609865470852019, "loss": 3.2834, "step": 16375 }, { "epoch": 1.112922951487974, "grad_norm": 2.075456380844116, "learning_rate": 0.0008609440820763691, "loss": 3.6886, "step": 16380 }, { "epoch": 1.1132626715586358, "grad_norm": 1.8382105827331543, "learning_rate": 0.0008609016170675364, "loss": 3.7402, "step": 16385 }, { "epoch": 1.1136023916292974, "grad_norm": 1.777482032775879, "learning_rate": 0.0008608591520587037, "loss": 3.8095, "step": 16390 }, { "epoch": 1.1139421116999593, 
"grad_norm": 1.6784884929656982, "learning_rate": 0.0008608166870498709, "loss": 3.7768, "step": 16395 }, { "epoch": 1.114281831770621, "grad_norm": 1.8853839635849, "learning_rate": 0.0008607742220410381, "loss": 3.5072, "step": 16400 }, { "epoch": 1.1146215518412828, "grad_norm": 2.181187391281128, "learning_rate": 0.0008607317570322055, "loss": 3.4927, "step": 16405 }, { "epoch": 1.1149612719119446, "grad_norm": 1.8721954822540283, "learning_rate": 0.0008606892920233728, "loss": 3.6746, "step": 16410 }, { "epoch": 1.1153009919826062, "grad_norm": 2.2893168926239014, "learning_rate": 0.00086064682701454, "loss": 3.568, "step": 16415 }, { "epoch": 1.115640712053268, "grad_norm": 2.0850021839141846, "learning_rate": 0.0008606043620057074, "loss": 3.5819, "step": 16420 }, { "epoch": 1.11598043212393, "grad_norm": 1.6973708868026733, "learning_rate": 0.0008605618969968746, "loss": 3.3824, "step": 16425 }, { "epoch": 1.1163201521945916, "grad_norm": 2.182859182357788, "learning_rate": 0.0008605194319880418, "loss": 3.7125, "step": 16430 }, { "epoch": 1.1166598722652534, "grad_norm": 2.513636350631714, "learning_rate": 0.0008604769669792092, "loss": 3.5123, "step": 16435 }, { "epoch": 1.1169995923359153, "grad_norm": 2.169243335723877, "learning_rate": 0.0008604345019703764, "loss": 3.5257, "step": 16440 }, { "epoch": 1.117339312406577, "grad_norm": 2.121238946914673, "learning_rate": 0.0008603920369615437, "loss": 3.5184, "step": 16445 }, { "epoch": 1.1176790324772388, "grad_norm": 2.155561923980713, "learning_rate": 0.000860349571952711, "loss": 3.8877, "step": 16450 }, { "epoch": 1.1180187525479006, "grad_norm": 1.8846555948257446, "learning_rate": 0.0008603071069438783, "loss": 3.5627, "step": 16455 }, { "epoch": 1.1183584726185622, "grad_norm": 2.8350913524627686, "learning_rate": 0.0008602646419350455, "loss": 3.5555, "step": 16460 }, { "epoch": 1.118698192689224, "grad_norm": 2.0280234813690186, "learning_rate": 0.0008602221769262128, "loss": 3.7769, "step": 
16465 }, { "epoch": 1.119037912759886, "grad_norm": 2.6908392906188965, "learning_rate": 0.0008601797119173801, "loss": 3.3798, "step": 16470 }, { "epoch": 1.1193776328305476, "grad_norm": 1.4951399564743042, "learning_rate": 0.0008601372469085473, "loss": 3.5839, "step": 16475 }, { "epoch": 1.1197173529012094, "grad_norm": 1.8314279317855835, "learning_rate": 0.0008600947818997147, "loss": 3.6359, "step": 16480 }, { "epoch": 1.1200570729718713, "grad_norm": 1.9490203857421875, "learning_rate": 0.000860052316890882, "loss": 3.5518, "step": 16485 }, { "epoch": 1.120396793042533, "grad_norm": 1.915137529373169, "learning_rate": 0.0008600098518820492, "loss": 3.6035, "step": 16490 }, { "epoch": 1.1207365131131948, "grad_norm": 1.7036677598953247, "learning_rate": 0.0008599673868732165, "loss": 3.5336, "step": 16495 }, { "epoch": 1.1210762331838564, "grad_norm": 1.924709677696228, "learning_rate": 0.0008599249218643837, "loss": 3.6324, "step": 16500 }, { "epoch": 1.1214159532545183, "grad_norm": 1.7602657079696655, "learning_rate": 0.000859882456855551, "loss": 3.5244, "step": 16505 }, { "epoch": 1.1217556733251801, "grad_norm": 1.796631097793579, "learning_rate": 0.0008598399918467183, "loss": 3.5029, "step": 16510 }, { "epoch": 1.1220953933958417, "grad_norm": 1.7954131364822388, "learning_rate": 0.0008597975268378856, "loss": 3.5565, "step": 16515 }, { "epoch": 1.1224351134665036, "grad_norm": 1.9589929580688477, "learning_rate": 0.0008597550618290529, "loss": 3.6229, "step": 16520 }, { "epoch": 1.1227748335371655, "grad_norm": 1.8552443981170654, "learning_rate": 0.0008597125968202202, "loss": 3.3709, "step": 16525 }, { "epoch": 1.123114553607827, "grad_norm": 1.7866418361663818, "learning_rate": 0.0008596701318113874, "loss": 3.5484, "step": 16530 }, { "epoch": 1.123454273678489, "grad_norm": 1.8245631456375122, "learning_rate": 0.0008596276668025546, "loss": 3.473, "step": 16535 }, { "epoch": 1.1237939937491508, "grad_norm": 1.937386155128479, "learning_rate": 
0.000859585201793722, "loss": 3.7153, "step": 16540 }, { "epoch": 1.1241337138198124, "grad_norm": 1.9402655363082886, "learning_rate": 0.0008595427367848892, "loss": 3.5753, "step": 16545 }, { "epoch": 1.1244734338904743, "grad_norm": 2.139143943786621, "learning_rate": 0.0008595002717760565, "loss": 3.558, "step": 16550 }, { "epoch": 1.1248131539611361, "grad_norm": 2.1117031574249268, "learning_rate": 0.0008594578067672239, "loss": 3.5194, "step": 16555 }, { "epoch": 1.1251528740317978, "grad_norm": 1.9341312646865845, "learning_rate": 0.0008594153417583911, "loss": 3.5998, "step": 16560 }, { "epoch": 1.1254925941024596, "grad_norm": 2.2304937839508057, "learning_rate": 0.0008593728767495583, "loss": 3.6777, "step": 16565 }, { "epoch": 1.1258323141731212, "grad_norm": 2.0624313354492188, "learning_rate": 0.0008593304117407257, "loss": 3.5698, "step": 16570 }, { "epoch": 1.126172034243783, "grad_norm": 2.122368335723877, "learning_rate": 0.0008592879467318929, "loss": 3.7028, "step": 16575 }, { "epoch": 1.126511754314445, "grad_norm": 2.0859012603759766, "learning_rate": 0.0008592454817230601, "loss": 3.6693, "step": 16580 }, { "epoch": 1.1268514743851066, "grad_norm": 1.4155548810958862, "learning_rate": 0.0008592030167142276, "loss": 3.6187, "step": 16585 }, { "epoch": 1.1271911944557684, "grad_norm": 2.1286122798919678, "learning_rate": 0.0008591605517053948, "loss": 3.4808, "step": 16590 }, { "epoch": 1.1275309145264303, "grad_norm": 1.8394315242767334, "learning_rate": 0.000859118086696562, "loss": 3.9108, "step": 16595 }, { "epoch": 1.127870634597092, "grad_norm": 2.6061575412750244, "learning_rate": 0.0008590756216877293, "loss": 3.5925, "step": 16600 }, { "epoch": 1.1282103546677538, "grad_norm": 1.9714298248291016, "learning_rate": 0.0008590331566788966, "loss": 3.5764, "step": 16605 }, { "epoch": 1.1285500747384156, "grad_norm": 1.8551124334335327, "learning_rate": 0.0008589906916700639, "loss": 3.6553, "step": 16610 }, { "epoch": 1.1288897948090773, 
"grad_norm": 2.056062936782837, "learning_rate": 0.0008589482266612311, "loss": 3.593, "step": 16615 }, { "epoch": 1.129229514879739, "grad_norm": 1.8903244733810425, "learning_rate": 0.0008589057616523985, "loss": 3.4355, "step": 16620 }, { "epoch": 1.129569234950401, "grad_norm": 2.3328495025634766, "learning_rate": 0.0008588632966435658, "loss": 3.5619, "step": 16625 }, { "epoch": 1.1299089550210626, "grad_norm": 2.7592074871063232, "learning_rate": 0.000858820831634733, "loss": 3.4589, "step": 16630 }, { "epoch": 1.1302486750917244, "grad_norm": 1.8449887037277222, "learning_rate": 0.0008587783666259002, "loss": 3.638, "step": 16635 }, { "epoch": 1.1305883951623863, "grad_norm": 2.090413808822632, "learning_rate": 0.0008587359016170676, "loss": 3.6906, "step": 16640 }, { "epoch": 1.130928115233048, "grad_norm": 1.9758833646774292, "learning_rate": 0.0008586934366082348, "loss": 3.5636, "step": 16645 }, { "epoch": 1.1312678353037098, "grad_norm": 2.249112606048584, "learning_rate": 0.000858650971599402, "loss": 3.891, "step": 16650 }, { "epoch": 1.1316075553743716, "grad_norm": 1.787674069404602, "learning_rate": 0.0008586085065905695, "loss": 3.3176, "step": 16655 }, { "epoch": 1.1319472754450333, "grad_norm": 2.0539159774780273, "learning_rate": 0.0008585660415817367, "loss": 3.6186, "step": 16660 }, { "epoch": 1.1322869955156951, "grad_norm": 1.9349993467330933, "learning_rate": 0.0008585235765729039, "loss": 3.4701, "step": 16665 }, { "epoch": 1.1326267155863567, "grad_norm": 2.052931308746338, "learning_rate": 0.0008584811115640713, "loss": 3.6363, "step": 16670 }, { "epoch": 1.1329664356570186, "grad_norm": 1.8514739274978638, "learning_rate": 0.0008584386465552385, "loss": 3.5811, "step": 16675 }, { "epoch": 1.1333061557276805, "grad_norm": 2.300128936767578, "learning_rate": 0.0008583961815464057, "loss": 3.564, "step": 16680 }, { "epoch": 1.133645875798342, "grad_norm": 1.772621750831604, "learning_rate": 0.0008583537165375732, "loss": 3.5809, "step": 
16685 }, { "epoch": 1.133985595869004, "grad_norm": 2.4654057025909424, "learning_rate": 0.0008583112515287404, "loss": 3.6384, "step": 16690 }, { "epoch": 1.1343253159396658, "grad_norm": 1.8975369930267334, "learning_rate": 0.0008582687865199076, "loss": 3.5269, "step": 16695 }, { "epoch": 1.1346650360103274, "grad_norm": 2.0323736667633057, "learning_rate": 0.0008582263215110749, "loss": 3.4091, "step": 16700 }, { "epoch": 1.1350047560809893, "grad_norm": 1.974496841430664, "learning_rate": 0.0008581838565022422, "loss": 3.7414, "step": 16705 }, { "epoch": 1.1353444761516511, "grad_norm": 2.305094003677368, "learning_rate": 0.0008581413914934094, "loss": 3.6463, "step": 16710 }, { "epoch": 1.1356841962223128, "grad_norm": 1.8598576784133911, "learning_rate": 0.0008580989264845767, "loss": 3.5899, "step": 16715 }, { "epoch": 1.1360239162929746, "grad_norm": 1.895500898361206, "learning_rate": 0.0008580564614757441, "loss": 3.7768, "step": 16720 }, { "epoch": 1.1363636363636362, "grad_norm": 2.1372580528259277, "learning_rate": 0.0008580139964669113, "loss": 3.645, "step": 16725 }, { "epoch": 1.136703356434298, "grad_norm": 1.98237943649292, "learning_rate": 0.0008579715314580786, "loss": 3.7134, "step": 16730 }, { "epoch": 1.13704307650496, "grad_norm": 1.9223995208740234, "learning_rate": 0.0008579290664492459, "loss": 3.6898, "step": 16735 }, { "epoch": 1.1373827965756216, "grad_norm": 2.1175570487976074, "learning_rate": 0.0008578866014404131, "loss": 3.5019, "step": 16740 }, { "epoch": 1.1377225166462834, "grad_norm": 1.9431058168411255, "learning_rate": 0.0008578441364315804, "loss": 3.5204, "step": 16745 }, { "epoch": 1.1380622367169453, "grad_norm": 2.4348039627075195, "learning_rate": 0.0008578016714227476, "loss": 3.5678, "step": 16750 }, { "epoch": 1.138401956787607, "grad_norm": 2.6844658851623535, "learning_rate": 0.000857759206413915, "loss": 3.8422, "step": 16755 }, { "epoch": 1.1387416768582688, "grad_norm": 2.7835757732391357, "learning_rate": 
0.0008577167414050823, "loss": 3.5648, "step": 16760 }, { "epoch": 1.1390813969289306, "grad_norm": 1.7647943496704102, "learning_rate": 0.0008576742763962495, "loss": 3.7484, "step": 16765 }, { "epoch": 1.1394211169995923, "grad_norm": 2.329324960708618, "learning_rate": 0.0008576318113874168, "loss": 3.5395, "step": 16770 }, { "epoch": 1.139760837070254, "grad_norm": 2.2729835510253906, "learning_rate": 0.0008575893463785841, "loss": 3.6004, "step": 16775 }, { "epoch": 1.140100557140916, "grad_norm": 1.6530370712280273, "learning_rate": 0.0008575468813697513, "loss": 3.5702, "step": 16780 }, { "epoch": 1.1404402772115776, "grad_norm": 2.04164981842041, "learning_rate": 0.0008575044163609185, "loss": 3.5652, "step": 16785 }, { "epoch": 1.1407799972822394, "grad_norm": 1.8786139488220215, "learning_rate": 0.000857461951352086, "loss": 3.8339, "step": 16790 }, { "epoch": 1.1411197173529013, "grad_norm": 2.199887752532959, "learning_rate": 0.0008574194863432532, "loss": 3.4231, "step": 16795 }, { "epoch": 1.141459437423563, "grad_norm": 2.205906867980957, "learning_rate": 0.0008573770213344204, "loss": 3.597, "step": 16800 }, { "epoch": 1.1417991574942248, "grad_norm": 2.4756665229797363, "learning_rate": 0.0008573345563255878, "loss": 3.5974, "step": 16805 }, { "epoch": 1.1421388775648866, "grad_norm": 2.1316540241241455, "learning_rate": 0.000857292091316755, "loss": 3.6283, "step": 16810 }, { "epoch": 1.1424785976355483, "grad_norm": 2.5669686794281006, "learning_rate": 0.0008572496263079222, "loss": 3.5318, "step": 16815 }, { "epoch": 1.1428183177062101, "grad_norm": 1.9858146905899048, "learning_rate": 0.0008572071612990896, "loss": 3.4255, "step": 16820 }, { "epoch": 1.143158037776872, "grad_norm": 2.381223678588867, "learning_rate": 0.0008571646962902569, "loss": 3.5578, "step": 16825 }, { "epoch": 1.1434977578475336, "grad_norm": 1.8045024871826172, "learning_rate": 0.0008571222312814241, "loss": 3.5718, "step": 16830 }, { "epoch": 1.1438374779181955, 
"grad_norm": 2.4406850337982178, "learning_rate": 0.0008570797662725915, "loss": 3.6639, "step": 16835 }, { "epoch": 1.144177197988857, "grad_norm": 2.0628151893615723, "learning_rate": 0.0008570373012637587, "loss": 3.6709, "step": 16840 }, { "epoch": 1.144516918059519, "grad_norm": 1.7434704303741455, "learning_rate": 0.0008569948362549259, "loss": 3.5756, "step": 16845 }, { "epoch": 1.1448566381301808, "grad_norm": 2.2544448375701904, "learning_rate": 0.0008569523712460932, "loss": 3.682, "step": 16850 }, { "epoch": 1.1451963582008424, "grad_norm": 2.186014413833618, "learning_rate": 0.0008569099062372605, "loss": 3.4808, "step": 16855 }, { "epoch": 1.1455360782715043, "grad_norm": 1.674890398979187, "learning_rate": 0.0008568674412284278, "loss": 3.5915, "step": 16860 }, { "epoch": 1.1458757983421661, "grad_norm": 2.083306074142456, "learning_rate": 0.0008568249762195951, "loss": 3.4941, "step": 16865 }, { "epoch": 1.1462155184128278, "grad_norm": 2.1135125160217285, "learning_rate": 0.0008567825112107624, "loss": 3.5733, "step": 16870 }, { "epoch": 1.1465552384834896, "grad_norm": 2.080170154571533, "learning_rate": 0.0008567400462019296, "loss": 3.7823, "step": 16875 }, { "epoch": 1.1468949585541515, "grad_norm": 1.8718178272247314, "learning_rate": 0.0008566975811930969, "loss": 3.4917, "step": 16880 }, { "epoch": 1.147234678624813, "grad_norm": 2.0722203254699707, "learning_rate": 0.0008566551161842641, "loss": 3.647, "step": 16885 }, { "epoch": 1.147574398695475, "grad_norm": 1.9395283460617065, "learning_rate": 0.0008566126511754314, "loss": 3.5775, "step": 16890 }, { "epoch": 1.1479141187661366, "grad_norm": 2.141054391860962, "learning_rate": 0.0008565701861665988, "loss": 3.5926, "step": 16895 }, { "epoch": 1.1482538388367984, "grad_norm": 1.881306767463684, "learning_rate": 0.000856527721157766, "loss": 3.4647, "step": 16900 }, { "epoch": 1.1485935589074603, "grad_norm": 1.9365615844726562, "learning_rate": 0.0008564852561489333, "loss": 3.7508, 
"step": 16905 }, { "epoch": 1.148933278978122, "grad_norm": 2.088608503341675, "learning_rate": 0.0008564427911401006, "loss": 3.4782, "step": 16910 }, { "epoch": 1.1492729990487838, "grad_norm": 1.9818320274353027, "learning_rate": 0.0008564003261312678, "loss": 3.4259, "step": 16915 }, { "epoch": 1.1496127191194456, "grad_norm": 2.3629000186920166, "learning_rate": 0.000856357861122435, "loss": 3.4866, "step": 16920 }, { "epoch": 1.1499524391901073, "grad_norm": 2.100407123565674, "learning_rate": 0.0008563153961136024, "loss": 3.4876, "step": 16925 }, { "epoch": 1.150292159260769, "grad_norm": 2.155121326446533, "learning_rate": 0.0008562729311047697, "loss": 3.6319, "step": 16930 }, { "epoch": 1.150631879331431, "grad_norm": 2.397502899169922, "learning_rate": 0.0008562304660959369, "loss": 3.5193, "step": 16935 }, { "epoch": 1.1509715994020926, "grad_norm": 1.7697081565856934, "learning_rate": 0.0008561880010871043, "loss": 3.7341, "step": 16940 }, { "epoch": 1.1513113194727544, "grad_norm": 1.680200219154358, "learning_rate": 0.0008561455360782715, "loss": 3.4038, "step": 16945 }, { "epoch": 1.1516510395434163, "grad_norm": 1.7492401599884033, "learning_rate": 0.0008561030710694388, "loss": 3.5385, "step": 16950 }, { "epoch": 1.151990759614078, "grad_norm": 1.8324297666549683, "learning_rate": 0.0008560606060606061, "loss": 3.3078, "step": 16955 }, { "epoch": 1.1523304796847398, "grad_norm": 2.051682949066162, "learning_rate": 0.0008560181410517733, "loss": 3.3779, "step": 16960 }, { "epoch": 1.1526701997554016, "grad_norm": 1.7047878503799438, "learning_rate": 0.0008559756760429407, "loss": 3.5227, "step": 16965 }, { "epoch": 1.1530099198260633, "grad_norm": 2.4762251377105713, "learning_rate": 0.000855933211034108, "loss": 3.545, "step": 16970 }, { "epoch": 1.1533496398967251, "grad_norm": 1.9005025625228882, "learning_rate": 0.0008558907460252752, "loss": 3.4043, "step": 16975 }, { "epoch": 1.153689359967387, "grad_norm": 2.4338173866271973, 
"learning_rate": 0.0008558482810164425, "loss": 3.5685, "step": 16980 }, { "epoch": 1.1540290800380486, "grad_norm": 2.2414324283599854, "learning_rate": 0.0008558058160076097, "loss": 3.7293, "step": 16985 }, { "epoch": 1.1543688001087105, "grad_norm": 1.9791510105133057, "learning_rate": 0.000855763350998777, "loss": 3.4112, "step": 16990 }, { "epoch": 1.1547085201793723, "grad_norm": 1.8326914310455322, "learning_rate": 0.0008557208859899443, "loss": 3.557, "step": 16995 }, { "epoch": 1.155048240250034, "grad_norm": 2.018770694732666, "learning_rate": 0.0008556784209811116, "loss": 3.6065, "step": 17000 }, { "epoch": 1.1553879603206958, "grad_norm": 2.688831329345703, "learning_rate": 0.0008556359559722789, "loss": 3.6172, "step": 17005 }, { "epoch": 1.1557276803913574, "grad_norm": 2.0945701599121094, "learning_rate": 0.0008555934909634462, "loss": 3.6973, "step": 17010 }, { "epoch": 1.1560674004620193, "grad_norm": 1.7410026788711548, "learning_rate": 0.0008555510259546134, "loss": 3.64, "step": 17015 }, { "epoch": 1.1564071205326811, "grad_norm": 1.7020312547683716, "learning_rate": 0.0008555085609457807, "loss": 3.6234, "step": 17020 }, { "epoch": 1.1567468406033428, "grad_norm": 2.3330445289611816, "learning_rate": 0.000855466095936948, "loss": 3.8309, "step": 17025 }, { "epoch": 1.1570865606740046, "grad_norm": 2.1446940898895264, "learning_rate": 0.0008554236309281152, "loss": 3.5848, "step": 17030 }, { "epoch": 1.1574262807446665, "grad_norm": 2.5597894191741943, "learning_rate": 0.0008553811659192825, "loss": 3.5585, "step": 17035 }, { "epoch": 1.157766000815328, "grad_norm": 2.3438825607299805, "learning_rate": 0.0008553387009104499, "loss": 3.3179, "step": 17040 }, { "epoch": 1.15810572088599, "grad_norm": 1.5878838300704956, "learning_rate": 0.0008552962359016171, "loss": 3.8424, "step": 17045 }, { "epoch": 1.1584454409566518, "grad_norm": 2.031625747680664, "learning_rate": 0.0008552537708927843, "loss": 3.296, "step": 17050 }, { "epoch": 
1.1587851610273134, "grad_norm": 1.6748493909835815, "learning_rate": 0.0008552113058839517, "loss": 3.6407, "step": 17055 }, { "epoch": 1.1591248810979753, "grad_norm": 2.083644151687622, "learning_rate": 0.0008551688408751189, "loss": 3.5783, "step": 17060 }, { "epoch": 1.159464601168637, "grad_norm": 1.5899056196212769, "learning_rate": 0.0008551263758662861, "loss": 3.6124, "step": 17065 }, { "epoch": 1.1598043212392988, "grad_norm": 1.7896467447280884, "learning_rate": 0.0008550839108574536, "loss": 3.5976, "step": 17070 }, { "epoch": 1.1601440413099606, "grad_norm": 2.0796635150909424, "learning_rate": 0.0008550414458486208, "loss": 3.5849, "step": 17075 }, { "epoch": 1.1604837613806223, "grad_norm": 2.452714204788208, "learning_rate": 0.000854998980839788, "loss": 3.3107, "step": 17080 }, { "epoch": 1.1608234814512841, "grad_norm": 2.3488903045654297, "learning_rate": 0.0008549565158309553, "loss": 3.5022, "step": 17085 }, { "epoch": 1.161163201521946, "grad_norm": 1.8453553915023804, "learning_rate": 0.0008549140508221226, "loss": 3.4706, "step": 17090 }, { "epoch": 1.1615029215926076, "grad_norm": 1.8664199113845825, "learning_rate": 0.0008548715858132898, "loss": 3.5366, "step": 17095 }, { "epoch": 1.1618426416632694, "grad_norm": 1.8244986534118652, "learning_rate": 0.0008548291208044571, "loss": 3.4413, "step": 17100 }, { "epoch": 1.1621823617339313, "grad_norm": 1.8712557554244995, "learning_rate": 0.0008547866557956245, "loss": 3.5697, "step": 17105 }, { "epoch": 1.162522081804593, "grad_norm": 1.5470391511917114, "learning_rate": 0.0008547441907867917, "loss": 3.3798, "step": 17110 }, { "epoch": 1.1628618018752548, "grad_norm": 1.9400405883789062, "learning_rate": 0.000854701725777959, "loss": 3.4818, "step": 17115 }, { "epoch": 1.1632015219459166, "grad_norm": 1.9845280647277832, "learning_rate": 0.0008546592607691263, "loss": 3.4717, "step": 17120 }, { "epoch": 1.1635412420165783, "grad_norm": 2.254887342453003, "learning_rate": 
0.0008546167957602935, "loss": 3.4462, "step": 17125 }, { "epoch": 1.1638809620872401, "grad_norm": 1.6699001789093018, "learning_rate": 0.0008545743307514608, "loss": 3.8571, "step": 17130 }, { "epoch": 1.164220682157902, "grad_norm": 2.0597896575927734, "learning_rate": 0.000854531865742628, "loss": 3.418, "step": 17135 }, { "epoch": 1.1645604022285636, "grad_norm": 2.0482864379882812, "learning_rate": 0.0008544894007337954, "loss": 3.7874, "step": 17140 }, { "epoch": 1.1649001222992255, "grad_norm": 1.960223913192749, "learning_rate": 0.0008544469357249627, "loss": 3.5863, "step": 17145 }, { "epoch": 1.1652398423698873, "grad_norm": 2.487131357192993, "learning_rate": 0.0008544044707161299, "loss": 3.6503, "step": 17150 }, { "epoch": 1.165579562440549, "grad_norm": 1.6412959098815918, "learning_rate": 0.0008543620057072972, "loss": 3.5372, "step": 17155 }, { "epoch": 1.1659192825112108, "grad_norm": 1.7394795417785645, "learning_rate": 0.0008543195406984645, "loss": 3.7251, "step": 17160 }, { "epoch": 1.1662590025818727, "grad_norm": 1.8904122114181519, "learning_rate": 0.0008542770756896317, "loss": 3.6196, "step": 17165 }, { "epoch": 1.1665987226525343, "grad_norm": 1.5075860023498535, "learning_rate": 0.000854234610680799, "loss": 3.5126, "step": 17170 }, { "epoch": 1.1669384427231961, "grad_norm": 2.261249303817749, "learning_rate": 0.0008541921456719664, "loss": 3.7651, "step": 17175 }, { "epoch": 1.1672781627938578, "grad_norm": 1.8133624792099, "learning_rate": 0.0008541496806631336, "loss": 3.6675, "step": 17180 }, { "epoch": 1.1676178828645196, "grad_norm": 2.0197057723999023, "learning_rate": 0.0008541072156543008, "loss": 3.7679, "step": 17185 }, { "epoch": 1.1679576029351815, "grad_norm": 2.282151460647583, "learning_rate": 0.0008540647506454682, "loss": 3.4452, "step": 17190 }, { "epoch": 1.168297323005843, "grad_norm": 2.5657057762145996, "learning_rate": 0.0008540222856366354, "loss": 3.5061, "step": 17195 }, { "epoch": 1.168637043076505, 
"grad_norm": 2.165231704711914, "learning_rate": 0.0008539798206278026, "loss": 3.7427, "step": 17200 }, { "epoch": 1.1689767631471668, "grad_norm": 1.7124062776565552, "learning_rate": 0.00085393735561897, "loss": 3.5112, "step": 17205 }, { "epoch": 1.1693164832178284, "grad_norm": 2.0090227127075195, "learning_rate": 0.0008538948906101373, "loss": 3.5599, "step": 17210 }, { "epoch": 1.1696562032884903, "grad_norm": 2.086730718612671, "learning_rate": 0.0008538524256013045, "loss": 3.5808, "step": 17215 }, { "epoch": 1.1699959233591521, "grad_norm": 1.5184528827667236, "learning_rate": 0.0008538099605924719, "loss": 3.6317, "step": 17220 }, { "epoch": 1.1703356434298138, "grad_norm": 1.7130539417266846, "learning_rate": 0.0008537674955836391, "loss": 3.4973, "step": 17225 }, { "epoch": 1.1706753635004756, "grad_norm": 2.1319081783294678, "learning_rate": 0.0008537250305748063, "loss": 3.5374, "step": 17230 }, { "epoch": 1.1710150835711373, "grad_norm": 2.1040406227111816, "learning_rate": 0.0008536825655659736, "loss": 3.3267, "step": 17235 }, { "epoch": 1.1713548036417991, "grad_norm": 2.5711634159088135, "learning_rate": 0.0008536401005571409, "loss": 3.4078, "step": 17240 }, { "epoch": 1.171694523712461, "grad_norm": 2.1095192432403564, "learning_rate": 0.0008535976355483082, "loss": 3.569, "step": 17245 }, { "epoch": 1.1720342437831226, "grad_norm": 2.0878241062164307, "learning_rate": 0.0008535551705394755, "loss": 3.5618, "step": 17250 }, { "epoch": 1.1723739638537845, "grad_norm": 2.2805182933807373, "learning_rate": 0.0008535127055306428, "loss": 3.7584, "step": 17255 }, { "epoch": 1.1727136839244463, "grad_norm": 1.7818342447280884, "learning_rate": 0.00085347024052181, "loss": 3.6147, "step": 17260 }, { "epoch": 1.173053403995108, "grad_norm": 1.7564647197723389, "learning_rate": 0.0008534277755129773, "loss": 3.5462, "step": 17265 }, { "epoch": 1.1733931240657698, "grad_norm": 1.9139090776443481, "learning_rate": 0.0008533853105041445, "loss": 3.4912, 
"step": 17270 }, { "epoch": 1.1737328441364316, "grad_norm": 1.9355636835098267, "learning_rate": 0.0008533428454953118, "loss": 3.258, "step": 17275 }, { "epoch": 1.1740725642070933, "grad_norm": 2.287770986557007, "learning_rate": 0.0008533003804864792, "loss": 3.7827, "step": 17280 }, { "epoch": 1.1744122842777551, "grad_norm": 2.371129035949707, "learning_rate": 0.0008532579154776464, "loss": 3.5078, "step": 17285 }, { "epoch": 1.174752004348417, "grad_norm": 1.8820973634719849, "learning_rate": 0.0008532154504688138, "loss": 3.6456, "step": 17290 }, { "epoch": 1.1750917244190786, "grad_norm": 1.7440475225448608, "learning_rate": 0.000853172985459981, "loss": 3.6097, "step": 17295 }, { "epoch": 1.1754314444897405, "grad_norm": 1.827925443649292, "learning_rate": 0.0008531305204511482, "loss": 3.804, "step": 17300 }, { "epoch": 1.1757711645604023, "grad_norm": 1.9209486246109009, "learning_rate": 0.0008530880554423156, "loss": 3.6875, "step": 17305 }, { "epoch": 1.176110884631064, "grad_norm": 2.3478384017944336, "learning_rate": 0.0008530455904334829, "loss": 3.7122, "step": 17310 }, { "epoch": 1.1764506047017258, "grad_norm": 2.3292672634124756, "learning_rate": 0.0008530031254246501, "loss": 3.5693, "step": 17315 }, { "epoch": 1.1767903247723877, "grad_norm": 1.6351920366287231, "learning_rate": 0.0008529606604158175, "loss": 3.7151, "step": 17320 }, { "epoch": 1.1771300448430493, "grad_norm": 2.0014638900756836, "learning_rate": 0.0008529181954069847, "loss": 3.4847, "step": 17325 }, { "epoch": 1.1774697649137111, "grad_norm": 1.752602458000183, "learning_rate": 0.0008528757303981519, "loss": 3.7628, "step": 17330 }, { "epoch": 1.177809484984373, "grad_norm": 1.4919582605361938, "learning_rate": 0.0008528332653893192, "loss": 3.7207, "step": 17335 }, { "epoch": 1.1781492050550346, "grad_norm": 1.9858567714691162, "learning_rate": 0.0008527908003804865, "loss": 3.6775, "step": 17340 }, { "epoch": 1.1784889251256965, "grad_norm": 2.1133081912994385, 
"learning_rate": 0.0008527483353716538, "loss": 3.6222, "step": 17345 }, { "epoch": 1.178828645196358, "grad_norm": 1.6485265493392944, "learning_rate": 0.0008527058703628211, "loss": 3.5357, "step": 17350 }, { "epoch": 1.17916836526702, "grad_norm": 1.7135611772537231, "learning_rate": 0.0008526634053539884, "loss": 3.3943, "step": 17355 }, { "epoch": 1.1795080853376818, "grad_norm": 1.9500927925109863, "learning_rate": 0.0008526209403451556, "loss": 3.6597, "step": 17360 }, { "epoch": 1.1798478054083434, "grad_norm": 2.738847017288208, "learning_rate": 0.0008525784753363229, "loss": 3.2892, "step": 17365 }, { "epoch": 1.1801875254790053, "grad_norm": 1.9220227003097534, "learning_rate": 0.0008525360103274901, "loss": 3.5176, "step": 17370 }, { "epoch": 1.1805272455496671, "grad_norm": 1.9090993404388428, "learning_rate": 0.0008524935453186574, "loss": 3.451, "step": 17375 }, { "epoch": 1.1808669656203288, "grad_norm": 1.8312193155288696, "learning_rate": 0.0008524510803098248, "loss": 3.5395, "step": 17380 }, { "epoch": 1.1812066856909906, "grad_norm": 2.174745798110962, "learning_rate": 0.000852408615300992, "loss": 3.4562, "step": 17385 }, { "epoch": 1.1815464057616525, "grad_norm": 2.243473529815674, "learning_rate": 0.0008523661502921593, "loss": 3.5714, "step": 17390 }, { "epoch": 1.1818861258323141, "grad_norm": 2.3789498805999756, "learning_rate": 0.0008523236852833266, "loss": 3.4424, "step": 17395 }, { "epoch": 1.182225845902976, "grad_norm": 1.9032756090164185, "learning_rate": 0.0008522812202744938, "loss": 3.3911, "step": 17400 }, { "epoch": 1.1825655659736376, "grad_norm": 2.378178119659424, "learning_rate": 0.0008522387552656611, "loss": 3.4748, "step": 17405 }, { "epoch": 1.1829052860442995, "grad_norm": 1.9095661640167236, "learning_rate": 0.0008521962902568284, "loss": 3.7821, "step": 17410 }, { "epoch": 1.1832450061149613, "grad_norm": 1.7250527143478394, "learning_rate": 0.0008521538252479957, "loss": 3.5296, "step": 17415 }, { "epoch": 
1.183584726185623, "grad_norm": 2.050326108932495, "learning_rate": 0.000852111360239163, "loss": 3.3978, "step": 17420 }, { "epoch": 1.1839244462562848, "grad_norm": 1.8733359575271606, "learning_rate": 0.0008520688952303303, "loss": 3.4895, "step": 17425 }, { "epoch": 1.1842641663269466, "grad_norm": 1.914300560951233, "learning_rate": 0.0008520264302214975, "loss": 3.7188, "step": 17430 }, { "epoch": 1.1846038863976083, "grad_norm": 1.7168296575546265, "learning_rate": 0.0008519839652126647, "loss": 3.7624, "step": 17435 }, { "epoch": 1.1849436064682701, "grad_norm": 2.471918821334839, "learning_rate": 0.0008519415002038321, "loss": 3.4513, "step": 17440 }, { "epoch": 1.185283326538932, "grad_norm": 1.9626896381378174, "learning_rate": 0.0008518990351949993, "loss": 3.3697, "step": 17445 }, { "epoch": 1.1856230466095936, "grad_norm": 2.1658833026885986, "learning_rate": 0.0008518565701861666, "loss": 3.5469, "step": 17450 }, { "epoch": 1.1859627666802555, "grad_norm": 2.062964677810669, "learning_rate": 0.000851814105177334, "loss": 3.7297, "step": 17455 }, { "epoch": 1.1863024867509173, "grad_norm": 1.5033652782440186, "learning_rate": 0.0008517716401685012, "loss": 3.5807, "step": 17460 }, { "epoch": 1.186642206821579, "grad_norm": 2.104787588119507, "learning_rate": 0.0008517291751596684, "loss": 3.5149, "step": 17465 }, { "epoch": 1.1869819268922408, "grad_norm": 1.8736763000488281, "learning_rate": 0.0008516867101508358, "loss": 3.7155, "step": 17470 }, { "epoch": 1.1873216469629027, "grad_norm": 2.072507858276367, "learning_rate": 0.000851644245142003, "loss": 3.2287, "step": 17475 }, { "epoch": 1.1876613670335643, "grad_norm": 2.414212703704834, "learning_rate": 0.0008516017801331702, "loss": 3.6586, "step": 17480 }, { "epoch": 1.1880010871042261, "grad_norm": 2.5012271404266357, "learning_rate": 0.0008515593151243376, "loss": 3.6979, "step": 17485 }, { "epoch": 1.188340807174888, "grad_norm": 2.0201821327209473, "learning_rate": 0.0008515168501155049, 
"loss": 3.2029, "step": 17490 }, { "epoch": 1.1886805272455496, "grad_norm": 2.036747932434082, "learning_rate": 0.0008514743851066721, "loss": 3.3714, "step": 17495 }, { "epoch": 1.1890202473162115, "grad_norm": 1.8051092624664307, "learning_rate": 0.0008514319200978394, "loss": 3.4783, "step": 17500 }, { "epoch": 1.1893599673868733, "grad_norm": 1.8705356121063232, "learning_rate": 0.0008513894550890067, "loss": 3.4662, "step": 17505 }, { "epoch": 1.189699687457535, "grad_norm": 1.69522225856781, "learning_rate": 0.0008513469900801739, "loss": 3.4524, "step": 17510 }, { "epoch": 1.1900394075281968, "grad_norm": 2.050438165664673, "learning_rate": 0.0008513045250713412, "loss": 3.7626, "step": 17515 }, { "epoch": 1.1903791275988584, "grad_norm": 2.6483118534088135, "learning_rate": 0.0008512620600625086, "loss": 3.4544, "step": 17520 }, { "epoch": 1.1907188476695203, "grad_norm": 2.6782705783843994, "learning_rate": 0.0008512195950536758, "loss": 3.529, "step": 17525 }, { "epoch": 1.1910585677401822, "grad_norm": 1.6906206607818604, "learning_rate": 0.0008511771300448431, "loss": 3.5626, "step": 17530 }, { "epoch": 1.1913982878108438, "grad_norm": 1.7634577751159668, "learning_rate": 0.0008511346650360103, "loss": 3.7479, "step": 17535 }, { "epoch": 1.1917380078815056, "grad_norm": 2.6373226642608643, "learning_rate": 0.0008510922000271776, "loss": 3.5213, "step": 17540 }, { "epoch": 1.1920777279521675, "grad_norm": 2.5679335594177246, "learning_rate": 0.0008510497350183449, "loss": 3.6536, "step": 17545 }, { "epoch": 1.1924174480228291, "grad_norm": 1.8686248064041138, "learning_rate": 0.0008510072700095121, "loss": 3.6344, "step": 17550 }, { "epoch": 1.192757168093491, "grad_norm": 2.6723060607910156, "learning_rate": 0.0008509648050006795, "loss": 3.5528, "step": 17555 }, { "epoch": 1.1930968881641528, "grad_norm": 2.150097131729126, "learning_rate": 0.0008509223399918468, "loss": 3.6167, "step": 17560 }, { "epoch": 1.1934366082348145, "grad_norm": 
1.9446667432785034, "learning_rate": 0.000850879874983014, "loss": 3.2729, "step": 17565 }, { "epoch": 1.1937763283054763, "grad_norm": 1.9633255004882812, "learning_rate": 0.0008508374099741812, "loss": 3.5739, "step": 17570 }, { "epoch": 1.194116048376138, "grad_norm": 2.37727689743042, "learning_rate": 0.0008507949449653486, "loss": 3.36, "step": 17575 }, { "epoch": 1.1944557684467998, "grad_norm": 2.0973968505859375, "learning_rate": 0.0008507524799565158, "loss": 3.5791, "step": 17580 }, { "epoch": 1.1947954885174616, "grad_norm": 2.3224036693573, "learning_rate": 0.000850710014947683, "loss": 3.7283, "step": 17585 }, { "epoch": 1.1951352085881233, "grad_norm": 1.9160079956054688, "learning_rate": 0.0008506675499388505, "loss": 3.5947, "step": 17590 }, { "epoch": 1.1954749286587851, "grad_norm": 1.7783706188201904, "learning_rate": 0.0008506250849300177, "loss": 3.6442, "step": 17595 }, { "epoch": 1.195814648729447, "grad_norm": 1.7561384439468384, "learning_rate": 0.0008505826199211849, "loss": 3.6513, "step": 17600 }, { "epoch": 1.1961543688001086, "grad_norm": 2.3711211681365967, "learning_rate": 0.0008505401549123523, "loss": 3.3701, "step": 17605 }, { "epoch": 1.1964940888707705, "grad_norm": 2.065034866333008, "learning_rate": 0.0008504976899035195, "loss": 3.1028, "step": 17610 }, { "epoch": 1.1968338089414323, "grad_norm": 2.5157856941223145, "learning_rate": 0.0008504552248946867, "loss": 3.7297, "step": 17615 }, { "epoch": 1.197173529012094, "grad_norm": 2.6928863525390625, "learning_rate": 0.000850412759885854, "loss": 3.5719, "step": 17620 }, { "epoch": 1.1975132490827558, "grad_norm": 2.0920770168304443, "learning_rate": 0.0008503702948770214, "loss": 3.2548, "step": 17625 }, { "epoch": 1.1978529691534177, "grad_norm": 1.7273770570755005, "learning_rate": 0.0008503278298681887, "loss": 3.5379, "step": 17630 }, { "epoch": 1.1981926892240793, "grad_norm": 1.7598638534545898, "learning_rate": 0.0008502853648593559, "loss": 3.3937, "step": 17635 }, { 
"epoch": 1.1985324092947411, "grad_norm": 2.2040271759033203, "learning_rate": 0.0008502428998505232, "loss": 3.7991, "step": 17640 }, { "epoch": 1.198872129365403, "grad_norm": 2.0769448280334473, "learning_rate": 0.0008502004348416905, "loss": 3.4867, "step": 17645 }, { "epoch": 1.1992118494360646, "grad_norm": 2.1615304946899414, "learning_rate": 0.0008501579698328577, "loss": 3.6252, "step": 17650 }, { "epoch": 1.1995515695067265, "grad_norm": 2.242249011993408, "learning_rate": 0.000850115504824025, "loss": 3.6608, "step": 17655 }, { "epoch": 1.1998912895773883, "grad_norm": 2.289668083190918, "learning_rate": 0.0008500730398151924, "loss": 3.6731, "step": 17660 }, { "epoch": 1.20023100964805, "grad_norm": 2.1470768451690674, "learning_rate": 0.0008500305748063596, "loss": 3.5862, "step": 17665 }, { "epoch": 1.2005707297187118, "grad_norm": 1.8129137754440308, "learning_rate": 0.0008499881097975268, "loss": 3.4553, "step": 17670 }, { "epoch": 1.2009104497893737, "grad_norm": 1.7451536655426025, "learning_rate": 0.0008499456447886942, "loss": 3.5589, "step": 17675 }, { "epoch": 1.2012501698600353, "grad_norm": 1.6233737468719482, "learning_rate": 0.0008499031797798614, "loss": 3.3473, "step": 17680 }, { "epoch": 1.2015898899306972, "grad_norm": 2.4765048027038574, "learning_rate": 0.0008498607147710286, "loss": 3.6722, "step": 17685 }, { "epoch": 1.2019296100013588, "grad_norm": 1.5942848920822144, "learning_rate": 0.000849818249762196, "loss": 3.7455, "step": 17690 }, { "epoch": 1.2022693300720206, "grad_norm": 1.8935418128967285, "learning_rate": 0.0008497757847533633, "loss": 3.2248, "step": 17695 }, { "epoch": 1.2026090501426825, "grad_norm": 1.5125211477279663, "learning_rate": 0.0008497333197445305, "loss": 3.4574, "step": 17700 }, { "epoch": 1.2029487702133441, "grad_norm": 2.392657518386841, "learning_rate": 0.0008496908547356979, "loss": 3.4023, "step": 17705 }, { "epoch": 1.203288490284006, "grad_norm": 1.9293133020401, "learning_rate": 
0.0008496483897268651, "loss": 3.8208, "step": 17710 }, { "epoch": 1.2036282103546678, "grad_norm": 2.1135201454162598, "learning_rate": 0.0008496059247180323, "loss": 3.3466, "step": 17715 }, { "epoch": 1.2039679304253295, "grad_norm": 1.5698391199111938, "learning_rate": 0.0008495634597091996, "loss": 3.6206, "step": 17720 }, { "epoch": 1.2043076504959913, "grad_norm": 1.7760757207870483, "learning_rate": 0.0008495209947003669, "loss": 3.6503, "step": 17725 }, { "epoch": 1.2046473705666532, "grad_norm": 2.282825231552124, "learning_rate": 0.0008494785296915342, "loss": 3.554, "step": 17730 }, { "epoch": 1.2049870906373148, "grad_norm": 2.386223077774048, "learning_rate": 0.0008494360646827015, "loss": 3.5993, "step": 17735 }, { "epoch": 1.2053268107079766, "grad_norm": 1.606630802154541, "learning_rate": 0.0008493935996738688, "loss": 3.4298, "step": 17740 }, { "epoch": 1.2056665307786383, "grad_norm": 1.541431188583374, "learning_rate": 0.000849351134665036, "loss": 3.4808, "step": 17745 }, { "epoch": 1.2060062508493001, "grad_norm": 2.1311771869659424, "learning_rate": 0.0008493086696562033, "loss": 3.714, "step": 17750 }, { "epoch": 1.206345970919962, "grad_norm": 2.490872859954834, "learning_rate": 0.0008492662046473706, "loss": 3.397, "step": 17755 }, { "epoch": 1.2066856909906236, "grad_norm": 1.931036114692688, "learning_rate": 0.0008492237396385378, "loss": 3.5917, "step": 17760 }, { "epoch": 1.2070254110612855, "grad_norm": 1.73344886302948, "learning_rate": 0.0008491812746297052, "loss": 3.6421, "step": 17765 }, { "epoch": 1.2073651311319473, "grad_norm": 2.007688283920288, "learning_rate": 0.0008491388096208724, "loss": 3.38, "step": 17770 }, { "epoch": 1.207704851202609, "grad_norm": 2.057305335998535, "learning_rate": 0.0008490963446120397, "loss": 3.73, "step": 17775 }, { "epoch": 1.2080445712732708, "grad_norm": null, "learning_rate": 0.0008490623726049735, "loss": 3.7162, "step": 17780 }, { "epoch": 1.2083842913439327, "grad_norm": 
2.0667600631713867, "learning_rate": 0.0008490199075961408, "loss": 3.4111, "step": 17785 }, { "epoch": 1.2087240114145943, "grad_norm": 2.1313233375549316, "learning_rate": 0.0008489774425873081, "loss": 3.8697, "step": 17790 }, { "epoch": 1.2090637314852561, "grad_norm": 1.7254570722579956, "learning_rate": 0.0008489349775784754, "loss": 3.6472, "step": 17795 }, { "epoch": 1.209403451555918, "grad_norm": 2.2116494178771973, "learning_rate": 0.0008488925125696426, "loss": 3.771, "step": 17800 }, { "epoch": 1.2097431716265796, "grad_norm": 2.33630633354187, "learning_rate": 0.0008488500475608098, "loss": 3.3877, "step": 17805 }, { "epoch": 1.2100828916972415, "grad_norm": 2.5781707763671875, "learning_rate": 0.0008488075825519772, "loss": 3.4967, "step": 17810 }, { "epoch": 1.2104226117679033, "grad_norm": 3.1278789043426514, "learning_rate": 0.0008487651175431444, "loss": 3.6282, "step": 17815 }, { "epoch": 1.210762331838565, "grad_norm": 1.8098803758621216, "learning_rate": 0.0008487226525343117, "loss": 3.5844, "step": 17820 }, { "epoch": 1.2111020519092268, "grad_norm": 2.943101406097412, "learning_rate": 0.0008486801875254791, "loss": 3.671, "step": 17825 }, { "epoch": 1.2114417719798887, "grad_norm": 1.5902198553085327, "learning_rate": 0.0008486377225166463, "loss": 3.7003, "step": 17830 }, { "epoch": 1.2117814920505503, "grad_norm": 1.9919922351837158, "learning_rate": 0.0008485952575078136, "loss": 3.6499, "step": 17835 }, { "epoch": 1.2121212121212122, "grad_norm": 2.097424268722534, "learning_rate": 0.0008485527924989809, "loss": 3.8104, "step": 17840 }, { "epoch": 1.212460932191874, "grad_norm": 1.8864223957061768, "learning_rate": 0.0008485103274901481, "loss": 3.6909, "step": 17845 }, { "epoch": 1.2128006522625356, "grad_norm": 2.2208774089813232, "learning_rate": 0.0008484678624813154, "loss": 3.6042, "step": 17850 }, { "epoch": 1.2131403723331975, "grad_norm": 2.2651870250701904, "learning_rate": 0.0008484253974724828, "loss": 3.6458, "step": 17855 
}, { "epoch": 1.2134800924038591, "grad_norm": 1.476393461227417, "learning_rate": 0.00084838293246365, "loss": 3.4043, "step": 17860 }, { "epoch": 1.213819812474521, "grad_norm": 1.7945348024368286, "learning_rate": 0.0008483404674548173, "loss": 3.7629, "step": 17865 }, { "epoch": 1.2141595325451828, "grad_norm": 1.92212975025177, "learning_rate": 0.0008482980024459845, "loss": 3.4347, "step": 17870 }, { "epoch": 1.2144992526158445, "grad_norm": 1.7697643041610718, "learning_rate": 0.0008482555374371518, "loss": 3.7206, "step": 17875 }, { "epoch": 1.2148389726865063, "grad_norm": 1.8356177806854248, "learning_rate": 0.0008482130724283191, "loss": 3.6653, "step": 17880 }, { "epoch": 1.2151786927571682, "grad_norm": 2.452268600463867, "learning_rate": 0.0008481706074194863, "loss": 3.5182, "step": 17885 }, { "epoch": 1.2155184128278298, "grad_norm": 1.5168235301971436, "learning_rate": 0.0008481281424106537, "loss": 3.7412, "step": 17890 }, { "epoch": 1.2158581328984917, "grad_norm": 1.6048868894577026, "learning_rate": 0.000848085677401821, "loss": 3.714, "step": 17895 }, { "epoch": 1.2161978529691535, "grad_norm": 1.8339476585388184, "learning_rate": 0.0008480432123929882, "loss": 3.7095, "step": 17900 }, { "epoch": 1.2165375730398151, "grad_norm": 2.505448341369629, "learning_rate": 0.0008480007473841554, "loss": 3.5946, "step": 17905 }, { "epoch": 1.216877293110477, "grad_norm": 2.1674020290374756, "learning_rate": 0.0008479582823753228, "loss": 3.4854, "step": 17910 }, { "epoch": 1.2172170131811386, "grad_norm": 2.232039213180542, "learning_rate": 0.00084791581736649, "loss": 3.55, "step": 17915 }, { "epoch": 1.2175567332518005, "grad_norm": 1.8663480281829834, "learning_rate": 0.0008478733523576572, "loss": 3.6065, "step": 17920 }, { "epoch": 1.2178964533224623, "grad_norm": 1.9972773790359497, "learning_rate": 0.0008478308873488247, "loss": 3.6664, "step": 17925 }, { "epoch": 1.218236173393124, "grad_norm": 2.317490339279175, "learning_rate": 
0.0008477884223399919, "loss": 3.3595, "step": 17930 }, { "epoch": 1.2185758934637858, "grad_norm": 1.8225007057189941, "learning_rate": 0.0008477459573311591, "loss": 3.7249, "step": 17935 }, { "epoch": 1.2189156135344477, "grad_norm": 2.365962266921997, "learning_rate": 0.0008477034923223265, "loss": 3.4042, "step": 17940 }, { "epoch": 1.2192553336051093, "grad_norm": 1.7087156772613525, "learning_rate": 0.0008476610273134937, "loss": 3.8999, "step": 17945 }, { "epoch": 1.2195950536757711, "grad_norm": 1.9235752820968628, "learning_rate": 0.0008476185623046609, "loss": 3.5558, "step": 17950 }, { "epoch": 1.219934773746433, "grad_norm": 2.6981735229492188, "learning_rate": 0.0008475760972958282, "loss": 3.452, "step": 17955 }, { "epoch": 1.2202744938170946, "grad_norm": 1.9907363653182983, "learning_rate": 0.0008475336322869956, "loss": 3.5596, "step": 17960 }, { "epoch": 1.2206142138877565, "grad_norm": 1.9155895709991455, "learning_rate": 0.0008474911672781628, "loss": 3.605, "step": 17965 }, { "epoch": 1.2209539339584183, "grad_norm": 1.6062812805175781, "learning_rate": 0.0008474487022693301, "loss": 3.5802, "step": 17970 }, { "epoch": 1.22129365402908, "grad_norm": 2.3070008754730225, "learning_rate": 0.0008474062372604974, "loss": 3.5769, "step": 17975 }, { "epoch": 1.2216333740997418, "grad_norm": 1.546897053718567, "learning_rate": 0.0008473637722516646, "loss": 3.5743, "step": 17980 }, { "epoch": 1.2219730941704037, "grad_norm": 1.9635777473449707, "learning_rate": 0.0008473213072428319, "loss": 3.3083, "step": 17985 }, { "epoch": 1.2223128142410653, "grad_norm": 2.6278023719787598, "learning_rate": 0.0008472788422339992, "loss": 3.7748, "step": 17990 }, { "epoch": 1.2226525343117272, "grad_norm": 2.053971290588379, "learning_rate": 0.0008472363772251665, "loss": 3.6748, "step": 17995 }, { "epoch": 1.222992254382389, "grad_norm": 2.9751365184783936, "learning_rate": 0.0008471939122163338, "loss": 3.6452, "step": 18000 }, { "epoch": 1.2233319744530506, 
"grad_norm": 2.1251769065856934, "learning_rate": 0.000847151447207501, "loss": 3.4287, "step": 18005 }, { "epoch": 1.2236716945237125, "grad_norm": 2.037700653076172, "learning_rate": 0.0008471089821986683, "loss": 3.5805, "step": 18010 }, { "epoch": 1.2240114145943743, "grad_norm": 1.6718037128448486, "learning_rate": 0.0008470665171898356, "loss": 3.5027, "step": 18015 }, { "epoch": 1.224351134665036, "grad_norm": 1.7554056644439697, "learning_rate": 0.0008470240521810028, "loss": 3.7878, "step": 18020 }, { "epoch": 1.2246908547356978, "grad_norm": 1.8056507110595703, "learning_rate": 0.0008469815871721701, "loss": 3.479, "step": 18025 }, { "epoch": 1.2250305748063597, "grad_norm": 1.90060555934906, "learning_rate": 0.0008469391221633375, "loss": 3.339, "step": 18030 }, { "epoch": 1.2253702948770213, "grad_norm": 1.900716781616211, "learning_rate": 0.0008468966571545047, "loss": 3.8809, "step": 18035 }, { "epoch": 1.2257100149476832, "grad_norm": 2.2378551959991455, "learning_rate": 0.000846854192145672, "loss": 3.7392, "step": 18040 }, { "epoch": 1.2260497350183448, "grad_norm": 1.799375057220459, "learning_rate": 0.0008468117271368393, "loss": 3.7158, "step": 18045 }, { "epoch": 1.2263894550890067, "grad_norm": 1.877743124961853, "learning_rate": 0.0008467692621280065, "loss": 3.4214, "step": 18050 }, { "epoch": 1.2267291751596685, "grad_norm": 2.1686644554138184, "learning_rate": 0.0008467267971191737, "loss": 3.6101, "step": 18055 }, { "epoch": 1.2270688952303301, "grad_norm": 2.5581367015838623, "learning_rate": 0.0008466843321103411, "loss": 3.3641, "step": 18060 }, { "epoch": 1.227408615300992, "grad_norm": 1.8954845666885376, "learning_rate": 0.0008466418671015084, "loss": 3.5561, "step": 18065 }, { "epoch": 1.2277483353716538, "grad_norm": 2.2595932483673096, "learning_rate": 0.0008465994020926756, "loss": 3.6886, "step": 18070 }, { "epoch": 1.2280880554423155, "grad_norm": 1.8587950468063354, "learning_rate": 0.000846556937083843, "loss": 3.6734, 
"step": 18075 }, { "epoch": 1.2284277755129773, "grad_norm": 1.969395637512207, "learning_rate": 0.0008465144720750102, "loss": 3.4814, "step": 18080 }, { "epoch": 1.228767495583639, "grad_norm": 2.275649309158325, "learning_rate": 0.0008464720070661774, "loss": 3.4867, "step": 18085 }, { "epoch": 1.2291072156543008, "grad_norm": 2.3850958347320557, "learning_rate": 0.0008464295420573448, "loss": 3.6083, "step": 18090 }, { "epoch": 1.2294469357249627, "grad_norm": 2.163510799407959, "learning_rate": 0.000846387077048512, "loss": 3.5034, "step": 18095 }, { "epoch": 1.2297866557956243, "grad_norm": 1.659718632698059, "learning_rate": 0.0008463446120396793, "loss": 3.7573, "step": 18100 }, { "epoch": 1.2301263758662861, "grad_norm": 1.6920853853225708, "learning_rate": 0.0008463021470308466, "loss": 3.6488, "step": 18105 }, { "epoch": 1.230466095936948, "grad_norm": 1.7880595922470093, "learning_rate": 0.0008462596820220139, "loss": 3.5, "step": 18110 }, { "epoch": 1.2308058160076096, "grad_norm": 1.78448486328125, "learning_rate": 0.0008462172170131811, "loss": 3.5937, "step": 18115 }, { "epoch": 1.2311455360782715, "grad_norm": 1.8554399013519287, "learning_rate": 0.0008461747520043484, "loss": 3.7581, "step": 18120 }, { "epoch": 1.2314852561489333, "grad_norm": 1.9160727262496948, "learning_rate": 0.0008461322869955157, "loss": 3.4902, "step": 18125 }, { "epoch": 1.231824976219595, "grad_norm": 1.775938630104065, "learning_rate": 0.0008460898219866829, "loss": 3.4497, "step": 18130 }, { "epoch": 1.2321646962902568, "grad_norm": 1.9522027969360352, "learning_rate": 0.0008460473569778503, "loss": 3.5531, "step": 18135 }, { "epoch": 1.2325044163609187, "grad_norm": 2.1863508224487305, "learning_rate": 0.0008460048919690176, "loss": 3.5592, "step": 18140 }, { "epoch": 1.2328441364315803, "grad_norm": 2.612517833709717, "learning_rate": 0.0008459624269601848, "loss": 3.5501, "step": 18145 }, { "epoch": 1.2331838565022422, "grad_norm": 2.0531420707702637, 
"learning_rate": 0.0008459199619513521, "loss": 3.564, "step": 18150 }, { "epoch": 1.233523576572904, "grad_norm": 1.6712645292282104, "learning_rate": 0.0008458774969425193, "loss": 3.4285, "step": 18155 }, { "epoch": 1.2338632966435656, "grad_norm": 2.518333673477173, "learning_rate": 0.0008458350319336866, "loss": 3.5164, "step": 18160 }, { "epoch": 1.2342030167142275, "grad_norm": 2.647038698196411, "learning_rate": 0.000845792566924854, "loss": 3.3697, "step": 18165 }, { "epoch": 1.2345427367848893, "grad_norm": 2.4556610584259033, "learning_rate": 0.0008457501019160212, "loss": 3.5352, "step": 18170 }, { "epoch": 1.234882456855551, "grad_norm": 2.257113456726074, "learning_rate": 0.0008457076369071886, "loss": 3.4952, "step": 18175 }, { "epoch": 1.2352221769262128, "grad_norm": 2.196476459503174, "learning_rate": 0.0008456651718983558, "loss": 3.5115, "step": 18180 }, { "epoch": 1.2355618969968747, "grad_norm": 2.567445755004883, "learning_rate": 0.000845622706889523, "loss": 3.597, "step": 18185 }, { "epoch": 1.2359016170675363, "grad_norm": 2.0581212043762207, "learning_rate": 0.0008455802418806904, "loss": 3.5246, "step": 18190 }, { "epoch": 1.2362413371381982, "grad_norm": 1.8779634237289429, "learning_rate": 0.0008455377768718576, "loss": 3.4649, "step": 18195 }, { "epoch": 1.23658105720886, "grad_norm": 1.9267122745513916, "learning_rate": 0.0008454953118630249, "loss": 3.5265, "step": 18200 }, { "epoch": 1.2369207772795217, "grad_norm": 2.191026449203491, "learning_rate": 0.0008454528468541922, "loss": 3.4514, "step": 18205 }, { "epoch": 1.2372604973501835, "grad_norm": 2.4866089820861816, "learning_rate": 0.0008454103818453595, "loss": 3.5054, "step": 18210 }, { "epoch": 1.2376002174208451, "grad_norm": 1.6975446939468384, "learning_rate": 0.0008453679168365267, "loss": 3.5226, "step": 18215 }, { "epoch": 1.237939937491507, "grad_norm": 2.053661346435547, "learning_rate": 0.000845325451827694, "loss": 3.298, "step": 18220 }, { "epoch": 
1.2382796575621688, "grad_norm": 2.1667163372039795, "learning_rate": 0.0008452829868188613, "loss": 3.8337, "step": 18225 }, { "epoch": 1.2386193776328305, "grad_norm": 2.273313045501709, "learning_rate": 0.0008452405218100285, "loss": 3.639, "step": 18230 }, { "epoch": 1.2389590977034923, "grad_norm": 2.2435824871063232, "learning_rate": 0.0008451980568011959, "loss": 3.6236, "step": 18235 }, { "epoch": 1.2392988177741542, "grad_norm": 1.4925525188446045, "learning_rate": 0.0008451555917923632, "loss": 3.4404, "step": 18240 }, { "epoch": 1.2396385378448158, "grad_norm": 2.728323459625244, "learning_rate": 0.0008451131267835304, "loss": 3.6241, "step": 18245 }, { "epoch": 1.2399782579154777, "grad_norm": 1.6669790744781494, "learning_rate": 0.0008450706617746977, "loss": 3.5772, "step": 18250 }, { "epoch": 1.2403179779861393, "grad_norm": 1.9545239210128784, "learning_rate": 0.0008450281967658649, "loss": 3.5335, "step": 18255 }, { "epoch": 1.2406576980568012, "grad_norm": 2.209352493286133, "learning_rate": 0.0008449857317570322, "loss": 3.4303, "step": 18260 }, { "epoch": 1.240997418127463, "grad_norm": 1.927733063697815, "learning_rate": 0.0008449432667481995, "loss": 3.7595, "step": 18265 }, { "epoch": 1.2413371381981246, "grad_norm": 2.094484329223633, "learning_rate": 0.0008449008017393668, "loss": 3.2289, "step": 18270 }, { "epoch": 1.2416768582687865, "grad_norm": 2.0396482944488525, "learning_rate": 0.0008448583367305341, "loss": 3.4208, "step": 18275 }, { "epoch": 1.2420165783394483, "grad_norm": 1.8606765270233154, "learning_rate": 0.0008448158717217014, "loss": 3.647, "step": 18280 }, { "epoch": 1.24235629841011, "grad_norm": 1.8940743207931519, "learning_rate": 0.0008447734067128686, "loss": 3.8265, "step": 18285 }, { "epoch": 1.2426960184807718, "grad_norm": 2.198092222213745, "learning_rate": 0.0008447309417040358, "loss": 3.5627, "step": 18290 }, { "epoch": 1.2430357385514337, "grad_norm": 1.9646661281585693, "learning_rate": 0.0008446884766952032, 
"loss": 3.491, "step": 18295 }, { "epoch": 1.2433754586220953, "grad_norm": 2.4234869480133057, "learning_rate": 0.0008446460116863704, "loss": 3.7359, "step": 18300 }, { "epoch": 1.2437151786927572, "grad_norm": 1.956574559211731, "learning_rate": 0.0008446035466775377, "loss": 3.6657, "step": 18305 }, { "epoch": 1.244054898763419, "grad_norm": 2.135592222213745, "learning_rate": 0.0008445610816687051, "loss": 3.7755, "step": 18310 }, { "epoch": 1.2443946188340806, "grad_norm": 2.0779008865356445, "learning_rate": 0.0008445186166598723, "loss": 3.674, "step": 18315 }, { "epoch": 1.2447343389047425, "grad_norm": 2.1796185970306396, "learning_rate": 0.0008444761516510395, "loss": 3.5909, "step": 18320 }, { "epoch": 1.2450740589754044, "grad_norm": 2.035565137863159, "learning_rate": 0.0008444336866422069, "loss": 3.3911, "step": 18325 }, { "epoch": 1.245413779046066, "grad_norm": 2.093289613723755, "learning_rate": 0.0008443912216333741, "loss": 3.3942, "step": 18330 }, { "epoch": 1.2457534991167278, "grad_norm": 2.4353933334350586, "learning_rate": 0.0008443487566245413, "loss": 3.543, "step": 18335 }, { "epoch": 1.2460932191873897, "grad_norm": 2.152371406555176, "learning_rate": 0.0008443062916157088, "loss": 3.5594, "step": 18340 }, { "epoch": 1.2464329392580513, "grad_norm": 1.86212158203125, "learning_rate": 0.000844263826606876, "loss": 3.6073, "step": 18345 }, { "epoch": 1.2467726593287132, "grad_norm": 1.9581011533737183, "learning_rate": 0.0008442213615980432, "loss": 3.4297, "step": 18350 }, { "epoch": 1.247112379399375, "grad_norm": 1.6012276411056519, "learning_rate": 0.0008441788965892105, "loss": 3.7615, "step": 18355 }, { "epoch": 1.2474520994700367, "grad_norm": 1.5564689636230469, "learning_rate": 0.0008441364315803778, "loss": 3.6864, "step": 18360 }, { "epoch": 1.2477918195406985, "grad_norm": 1.3971658945083618, "learning_rate": 0.000844093966571545, "loss": 3.5737, "step": 18365 }, { "epoch": 1.2481315396113604, "grad_norm": 2.0731894969940186, 
"learning_rate": 0.0008440515015627123, "loss": 3.4049, "step": 18370 }, { "epoch": 1.248471259682022, "grad_norm": 1.6889256238937378, "learning_rate": 0.0008440090365538797, "loss": 3.7573, "step": 18375 }, { "epoch": 1.2488109797526838, "grad_norm": 2.034219741821289, "learning_rate": 0.0008439665715450469, "loss": 3.2416, "step": 18380 }, { "epoch": 1.2491506998233455, "grad_norm": 1.753332257270813, "learning_rate": 0.0008439241065362142, "loss": 3.8665, "step": 18385 }, { "epoch": 1.2494904198940073, "grad_norm": 2.115269660949707, "learning_rate": 0.0008438816415273814, "loss": 3.4496, "step": 18390 }, { "epoch": 1.2498301399646692, "grad_norm": 1.7260210514068604, "learning_rate": 0.0008438391765185487, "loss": 3.5459, "step": 18395 }, { "epoch": 1.2501698600353308, "grad_norm": 1.6431113481521606, "learning_rate": 0.000843796711509716, "loss": 3.6894, "step": 18400 }, { "epoch": 1.2505095801059927, "grad_norm": 1.933716893196106, "learning_rate": 0.0008437542465008832, "loss": 3.7215, "step": 18405 }, { "epoch": 1.2508493001766543, "grad_norm": 1.7394294738769531, "learning_rate": 0.0008437117814920506, "loss": 3.5925, "step": 18410 }, { "epoch": 1.2511890202473162, "grad_norm": 1.9347469806671143, "learning_rate": 0.0008436693164832179, "loss": 3.2387, "step": 18415 }, { "epoch": 1.251528740317978, "grad_norm": 2.093820095062256, "learning_rate": 0.0008436268514743851, "loss": 3.4322, "step": 18420 }, { "epoch": 1.2518684603886396, "grad_norm": 2.1356751918792725, "learning_rate": 0.0008435843864655524, "loss": 3.5935, "step": 18425 }, { "epoch": 1.2522081804593015, "grad_norm": 2.0836479663848877, "learning_rate": 0.0008435419214567197, "loss": 3.5102, "step": 18430 }, { "epoch": 1.2525479005299633, "grad_norm": 2.063568353652954, "learning_rate": 0.0008434994564478869, "loss": 3.6943, "step": 18435 }, { "epoch": 1.252887620600625, "grad_norm": 1.977246642112732, "learning_rate": 0.0008434569914390541, "loss": 3.661, "step": 18440 }, { "epoch": 
1.2532273406712868, "grad_norm": 2.254812479019165, "learning_rate": 0.0008434145264302216, "loss": 3.5142, "step": 18445 }, { "epoch": 1.2535670607419487, "grad_norm": 1.7498053312301636, "learning_rate": 0.0008433720614213888, "loss": 3.6402, "step": 18450 }, { "epoch": 1.2539067808126103, "grad_norm": 1.6477025747299194, "learning_rate": 0.000843329596412556, "loss": 3.4193, "step": 18455 }, { "epoch": 1.2542465008832722, "grad_norm": 1.9956412315368652, "learning_rate": 0.0008432871314037234, "loss": 3.6768, "step": 18460 }, { "epoch": 1.254586220953934, "grad_norm": 2.1354191303253174, "learning_rate": 0.0008432446663948906, "loss": 3.4687, "step": 18465 }, { "epoch": 1.2549259410245956, "grad_norm": 2.273434638977051, "learning_rate": 0.0008432022013860578, "loss": 3.2146, "step": 18470 }, { "epoch": 1.2552656610952575, "grad_norm": 2.221771717071533, "learning_rate": 0.0008431597363772252, "loss": 3.5414, "step": 18475 }, { "epoch": 1.2556053811659194, "grad_norm": 2.274744749069214, "learning_rate": 0.0008431172713683925, "loss": 3.4625, "step": 18480 }, { "epoch": 1.255945101236581, "grad_norm": 2.022540330886841, "learning_rate": 0.0008430748063595597, "loss": 3.3733, "step": 18485 }, { "epoch": 1.2562848213072428, "grad_norm": 2.370293378829956, "learning_rate": 0.000843032341350727, "loss": 3.628, "step": 18490 }, { "epoch": 1.2566245413779047, "grad_norm": 2.1271896362304688, "learning_rate": 0.0008429898763418943, "loss": 3.581, "step": 18495 }, { "epoch": 1.2569642614485663, "grad_norm": 2.1044018268585205, "learning_rate": 0.0008429474113330615, "loss": 3.4438, "step": 18500 }, { "epoch": 1.2573039815192282, "grad_norm": 1.8321641683578491, "learning_rate": 0.0008429049463242288, "loss": 3.5394, "step": 18505 }, { "epoch": 1.25764370158989, "grad_norm": 2.7527711391448975, "learning_rate": 0.0008428624813153961, "loss": 3.5522, "step": 18510 }, { "epoch": 1.2579834216605517, "grad_norm": 2.481262445449829, "learning_rate": 0.0008428200163065635, 
"loss": 3.7441, "step": 18515 }, { "epoch": 1.2583231417312135, "grad_norm": 2.711437225341797, "learning_rate": 0.0008427775512977307, "loss": 3.6165, "step": 18520 }, { "epoch": 1.2586628618018754, "grad_norm": 1.9025455713272095, "learning_rate": 0.000842735086288898, "loss": 3.8169, "step": 18525 }, { "epoch": 1.259002581872537, "grad_norm": 1.7264020442962646, "learning_rate": 0.0008426926212800653, "loss": 3.6471, "step": 18530 }, { "epoch": 1.2593423019431988, "grad_norm": 1.913443684577942, "learning_rate": 0.0008426501562712325, "loss": 3.7627, "step": 18535 }, { "epoch": 1.2596820220138607, "grad_norm": 1.9993077516555786, "learning_rate": 0.0008426076912623997, "loss": 3.4727, "step": 18540 }, { "epoch": 1.2600217420845223, "grad_norm": 2.311701774597168, "learning_rate": 0.0008425652262535671, "loss": 3.8007, "step": 18545 }, { "epoch": 1.2603614621551842, "grad_norm": 1.7738229036331177, "learning_rate": 0.0008425227612447344, "loss": 3.8948, "step": 18550 }, { "epoch": 1.260701182225846, "grad_norm": 2.300686836242676, "learning_rate": 0.0008424802962359016, "loss": 3.4899, "step": 18555 }, { "epoch": 1.2610409022965077, "grad_norm": 1.7203505039215088, "learning_rate": 0.000842437831227069, "loss": 3.9628, "step": 18560 }, { "epoch": 1.2613806223671695, "grad_norm": 2.419499635696411, "learning_rate": 0.0008423953662182362, "loss": 3.6434, "step": 18565 }, { "epoch": 1.2617203424378312, "grad_norm": 1.9348256587982178, "learning_rate": 0.0008423529012094034, "loss": 3.8296, "step": 18570 }, { "epoch": 1.262060062508493, "grad_norm": 1.8900822401046753, "learning_rate": 0.0008423104362005708, "loss": 3.3762, "step": 18575 }, { "epoch": 1.2623997825791546, "grad_norm": 2.7749814987182617, "learning_rate": 0.000842267971191738, "loss": 3.4969, "step": 18580 }, { "epoch": 1.2627395026498165, "grad_norm": 2.8245906829833984, "learning_rate": 0.0008422255061829053, "loss": 3.6359, "step": 18585 }, { "epoch": 1.2630792227204783, "grad_norm": 
2.1347155570983887, "learning_rate": 0.0008421830411740727, "loss": 3.7858, "step": 18590 }, { "epoch": 1.26341894279114, "grad_norm": 1.7671009302139282, "learning_rate": 0.0008421405761652399, "loss": 3.5827, "step": 18595 }, { "epoch": 1.2637586628618018, "grad_norm": 1.7505216598510742, "learning_rate": 0.0008420981111564071, "loss": 3.6999, "step": 18600 }, { "epoch": 1.2640983829324637, "grad_norm": 2.015866756439209, "learning_rate": 0.0008420556461475744, "loss": 3.556, "step": 18605 }, { "epoch": 1.2644381030031253, "grad_norm": 1.7525229454040527, "learning_rate": 0.0008420131811387417, "loss": 3.2767, "step": 18610 }, { "epoch": 1.2647778230737872, "grad_norm": 1.8651031255722046, "learning_rate": 0.0008419707161299089, "loss": 3.7018, "step": 18615 }, { "epoch": 1.265117543144449, "grad_norm": 1.9379000663757324, "learning_rate": 0.0008419282511210763, "loss": 3.8742, "step": 18620 }, { "epoch": 1.2654572632151107, "grad_norm": 2.417576789855957, "learning_rate": 0.0008418857861122436, "loss": 3.3849, "step": 18625 }, { "epoch": 1.2657969832857725, "grad_norm": 2.1129941940307617, "learning_rate": 0.0008418433211034108, "loss": 3.3773, "step": 18630 }, { "epoch": 1.2661367033564344, "grad_norm": 2.1544418334960938, "learning_rate": 0.0008418008560945781, "loss": 3.8436, "step": 18635 }, { "epoch": 1.266476423427096, "grad_norm": 1.7953035831451416, "learning_rate": 0.0008417583910857453, "loss": 3.2667, "step": 18640 }, { "epoch": 1.2668161434977578, "grad_norm": 2.181389570236206, "learning_rate": 0.0008417159260769126, "loss": 3.3519, "step": 18645 }, { "epoch": 1.2671558635684197, "grad_norm": 2.2931690216064453, "learning_rate": 0.00084167346106808, "loss": 3.5622, "step": 18650 }, { "epoch": 1.2674955836390813, "grad_norm": 1.582778811454773, "learning_rate": 0.0008416309960592472, "loss": 3.6652, "step": 18655 }, { "epoch": 1.2678353037097432, "grad_norm": 2.3667726516723633, "learning_rate": 0.0008415885310504145, "loss": 3.7373, "step": 18660 }, 
{ "epoch": 1.268175023780405, "grad_norm": 2.1093292236328125, "learning_rate": 0.0008415460660415818, "loss": 3.7412, "step": 18665 }, { "epoch": 1.2685147438510667, "grad_norm": 2.809974431991577, "learning_rate": 0.000841503601032749, "loss": 3.4899, "step": 18670 }, { "epoch": 1.2688544639217285, "grad_norm": 1.9935122728347778, "learning_rate": 0.0008414611360239163, "loss": 3.4624, "step": 18675 }, { "epoch": 1.2691941839923904, "grad_norm": 2.0950334072113037, "learning_rate": 0.0008414186710150836, "loss": 3.3958, "step": 18680 }, { "epoch": 1.269533904063052, "grad_norm": 2.1276137828826904, "learning_rate": 0.0008413762060062509, "loss": 3.8104, "step": 18685 }, { "epoch": 1.2698736241337139, "grad_norm": 2.1431779861450195, "learning_rate": 0.0008413337409974181, "loss": 3.7839, "step": 18690 }, { "epoch": 1.2702133442043757, "grad_norm": 2.353989362716675, "learning_rate": 0.0008412912759885855, "loss": 3.5704, "step": 18695 }, { "epoch": 1.2705530642750373, "grad_norm": 2.0290660858154297, "learning_rate": 0.0008412488109797527, "loss": 3.7302, "step": 18700 }, { "epoch": 1.2708927843456992, "grad_norm": 1.6013556718826294, "learning_rate": 0.0008412063459709199, "loss": 3.5739, "step": 18705 }, { "epoch": 1.271232504416361, "grad_norm": 2.1157546043395996, "learning_rate": 0.0008411638809620873, "loss": 3.5942, "step": 18710 }, { "epoch": 1.2715722244870227, "grad_norm": 2.3693814277648926, "learning_rate": 0.0008411214159532545, "loss": 3.4998, "step": 18715 }, { "epoch": 1.2719119445576845, "grad_norm": 2.2092854976654053, "learning_rate": 0.0008410789509444218, "loss": 3.402, "step": 18720 }, { "epoch": 1.2722516646283464, "grad_norm": 1.7217273712158203, "learning_rate": 0.0008410364859355892, "loss": 3.6203, "step": 18725 }, { "epoch": 1.272591384699008, "grad_norm": 2.047572612762451, "learning_rate": 0.0008409940209267564, "loss": 3.4684, "step": 18730 }, { "epoch": 1.2729311047696699, "grad_norm": 1.6237516403198242, "learning_rate": 
0.0008409515559179236, "loss": 3.6048, "step": 18735 }, { "epoch": 1.2732708248403315, "grad_norm": 1.7767105102539062, "learning_rate": 0.000840909090909091, "loss": 3.391, "step": 18740 }, { "epoch": 1.2736105449109933, "grad_norm": 2.2614033222198486, "learning_rate": 0.0008408666259002582, "loss": 3.4386, "step": 18745 }, { "epoch": 1.273950264981655, "grad_norm": 2.2419188022613525, "learning_rate": 0.0008408241608914254, "loss": 3.6417, "step": 18750 }, { "epoch": 1.2742899850523168, "grad_norm": 2.7467846870422363, "learning_rate": 0.0008407816958825928, "loss": 3.7596, "step": 18755 }, { "epoch": 1.2746297051229787, "grad_norm": 1.9540339708328247, "learning_rate": 0.0008407392308737601, "loss": 3.5638, "step": 18760 }, { "epoch": 1.2749694251936403, "grad_norm": 1.8364781141281128, "learning_rate": 0.0008406967658649273, "loss": 3.4215, "step": 18765 }, { "epoch": 1.2753091452643022, "grad_norm": 1.863296627998352, "learning_rate": 0.0008406543008560946, "loss": 3.4768, "step": 18770 }, { "epoch": 1.275648865334964, "grad_norm": 2.147495746612549, "learning_rate": 0.0008406118358472619, "loss": 3.5939, "step": 18775 }, { "epoch": 1.2759885854056257, "grad_norm": 1.6152207851409912, "learning_rate": 0.0008405693708384291, "loss": 3.7707, "step": 18780 }, { "epoch": 1.2763283054762875, "grad_norm": 2.2872867584228516, "learning_rate": 0.0008405269058295964, "loss": 3.6509, "step": 18785 }, { "epoch": 1.2766680255469494, "grad_norm": 1.960585594177246, "learning_rate": 0.0008404844408207637, "loss": 3.7402, "step": 18790 }, { "epoch": 1.277007745617611, "grad_norm": 1.7754727602005005, "learning_rate": 0.000840441975811931, "loss": 3.6564, "step": 18795 }, { "epoch": 1.2773474656882728, "grad_norm": 1.9831069707870483, "learning_rate": 0.0008403995108030983, "loss": 3.4867, "step": 18800 }, { "epoch": 1.2776871857589347, "grad_norm": 1.7566406726837158, "learning_rate": 0.0008403570457942655, "loss": 3.5926, "step": 18805 }, { "epoch": 1.2780269058295963, 
"grad_norm": 1.6287628412246704, "learning_rate": 0.0008403145807854328, "loss": 3.5953, "step": 18810 }, { "epoch": 1.2783666259002582, "grad_norm": 1.989606499671936, "learning_rate": 0.0008402721157766001, "loss": 3.6077, "step": 18815 }, { "epoch": 1.27870634597092, "grad_norm": 2.445011854171753, "learning_rate": 0.0008402296507677673, "loss": 3.6878, "step": 18820 }, { "epoch": 1.2790460660415817, "grad_norm": 1.7821341753005981, "learning_rate": 0.0008401871857589347, "loss": 3.6122, "step": 18825 }, { "epoch": 1.2793857861122435, "grad_norm": 2.012007474899292, "learning_rate": 0.000840144720750102, "loss": 3.5623, "step": 18830 }, { "epoch": 1.2797255061829054, "grad_norm": 2.254720687866211, "learning_rate": 0.0008401022557412692, "loss": 3.8546, "step": 18835 }, { "epoch": 1.280065226253567, "grad_norm": 2.0982306003570557, "learning_rate": 0.0008400597907324364, "loss": 3.6249, "step": 18840 }, { "epoch": 1.2804049463242289, "grad_norm": 1.9664418697357178, "learning_rate": 0.0008400173257236038, "loss": 3.4832, "step": 18845 }, { "epoch": 1.2807446663948907, "grad_norm": 3.254751682281494, "learning_rate": 0.000839974860714771, "loss": 3.5094, "step": 18850 }, { "epoch": 1.2810843864655523, "grad_norm": 1.8193080425262451, "learning_rate": 0.0008399323957059383, "loss": 3.5362, "step": 18855 }, { "epoch": 1.2814241065362142, "grad_norm": 1.853629469871521, "learning_rate": 0.0008398899306971057, "loss": 3.5977, "step": 18860 }, { "epoch": 1.281763826606876, "grad_norm": 1.6645039319992065, "learning_rate": 0.0008398474656882729, "loss": 3.4174, "step": 18865 }, { "epoch": 1.2821035466775377, "grad_norm": 1.9145714044570923, "learning_rate": 0.0008398050006794402, "loss": 3.6927, "step": 18870 }, { "epoch": 1.2824432667481995, "grad_norm": 1.8449379205703735, "learning_rate": 0.0008397625356706075, "loss": 3.5534, "step": 18875 }, { "epoch": 1.2827829868188614, "grad_norm": 1.8996036052703857, "learning_rate": 0.0008397200706617747, "loss": 3.408, 
"step": 18880 }, { "epoch": 1.283122706889523, "grad_norm": 1.920233130455017, "learning_rate": 0.000839677605652942, "loss": 3.5998, "step": 18885 }, { "epoch": 1.2834624269601849, "grad_norm": 2.325377941131592, "learning_rate": 0.0008396351406441092, "loss": 3.3221, "step": 18890 }, { "epoch": 1.2838021470308467, "grad_norm": 1.841391682624817, "learning_rate": 0.0008395926756352766, "loss": 3.5622, "step": 18895 }, { "epoch": 1.2841418671015083, "grad_norm": 1.5820095539093018, "learning_rate": 0.0008395502106264439, "loss": 3.5512, "step": 18900 }, { "epoch": 1.2844815871721702, "grad_norm": 2.3460090160369873, "learning_rate": 0.0008395077456176111, "loss": 3.4852, "step": 18905 }, { "epoch": 1.2848213072428318, "grad_norm": 1.8157627582550049, "learning_rate": 0.0008394652806087784, "loss": 3.6925, "step": 18910 }, { "epoch": 1.2851610273134937, "grad_norm": 2.081144332885742, "learning_rate": 0.0008394228155999457, "loss": 3.2486, "step": 18915 }, { "epoch": 1.2855007473841553, "grad_norm": 1.8157294988632202, "learning_rate": 0.0008393803505911129, "loss": 3.6168, "step": 18920 }, { "epoch": 1.2858404674548172, "grad_norm": 1.7312568426132202, "learning_rate": 0.0008393378855822801, "loss": 3.494, "step": 18925 }, { "epoch": 1.286180187525479, "grad_norm": 1.840582013130188, "learning_rate": 0.0008392954205734476, "loss": 3.6168, "step": 18930 }, { "epoch": 1.2865199075961407, "grad_norm": 1.78810715675354, "learning_rate": 0.0008392529555646148, "loss": 3.382, "step": 18935 }, { "epoch": 1.2868596276668025, "grad_norm": 2.6507155895233154, "learning_rate": 0.000839210490555782, "loss": 3.6971, "step": 18940 }, { "epoch": 1.2871993477374644, "grad_norm": 2.163133144378662, "learning_rate": 0.0008391680255469494, "loss": 3.5859, "step": 18945 }, { "epoch": 1.287539067808126, "grad_norm": 1.8213719129562378, "learning_rate": 0.0008391255605381166, "loss": 3.794, "step": 18950 }, { "epoch": 1.2878787878787878, "grad_norm": 1.6655442714691162, "learning_rate": 
0.0008390830955292838, "loss": 3.6548, "step": 18955 }, { "epoch": 1.2882185079494497, "grad_norm": 2.7064802646636963, "learning_rate": 0.0008390406305204512, "loss": 3.4228, "step": 18960 }, { "epoch": 1.2885582280201113, "grad_norm": 2.2788381576538086, "learning_rate": 0.0008389981655116185, "loss": 3.4888, "step": 18965 }, { "epoch": 1.2888979480907732, "grad_norm": 2.217463254928589, "learning_rate": 0.0008389557005027857, "loss": 3.6065, "step": 18970 }, { "epoch": 1.289237668161435, "grad_norm": 1.7810453176498413, "learning_rate": 0.0008389132354939531, "loss": 3.6471, "step": 18975 }, { "epoch": 1.2895773882320967, "grad_norm": 1.5850239992141724, "learning_rate": 0.0008388707704851203, "loss": 3.4851, "step": 18980 }, { "epoch": 1.2899171083027585, "grad_norm": 1.6858367919921875, "learning_rate": 0.0008388283054762875, "loss": 3.6831, "step": 18985 }, { "epoch": 1.2902568283734204, "grad_norm": 1.7316839694976807, "learning_rate": 0.0008387858404674548, "loss": 3.4341, "step": 18990 }, { "epoch": 1.290596548444082, "grad_norm": 2.7884984016418457, "learning_rate": 0.0008387433754586221, "loss": 3.9162, "step": 18995 }, { "epoch": 1.2909362685147439, "grad_norm": 1.699349284172058, "learning_rate": 0.0008387009104497894, "loss": 3.4428, "step": 19000 }, { "epoch": 1.2912759885854057, "grad_norm": 2.06885027885437, "learning_rate": 0.0008386584454409567, "loss": 3.4926, "step": 19005 }, { "epoch": 1.2916157086560673, "grad_norm": 1.8260194063186646, "learning_rate": 0.000838615980432124, "loss": 3.6184, "step": 19010 }, { "epoch": 1.2919554287267292, "grad_norm": 2.382791519165039, "learning_rate": 0.0008385735154232912, "loss": 3.6702, "step": 19015 }, { "epoch": 1.292295148797391, "grad_norm": 2.451507329940796, "learning_rate": 0.0008385310504144585, "loss": 3.6144, "step": 19020 }, { "epoch": 1.2926348688680527, "grad_norm": 1.8927676677703857, "learning_rate": 0.0008384885854056257, "loss": 3.6132, "step": 19025 }, { "epoch": 1.2929745889387145, 
"grad_norm": 2.379171133041382, "learning_rate": 0.000838446120396793, "loss": 3.6112, "step": 19030 }, { "epoch": 1.2933143090093764, "grad_norm": 2.018777370452881, "learning_rate": 0.0008384036553879604, "loss": 3.7667, "step": 19035 }, { "epoch": 1.293654029080038, "grad_norm": 1.8782713413238525, "learning_rate": 0.0008383611903791276, "loss": 3.5864, "step": 19040 }, { "epoch": 1.2939937491506999, "grad_norm": 2.0346357822418213, "learning_rate": 0.0008383187253702949, "loss": 3.645, "step": 19045 }, { "epoch": 1.2943334692213617, "grad_norm": 1.7764112949371338, "learning_rate": 0.0008382762603614622, "loss": 3.2901, "step": 19050 }, { "epoch": 1.2946731892920234, "grad_norm": 1.5759658813476562, "learning_rate": 0.0008382337953526294, "loss": 3.6649, "step": 19055 }, { "epoch": 1.2950129093626852, "grad_norm": 1.9455915689468384, "learning_rate": 0.0008381913303437967, "loss": 3.6538, "step": 19060 }, { "epoch": 1.295352629433347, "grad_norm": 1.807922601699829, "learning_rate": 0.000838148865334964, "loss": 3.3967, "step": 19065 }, { "epoch": 1.2956923495040087, "grad_norm": 2.0219125747680664, "learning_rate": 0.0008381064003261313, "loss": 3.3332, "step": 19070 }, { "epoch": 1.2960320695746705, "grad_norm": 2.6755354404449463, "learning_rate": 0.0008380639353172985, "loss": 3.4763, "step": 19075 }, { "epoch": 1.2963717896453322, "grad_norm": 2.3609299659729004, "learning_rate": 0.0008380214703084659, "loss": 3.8486, "step": 19080 }, { "epoch": 1.296711509715994, "grad_norm": 2.0612921714782715, "learning_rate": 0.0008379790052996331, "loss": 3.7558, "step": 19085 }, { "epoch": 1.2970512297866557, "grad_norm": 1.9279594421386719, "learning_rate": 0.0008379365402908003, "loss": 3.6282, "step": 19090 }, { "epoch": 1.2973909498573175, "grad_norm": 2.115755558013916, "learning_rate": 0.0008378940752819677, "loss": 3.4654, "step": 19095 }, { "epoch": 1.2977306699279794, "grad_norm": 2.5894477367401123, "learning_rate": 0.0008378516102731349, "loss": 3.5362, 
"step": 19100 }, { "epoch": 1.298070389998641, "grad_norm": 1.9002902507781982, "learning_rate": 0.0008378091452643022, "loss": 3.5387, "step": 19105 }, { "epoch": 1.2984101100693028, "grad_norm": 2.13779354095459, "learning_rate": 0.0008377666802554696, "loss": 3.6402, "step": 19110 }, { "epoch": 1.2987498301399647, "grad_norm": 3.0516037940979004, "learning_rate": 0.0008377242152466368, "loss": 3.5222, "step": 19115 }, { "epoch": 1.2990895502106263, "grad_norm": 1.7539101839065552, "learning_rate": 0.000837681750237804, "loss": 3.6601, "step": 19120 }, { "epoch": 1.2994292702812882, "grad_norm": 2.342068910598755, "learning_rate": 0.0008376392852289713, "loss": 3.6225, "step": 19125 }, { "epoch": 1.29976899035195, "grad_norm": 1.936541199684143, "learning_rate": 0.0008375968202201386, "loss": 3.6586, "step": 19130 }, { "epoch": 1.3001087104226117, "grad_norm": 2.4636154174804688, "learning_rate": 0.0008375543552113058, "loss": 3.468, "step": 19135 }, { "epoch": 1.3004484304932735, "grad_norm": 2.1807162761688232, "learning_rate": 0.0008375118902024732, "loss": 3.4689, "step": 19140 }, { "epoch": 1.3007881505639354, "grad_norm": 2.5020339488983154, "learning_rate": 0.0008374694251936405, "loss": 3.5582, "step": 19145 }, { "epoch": 1.301127870634597, "grad_norm": 1.5850512981414795, "learning_rate": 0.0008374269601848077, "loss": 3.6647, "step": 19150 }, { "epoch": 1.3014675907052589, "grad_norm": 1.7619774341583252, "learning_rate": 0.000837384495175975, "loss": 3.3917, "step": 19155 }, { "epoch": 1.3018073107759207, "grad_norm": 2.096238851547241, "learning_rate": 0.0008373420301671423, "loss": 3.6482, "step": 19160 }, { "epoch": 1.3021470308465823, "grad_norm": 2.8266682624816895, "learning_rate": 0.0008372995651583095, "loss": 3.5756, "step": 19165 }, { "epoch": 1.3024867509172442, "grad_norm": 2.174067497253418, "learning_rate": 0.0008372571001494768, "loss": 3.8887, "step": 19170 }, { "epoch": 1.302826470987906, "grad_norm": 1.946333646774292, 
"learning_rate": 0.0008372146351406442, "loss": 3.3846, "step": 19175 }, { "epoch": 1.3031661910585677, "grad_norm": 2.1738195419311523, "learning_rate": 0.0008371721701318114, "loss": 3.6226, "step": 19180 }, { "epoch": 1.3035059111292295, "grad_norm": 1.6683791875839233, "learning_rate": 0.0008371297051229787, "loss": 3.4806, "step": 19185 }, { "epoch": 1.3038456311998914, "grad_norm": 1.5524935722351074, "learning_rate": 0.0008370872401141459, "loss": 3.8215, "step": 19190 }, { "epoch": 1.304185351270553, "grad_norm": 1.6219203472137451, "learning_rate": 0.0008370447751053133, "loss": 3.4804, "step": 19195 }, { "epoch": 1.3045250713412149, "grad_norm": 1.954336166381836, "learning_rate": 0.0008370023100964805, "loss": 3.8542, "step": 19200 }, { "epoch": 1.3048647914118767, "grad_norm": 2.532778024673462, "learning_rate": 0.0008369598450876477, "loss": 3.5226, "step": 19205 }, { "epoch": 1.3052045114825384, "grad_norm": 1.6198649406433105, "learning_rate": 0.0008369173800788152, "loss": 3.5405, "step": 19210 }, { "epoch": 1.3055442315532002, "grad_norm": 1.8773008584976196, "learning_rate": 0.0008368749150699824, "loss": 3.7503, "step": 19215 }, { "epoch": 1.305883951623862, "grad_norm": 1.6642402410507202, "learning_rate": 0.0008368324500611496, "loss": 3.4338, "step": 19220 }, { "epoch": 1.3062236716945237, "grad_norm": 2.3245949745178223, "learning_rate": 0.000836789985052317, "loss": 3.75, "step": 19225 }, { "epoch": 1.3065633917651855, "grad_norm": 2.1943421363830566, "learning_rate": 0.0008367475200434842, "loss": 3.6106, "step": 19230 }, { "epoch": 1.3069031118358474, "grad_norm": 2.365039825439453, "learning_rate": 0.0008367050550346514, "loss": 3.3558, "step": 19235 }, { "epoch": 1.307242831906509, "grad_norm": 2.0912303924560547, "learning_rate": 0.0008366625900258188, "loss": 3.5272, "step": 19240 }, { "epoch": 1.3075825519771709, "grad_norm": 1.7988801002502441, "learning_rate": 0.0008366201250169861, "loss": 3.8412, "step": 19245 }, { "epoch": 
1.3079222720478325, "grad_norm": 2.2441649436950684, "learning_rate": 0.0008365776600081533, "loss": 3.7784, "step": 19250 }, { "epoch": 1.3082619921184944, "grad_norm": 1.7128783464431763, "learning_rate": 0.0008365351949993206, "loss": 3.8614, "step": 19255 }, { "epoch": 1.308601712189156, "grad_norm": 2.22184157371521, "learning_rate": 0.0008364927299904879, "loss": 3.7149, "step": 19260 }, { "epoch": 1.3089414322598178, "grad_norm": 1.8911006450653076, "learning_rate": 0.0008364502649816551, "loss": 3.1559, "step": 19265 }, { "epoch": 1.3092811523304797, "grad_norm": 4.180904388427734, "learning_rate": 0.0008364077999728224, "loss": 3.5912, "step": 19270 }, { "epoch": 1.3096208724011413, "grad_norm": 1.7741758823394775, "learning_rate": 0.0008363653349639898, "loss": 3.4613, "step": 19275 }, { "epoch": 1.3099605924718032, "grad_norm": 1.8539209365844727, "learning_rate": 0.000836322869955157, "loss": 3.1626, "step": 19280 }, { "epoch": 1.310300312542465, "grad_norm": 1.8678840398788452, "learning_rate": 0.0008362804049463243, "loss": 3.5812, "step": 19285 }, { "epoch": 1.3106400326131267, "grad_norm": 1.5387846231460571, "learning_rate": 0.0008362379399374915, "loss": 3.4137, "step": 19290 }, { "epoch": 1.3109797526837885, "grad_norm": 1.775020718574524, "learning_rate": 0.0008361954749286588, "loss": 3.5404, "step": 19295 }, { "epoch": 1.3113194727544504, "grad_norm": 1.83073091506958, "learning_rate": 0.0008361530099198261, "loss": 3.6006, "step": 19300 }, { "epoch": 1.311659192825112, "grad_norm": 1.8228131532669067, "learning_rate": 0.0008361105449109933, "loss": 3.6725, "step": 19305 }, { "epoch": 1.3119989128957739, "grad_norm": 1.8461527824401855, "learning_rate": 0.0008360680799021607, "loss": 3.3415, "step": 19310 }, { "epoch": 1.3123386329664357, "grad_norm": 1.994187355041504, "learning_rate": 0.000836025614893328, "loss": 3.3787, "step": 19315 }, { "epoch": 1.3126783530370973, "grad_norm": 1.5153119564056396, "learning_rate": 0.0008359831498844952, 
"loss": 3.4508, "step": 19320 }, { "epoch": 1.3130180731077592, "grad_norm": 1.8336739540100098, "learning_rate": 0.0008359406848756624, "loss": 3.5974, "step": 19325 }, { "epoch": 1.313357793178421, "grad_norm": 2.375060558319092, "learning_rate": 0.0008358982198668298, "loss": 3.6432, "step": 19330 }, { "epoch": 1.3136975132490827, "grad_norm": 2.1082191467285156, "learning_rate": 0.000835855754857997, "loss": 3.5002, "step": 19335 }, { "epoch": 1.3140372333197445, "grad_norm": 2.6502368450164795, "learning_rate": 0.0008358132898491642, "loss": 3.678, "step": 19340 }, { "epoch": 1.3143769533904064, "grad_norm": 2.21297025680542, "learning_rate": 0.0008357708248403317, "loss": 3.5517, "step": 19345 }, { "epoch": 1.314716673461068, "grad_norm": 2.373430013656616, "learning_rate": 0.0008357283598314989, "loss": 3.574, "step": 19350 }, { "epoch": 1.3150563935317299, "grad_norm": 3.221149444580078, "learning_rate": 0.0008356858948226661, "loss": 3.4361, "step": 19355 }, { "epoch": 1.3153961136023917, "grad_norm": 2.34417986869812, "learning_rate": 0.0008356434298138335, "loss": 3.4611, "step": 19360 }, { "epoch": 1.3157358336730534, "grad_norm": 1.8805365562438965, "learning_rate": 0.0008356009648050007, "loss": 3.6509, "step": 19365 }, { "epoch": 1.3160755537437152, "grad_norm": 1.8390344381332397, "learning_rate": 0.0008355584997961679, "loss": 3.615, "step": 19370 }, { "epoch": 1.316415273814377, "grad_norm": 1.7285363674163818, "learning_rate": 0.0008355160347873352, "loss": 3.8192, "step": 19375 }, { "epoch": 1.3167549938850387, "grad_norm": 2.100813627243042, "learning_rate": 0.0008354735697785026, "loss": 3.5299, "step": 19380 }, { "epoch": 1.3170947139557005, "grad_norm": 2.137233257293701, "learning_rate": 0.0008354311047696698, "loss": 3.706, "step": 19385 }, { "epoch": 1.3174344340263624, "grad_norm": 2.310746669769287, "learning_rate": 0.0008353886397608371, "loss": 3.7246, "step": 19390 }, { "epoch": 1.317774154097024, "grad_norm": 2.969937324523926, 
"learning_rate": 0.0008353461747520044, "loss": 3.2786, "step": 19395 }, { "epoch": 1.3181138741676859, "grad_norm": 1.7650234699249268, "learning_rate": 0.0008353037097431716, "loss": 3.7009, "step": 19400 }, { "epoch": 1.3184535942383477, "grad_norm": 2.058828115463257, "learning_rate": 0.0008352612447343389, "loss": 3.4843, "step": 19405 }, { "epoch": 1.3187933143090094, "grad_norm": 1.8502538204193115, "learning_rate": 0.0008352187797255062, "loss": 3.6436, "step": 19410 }, { "epoch": 1.3191330343796712, "grad_norm": 2.29065203666687, "learning_rate": 0.0008351763147166735, "loss": 3.4618, "step": 19415 }, { "epoch": 1.3194727544503329, "grad_norm": 1.745789885520935, "learning_rate": 0.0008351338497078408, "loss": 3.8166, "step": 19420 }, { "epoch": 1.3198124745209947, "grad_norm": 1.5542020797729492, "learning_rate": 0.000835091384699008, "loss": 3.4765, "step": 19425 }, { "epoch": 1.3201521945916566, "grad_norm": 1.832241177558899, "learning_rate": 0.0008350489196901753, "loss": 3.6226, "step": 19430 }, { "epoch": 1.3204919146623182, "grad_norm": 1.826765775680542, "learning_rate": 0.0008350064546813426, "loss": 3.6089, "step": 19435 }, { "epoch": 1.32083163473298, "grad_norm": 2.0642449855804443, "learning_rate": 0.0008349639896725098, "loss": 3.503, "step": 19440 }, { "epoch": 1.3211713548036417, "grad_norm": 2.067537546157837, "learning_rate": 0.0008349215246636771, "loss": 3.5974, "step": 19445 }, { "epoch": 1.3215110748743035, "grad_norm": 2.6551005840301514, "learning_rate": 0.0008348790596548445, "loss": 3.5961, "step": 19450 }, { "epoch": 1.3218507949449654, "grad_norm": 2.0643136501312256, "learning_rate": 0.0008348365946460117, "loss": 3.7398, "step": 19455 }, { "epoch": 1.322190515015627, "grad_norm": 2.1541337966918945, "learning_rate": 0.000834794129637179, "loss": 3.8133, "step": 19460 }, { "epoch": 1.3225302350862889, "grad_norm": 1.6622823476791382, "learning_rate": 0.0008347516646283463, "loss": 3.45, "step": 19465 }, { "epoch": 
1.3228699551569507, "grad_norm": 2.452615737915039, "learning_rate": 0.0008347091996195135, "loss": 3.6226, "step": 19470 }, { "epoch": 1.3232096752276123, "grad_norm": 2.2921643257141113, "learning_rate": 0.0008346667346106807, "loss": 3.6063, "step": 19475 }, { "epoch": 1.3235493952982742, "grad_norm": 2.0741517543792725, "learning_rate": 0.0008346242696018481, "loss": 3.579, "step": 19480 }, { "epoch": 1.323889115368936, "grad_norm": 2.095653772354126, "learning_rate": 0.0008345818045930154, "loss": 3.6033, "step": 19485 }, { "epoch": 1.3242288354395977, "grad_norm": 1.7539538145065308, "learning_rate": 0.0008345393395841826, "loss": 3.3329, "step": 19490 }, { "epoch": 1.3245685555102595, "grad_norm": 2.0705220699310303, "learning_rate": 0.00083449687457535, "loss": 3.6836, "step": 19495 }, { "epoch": 1.3249082755809214, "grad_norm": 1.6905392408370972, "learning_rate": 0.0008344544095665172, "loss": 3.5903, "step": 19500 }, { "epoch": 1.325247995651583, "grad_norm": 2.4025309085845947, "learning_rate": 0.0008344119445576844, "loss": 3.3934, "step": 19505 }, { "epoch": 1.3255877157222449, "grad_norm": 2.870762825012207, "learning_rate": 0.0008343694795488518, "loss": 3.6016, "step": 19510 }, { "epoch": 1.3259274357929067, "grad_norm": 2.2053585052490234, "learning_rate": 0.000834327014540019, "loss": 3.671, "step": 19515 }, { "epoch": 1.3262671558635684, "grad_norm": 1.833606243133545, "learning_rate": 0.0008342845495311863, "loss": 3.4494, "step": 19520 }, { "epoch": 1.3266068759342302, "grad_norm": 1.8163456916809082, "learning_rate": 0.0008342420845223536, "loss": 3.5527, "step": 19525 }, { "epoch": 1.326946596004892, "grad_norm": 2.223837375640869, "learning_rate": 0.0008341996195135209, "loss": 3.3322, "step": 19530 }, { "epoch": 1.3272863160755537, "grad_norm": 2.132762908935547, "learning_rate": 0.0008341571545046882, "loss": 3.6072, "step": 19535 }, { "epoch": 1.3276260361462155, "grad_norm": 2.1112687587738037, "learning_rate": 0.0008341146894958554, 
"loss": 3.7059, "step": 19540 }, { "epoch": 1.3279657562168774, "grad_norm": 1.5702879428863525, "learning_rate": 0.0008340722244870227, "loss": 3.519, "step": 19545 }, { "epoch": 1.328305476287539, "grad_norm": 1.8381266593933105, "learning_rate": 0.00083402975947819, "loss": 3.5004, "step": 19550 }, { "epoch": 1.3286451963582009, "grad_norm": 2.3282954692840576, "learning_rate": 0.0008339872944693573, "loss": 3.6841, "step": 19555 }, { "epoch": 1.3289849164288627, "grad_norm": 1.7930258512496948, "learning_rate": 0.0008339448294605246, "loss": 3.6451, "step": 19560 }, { "epoch": 1.3293246364995244, "grad_norm": 2.835195779800415, "learning_rate": 0.0008339023644516919, "loss": 3.519, "step": 19565 }, { "epoch": 1.3296643565701862, "grad_norm": 2.153215169906616, "learning_rate": 0.0008338598994428591, "loss": 3.5114, "step": 19570 }, { "epoch": 1.330004076640848, "grad_norm": 2.066591501235962, "learning_rate": 0.0008338174344340263, "loss": 3.6984, "step": 19575 }, { "epoch": 1.3303437967115097, "grad_norm": 1.8564468622207642, "learning_rate": 0.0008337749694251937, "loss": 3.5126, "step": 19580 }, { "epoch": 1.3306835167821716, "grad_norm": 1.9111720323562622, "learning_rate": 0.0008337325044163609, "loss": 3.582, "step": 19585 }, { "epoch": 1.3310232368528332, "grad_norm": 2.1732337474823, "learning_rate": 0.0008336900394075282, "loss": 3.4919, "step": 19590 }, { "epoch": 1.331362956923495, "grad_norm": 1.7800078392028809, "learning_rate": 0.0008336475743986956, "loss": 3.661, "step": 19595 }, { "epoch": 1.331702676994157, "grad_norm": 2.140064001083374, "learning_rate": 0.0008336051093898628, "loss": 3.6945, "step": 19600 }, { "epoch": 1.3320423970648185, "grad_norm": 1.9722626209259033, "learning_rate": 0.00083356264438103, "loss": 3.5904, "step": 19605 }, { "epoch": 1.3323821171354804, "grad_norm": 2.631481647491455, "learning_rate": 0.0008335201793721974, "loss": 3.4856, "step": 19610 }, { "epoch": 1.332721837206142, "grad_norm": 2.495619297027588, 
"learning_rate": 0.0008334777143633646, "loss": 3.3754, "step": 19615 }, { "epoch": 1.3330615572768039, "grad_norm": 1.9975210428237915, "learning_rate": 0.0008334352493545318, "loss": 3.9044, "step": 19620 }, { "epoch": 1.3334012773474657, "grad_norm": 1.9953454732894897, "learning_rate": 0.0008333927843456992, "loss": 3.5456, "step": 19625 }, { "epoch": 1.3337409974181273, "grad_norm": 2.3745808601379395, "learning_rate": 0.0008333503193368665, "loss": 3.9877, "step": 19630 }, { "epoch": 1.3340807174887892, "grad_norm": 2.205284357070923, "learning_rate": 0.0008333078543280337, "loss": 3.6192, "step": 19635 }, { "epoch": 1.334420437559451, "grad_norm": 2.1649866104125977, "learning_rate": 0.000833265389319201, "loss": 3.512, "step": 19640 }, { "epoch": 1.3347601576301127, "grad_norm": 1.6806868314743042, "learning_rate": 0.0008332229243103683, "loss": 3.4513, "step": 19645 }, { "epoch": 1.3350998777007745, "grad_norm": 2.7802281379699707, "learning_rate": 0.0008331804593015355, "loss": 3.3987, "step": 19650 }, { "epoch": 1.3354395977714364, "grad_norm": 1.7863218784332275, "learning_rate": 0.0008331379942927028, "loss": 3.4042, "step": 19655 }, { "epoch": 1.335779317842098, "grad_norm": 2.0785152912139893, "learning_rate": 0.0008330955292838702, "loss": 3.6537, "step": 19660 }, { "epoch": 1.3361190379127599, "grad_norm": 2.1642701625823975, "learning_rate": 0.0008330530642750374, "loss": 3.7675, "step": 19665 }, { "epoch": 1.3364587579834217, "grad_norm": 1.9585217237472534, "learning_rate": 0.0008330105992662047, "loss": 3.6959, "step": 19670 }, { "epoch": 1.3367984780540834, "grad_norm": 2.095262050628662, "learning_rate": 0.0008329681342573719, "loss": 3.6967, "step": 19675 }, { "epoch": 1.3371381981247452, "grad_norm": 2.372100591659546, "learning_rate": 0.0008329256692485392, "loss": 3.6356, "step": 19680 }, { "epoch": 1.337477918195407, "grad_norm": 2.0528650283813477, "learning_rate": 0.0008328832042397065, "loss": 3.7103, "step": 19685 }, { "epoch": 
1.3378176382660687, "grad_norm": 2.291727066040039, "learning_rate": 0.0008328407392308737, "loss": 3.5397, "step": 19690 }, { "epoch": 1.3381573583367306, "grad_norm": 1.6101839542388916, "learning_rate": 0.0008327982742220411, "loss": 3.6182, "step": 19695 }, { "epoch": 1.3384970784073924, "grad_norm": 2.237630605697632, "learning_rate": 0.0008327558092132084, "loss": 3.4659, "step": 19700 }, { "epoch": 1.338836798478054, "grad_norm": 1.9802354574203491, "learning_rate": 0.0008327133442043756, "loss": 3.5724, "step": 19705 }, { "epoch": 1.3391765185487159, "grad_norm": 2.6429266929626465, "learning_rate": 0.0008326708791955428, "loss": 3.4882, "step": 19710 }, { "epoch": 1.3395162386193777, "grad_norm": 1.8089629411697388, "learning_rate": 0.0008326284141867102, "loss": 3.3756, "step": 19715 }, { "epoch": 1.3398559586900394, "grad_norm": 2.5463311672210693, "learning_rate": 0.0008325859491778774, "loss": 3.6801, "step": 19720 }, { "epoch": 1.3401956787607012, "grad_norm": 1.7877259254455566, "learning_rate": 0.0008325434841690446, "loss": 3.5877, "step": 19725 }, { "epoch": 1.340535398831363, "grad_norm": 1.819934368133545, "learning_rate": 0.0008325010191602121, "loss": 3.5764, "step": 19730 }, { "epoch": 1.3408751189020247, "grad_norm": 1.7436866760253906, "learning_rate": 0.0008324585541513793, "loss": 3.5629, "step": 19735 }, { "epoch": 1.3412148389726866, "grad_norm": 1.8934667110443115, "learning_rate": 0.0008324160891425465, "loss": 3.2623, "step": 19740 }, { "epoch": 1.3415545590433484, "grad_norm": 1.814817190170288, "learning_rate": 0.0008323736241337139, "loss": 3.6435, "step": 19745 }, { "epoch": 1.34189427911401, "grad_norm": 2.388089895248413, "learning_rate": 0.0008323311591248811, "loss": 3.451, "step": 19750 }, { "epoch": 1.342233999184672, "grad_norm": 1.8918569087982178, "learning_rate": 0.0008322886941160483, "loss": 3.6207, "step": 19755 }, { "epoch": 1.3425737192553335, "grad_norm": 2.1948370933532715, "learning_rate": 0.0008322462291072156, 
"loss": 3.4585, "step": 19760 }, { "epoch": 1.3429134393259954, "grad_norm": 2.306986093521118, "learning_rate": 0.000832203764098383, "loss": 3.4004, "step": 19765 }, { "epoch": 1.3432531593966572, "grad_norm": 3.0345704555511475, "learning_rate": 0.0008321612990895502, "loss": 3.5772, "step": 19770 }, { "epoch": 1.3435928794673189, "grad_norm": 2.968822956085205, "learning_rate": 0.0008321188340807175, "loss": 3.4336, "step": 19775 }, { "epoch": 1.3439325995379807, "grad_norm": 1.6871464252471924, "learning_rate": 0.0008320763690718848, "loss": 3.7263, "step": 19780 }, { "epoch": 1.3442723196086424, "grad_norm": 2.0155088901519775, "learning_rate": 0.000832033904063052, "loss": 3.617, "step": 19785 }, { "epoch": 1.3446120396793042, "grad_norm": 1.6332167387008667, "learning_rate": 0.0008319914390542193, "loss": 3.6896, "step": 19790 }, { "epoch": 1.344951759749966, "grad_norm": 1.7423027753829956, "learning_rate": 0.0008319489740453866, "loss": 3.6803, "step": 19795 }, { "epoch": 1.3452914798206277, "grad_norm": 2.360480785369873, "learning_rate": 0.0008319065090365539, "loss": 3.5629, "step": 19800 }, { "epoch": 1.3456311998912895, "grad_norm": 1.7991294860839844, "learning_rate": 0.0008318640440277212, "loss": 3.6077, "step": 19805 }, { "epoch": 1.3459709199619514, "grad_norm": 1.6581833362579346, "learning_rate": 0.0008318215790188884, "loss": 3.2868, "step": 19810 }, { "epoch": 1.346310640032613, "grad_norm": 2.621304512023926, "learning_rate": 0.0008317791140100557, "loss": 3.5495, "step": 19815 }, { "epoch": 1.3466503601032749, "grad_norm": 1.8793895244598389, "learning_rate": 0.000831736649001223, "loss": 3.4794, "step": 19820 }, { "epoch": 1.3469900801739367, "grad_norm": 1.666784644126892, "learning_rate": 0.0008316941839923902, "loss": 3.5162, "step": 19825 }, { "epoch": 1.3473298002445984, "grad_norm": 1.73686683177948, "learning_rate": 0.0008316517189835575, "loss": 3.5299, "step": 19830 }, { "epoch": 1.3476695203152602, "grad_norm": 
1.548329472541809, "learning_rate": 0.0008316092539747249, "loss": 3.7029, "step": 19835 }, { "epoch": 1.348009240385922, "grad_norm": 1.8178467750549316, "learning_rate": 0.0008315667889658921, "loss": 3.6003, "step": 19840 }, { "epoch": 1.3483489604565837, "grad_norm": 2.005305767059326, "learning_rate": 0.0008315243239570594, "loss": 3.4491, "step": 19845 }, { "epoch": 1.3486886805272456, "grad_norm": 2.300609827041626, "learning_rate": 0.0008314818589482267, "loss": 3.7041, "step": 19850 }, { "epoch": 1.3490284005979074, "grad_norm": 1.4848177433013916, "learning_rate": 0.0008314393939393939, "loss": 3.5877, "step": 19855 }, { "epoch": 1.349368120668569, "grad_norm": 2.283228874206543, "learning_rate": 0.0008313969289305611, "loss": 3.4873, "step": 19860 }, { "epoch": 1.349707840739231, "grad_norm": 2.1736559867858887, "learning_rate": 0.0008313544639217286, "loss": 3.7034, "step": 19865 }, { "epoch": 1.3500475608098927, "grad_norm": 1.8157055377960205, "learning_rate": 0.0008313119989128958, "loss": 3.669, "step": 19870 }, { "epoch": 1.3503872808805544, "grad_norm": 2.3193278312683105, "learning_rate": 0.0008312695339040631, "loss": 3.8503, "step": 19875 }, { "epoch": 1.3507270009512162, "grad_norm": 1.9658929109573364, "learning_rate": 0.0008312270688952304, "loss": 3.7606, "step": 19880 }, { "epoch": 1.351066721021878, "grad_norm": 1.7761822938919067, "learning_rate": 0.0008311846038863976, "loss": 3.6777, "step": 19885 }, { "epoch": 1.3514064410925397, "grad_norm": 2.1533923149108887, "learning_rate": 0.0008311421388775649, "loss": 3.542, "step": 19890 }, { "epoch": 1.3517461611632016, "grad_norm": 1.7952332496643066, "learning_rate": 0.0008310996738687322, "loss": 3.5867, "step": 19895 }, { "epoch": 1.3520858812338634, "grad_norm": 2.3196277618408203, "learning_rate": 0.0008310572088598995, "loss": 3.4961, "step": 19900 }, { "epoch": 1.352425601304525, "grad_norm": 1.8794691562652588, "learning_rate": 0.0008310147438510668, "loss": 3.5059, "step": 19905 }, 
{ "epoch": 1.352765321375187, "grad_norm": 2.1296746730804443, "learning_rate": 0.000830972278842234, "loss": 3.5799, "step": 19910 }, { "epoch": 1.3531050414458488, "grad_norm": 1.743605375289917, "learning_rate": 0.0008309298138334013, "loss": 3.6707, "step": 19915 }, { "epoch": 1.3534447615165104, "grad_norm": 2.012817621231079, "learning_rate": 0.0008308873488245686, "loss": 3.615, "step": 19920 }, { "epoch": 1.3537844815871722, "grad_norm": 1.8645262718200684, "learning_rate": 0.0008308448838157358, "loss": 3.5589, "step": 19925 }, { "epoch": 1.3541242016578339, "grad_norm": 2.012265920639038, "learning_rate": 0.0008308024188069031, "loss": 3.7413, "step": 19930 }, { "epoch": 1.3544639217284957, "grad_norm": 1.789870262145996, "learning_rate": 0.0008307599537980705, "loss": 3.6732, "step": 19935 }, { "epoch": 1.3548036417991576, "grad_norm": 1.8648103475570679, "learning_rate": 0.0008307174887892377, "loss": 3.6488, "step": 19940 }, { "epoch": 1.3551433618698192, "grad_norm": 1.665277361869812, "learning_rate": 0.000830675023780405, "loss": 3.6592, "step": 19945 }, { "epoch": 1.355483081940481, "grad_norm": 1.7069473266601562, "learning_rate": 0.0008306325587715723, "loss": 3.7135, "step": 19950 }, { "epoch": 1.3558228020111427, "grad_norm": 1.8541053533554077, "learning_rate": 0.0008305900937627395, "loss": 3.6786, "step": 19955 }, { "epoch": 1.3561625220818045, "grad_norm": 1.9315153360366821, "learning_rate": 0.0008305476287539067, "loss": 3.7678, "step": 19960 }, { "epoch": 1.3565022421524664, "grad_norm": 1.8996185064315796, "learning_rate": 0.0008305051637450741, "loss": 3.7305, "step": 19965 }, { "epoch": 1.356841962223128, "grad_norm": 2.0240814685821533, "learning_rate": 0.0008304626987362414, "loss": 3.5453, "step": 19970 }, { "epoch": 1.3571816822937899, "grad_norm": 1.784816861152649, "learning_rate": 0.0008304202337274086, "loss": 3.5339, "step": 19975 }, { "epoch": 1.3575214023644517, "grad_norm": 1.6845083236694336, "learning_rate": 
0.000830377768718576, "loss": 3.5738, "step": 19980 }, { "epoch": 1.3578611224351134, "grad_norm": 1.694905161857605, "learning_rate": 0.0008303353037097432, "loss": 3.4861, "step": 19985 }, { "epoch": 1.3582008425057752, "grad_norm": 1.6533033847808838, "learning_rate": 0.0008302928387009104, "loss": 3.5744, "step": 19990 }, { "epoch": 1.358540562576437, "grad_norm": 2.4208483695983887, "learning_rate": 0.0008302503736920778, "loss": 3.4867, "step": 19995 }, { "epoch": 1.3588802826470987, "grad_norm": 2.0866029262542725, "learning_rate": 0.000830207908683245, "loss": 3.8062, "step": 20000 }, { "epoch": 1.3592200027177606, "grad_norm": 1.873889684677124, "learning_rate": 0.0008301654436744123, "loss": 3.8258, "step": 20005 }, { "epoch": 1.3595597227884224, "grad_norm": 2.131596565246582, "learning_rate": 0.0008301229786655797, "loss": 3.6963, "step": 20010 }, { "epoch": 1.359899442859084, "grad_norm": 2.0034549236297607, "learning_rate": 0.0008300805136567469, "loss": 3.7836, "step": 20015 }, { "epoch": 1.360239162929746, "grad_norm": 1.677886962890625, "learning_rate": 0.0008300380486479141, "loss": 3.6933, "step": 20020 }, { "epoch": 1.3605788830004077, "grad_norm": 2.5952401161193848, "learning_rate": 0.0008299955836390814, "loss": 3.4724, "step": 20025 }, { "epoch": 1.3609186030710694, "grad_norm": 1.8856475353240967, "learning_rate": 0.0008299531186302487, "loss": 3.5583, "step": 20030 }, { "epoch": 1.3612583231417312, "grad_norm": 1.9831197261810303, "learning_rate": 0.0008299106536214159, "loss": 3.73, "step": 20035 }, { "epoch": 1.361598043212393, "grad_norm": 1.6312882900238037, "learning_rate": 0.0008298681886125833, "loss": 3.8091, "step": 20040 }, { "epoch": 1.3619377632830547, "grad_norm": 1.9268598556518555, "learning_rate": 0.0008298257236037506, "loss": 3.5451, "step": 20045 }, { "epoch": 1.3622774833537166, "grad_norm": 2.1577796936035156, "learning_rate": 0.0008297832585949178, "loss": 3.6965, "step": 20050 }, { "epoch": 1.3626172034243784, 
"grad_norm": 2.155956506729126, "learning_rate": 0.0008297407935860851, "loss": 3.5958, "step": 20055 }, { "epoch": 1.36295692349504, "grad_norm": 1.4149471521377563, "learning_rate": 0.0008296983285772523, "loss": 3.7056, "step": 20060 }, { "epoch": 1.363296643565702, "grad_norm": 2.0566744804382324, "learning_rate": 0.0008296558635684196, "loss": 3.6537, "step": 20065 }, { "epoch": 1.3636363636363638, "grad_norm": 2.0557210445404053, "learning_rate": 0.0008296133985595869, "loss": 3.5931, "step": 20070 }, { "epoch": 1.3639760837070254, "grad_norm": 2.1139047145843506, "learning_rate": 0.0008295709335507542, "loss": 3.6229, "step": 20075 }, { "epoch": 1.3643158037776872, "grad_norm": 1.727913498878479, "learning_rate": 0.0008295284685419215, "loss": 3.4604, "step": 20080 }, { "epoch": 1.364655523848349, "grad_norm": 2.268118143081665, "learning_rate": 0.0008294860035330888, "loss": 3.4144, "step": 20085 }, { "epoch": 1.3649952439190107, "grad_norm": 2.099346160888672, "learning_rate": 0.000829443538524256, "loss": 3.5977, "step": 20090 }, { "epoch": 1.3653349639896726, "grad_norm": 1.6633511781692505, "learning_rate": 0.0008294010735154233, "loss": 3.4214, "step": 20095 }, { "epoch": 1.3656746840603342, "grad_norm": 1.7188867330551147, "learning_rate": 0.0008293586085065906, "loss": 3.5394, "step": 20100 }, { "epoch": 1.366014404130996, "grad_norm": 2.252535820007324, "learning_rate": 0.0008293161434977578, "loss": 3.6904, "step": 20105 }, { "epoch": 1.366354124201658, "grad_norm": 2.787306547164917, "learning_rate": 0.0008292736784889251, "loss": 3.4577, "step": 20110 }, { "epoch": 1.3666938442723195, "grad_norm": 2.1518993377685547, "learning_rate": 0.0008292312134800925, "loss": 3.6788, "step": 20115 }, { "epoch": 1.3670335643429814, "grad_norm": 2.0547244548797607, "learning_rate": 0.0008291887484712597, "loss": 3.074, "step": 20120 }, { "epoch": 1.367373284413643, "grad_norm": 1.694871187210083, "learning_rate": 0.0008291462834624269, "loss": 3.4602, "step": 
20125 }, { "epoch": 1.3677130044843049, "grad_norm": 1.882256031036377, "learning_rate": 0.0008291038184535943, "loss": 3.5158, "step": 20130 }, { "epoch": 1.3680527245549667, "grad_norm": 2.254934310913086, "learning_rate": 0.0008290613534447615, "loss": 3.7229, "step": 20135 }, { "epoch": 1.3683924446256284, "grad_norm": 1.7755422592163086, "learning_rate": 0.0008290188884359287, "loss": 3.236, "step": 20140 }, { "epoch": 1.3687321646962902, "grad_norm": 2.119443416595459, "learning_rate": 0.0008289764234270962, "loss": 3.529, "step": 20145 }, { "epoch": 1.369071884766952, "grad_norm": 2.4819040298461914, "learning_rate": 0.0008289339584182634, "loss": 3.3639, "step": 20150 }, { "epoch": 1.3694116048376137, "grad_norm": 2.5402467250823975, "learning_rate": 0.0008288914934094306, "loss": 3.7119, "step": 20155 }, { "epoch": 1.3697513249082756, "grad_norm": 2.0679595470428467, "learning_rate": 0.000828849028400598, "loss": 3.5742, "step": 20160 }, { "epoch": 1.3700910449789374, "grad_norm": 1.7741012573242188, "learning_rate": 0.0008288065633917652, "loss": 3.5438, "step": 20165 }, { "epoch": 1.370430765049599, "grad_norm": 2.1399502754211426, "learning_rate": 0.0008287640983829324, "loss": 3.4525, "step": 20170 }, { "epoch": 1.370770485120261, "grad_norm": 1.577825665473938, "learning_rate": 0.0008287216333740997, "loss": 3.4294, "step": 20175 }, { "epoch": 1.3711102051909227, "grad_norm": 1.6687333583831787, "learning_rate": 0.0008286791683652671, "loss": 3.668, "step": 20180 }, { "epoch": 1.3714499252615844, "grad_norm": 1.695855975151062, "learning_rate": 0.0008286367033564343, "loss": 3.2545, "step": 20185 }, { "epoch": 1.3717896453322462, "grad_norm": 2.3283700942993164, "learning_rate": 0.0008285942383476016, "loss": 3.7487, "step": 20190 }, { "epoch": 1.372129365402908, "grad_norm": 2.643712282180786, "learning_rate": 0.0008285517733387689, "loss": 3.2892, "step": 20195 }, { "epoch": 1.3724690854735697, "grad_norm": 2.745521306991577, "learning_rate": 
0.0008285093083299361, "loss": 3.4647, "step": 20200 }, { "epoch": 1.3728088055442316, "grad_norm": 1.471993088722229, "learning_rate": 0.0008284668433211034, "loss": 3.402, "step": 20205 }, { "epoch": 1.3731485256148934, "grad_norm": 1.8562185764312744, "learning_rate": 0.0008284243783122706, "loss": 3.4822, "step": 20210 }, { "epoch": 1.373488245685555, "grad_norm": 2.0594213008880615, "learning_rate": 0.0008283819133034381, "loss": 3.4938, "step": 20215 }, { "epoch": 1.373827965756217, "grad_norm": 1.942175269126892, "learning_rate": 0.0008283394482946053, "loss": 3.2799, "step": 20220 }, { "epoch": 1.3741676858268788, "grad_norm": 1.9916727542877197, "learning_rate": 0.0008282969832857725, "loss": 3.588, "step": 20225 }, { "epoch": 1.3745074058975404, "grad_norm": 1.4317271709442139, "learning_rate": 0.0008282545182769399, "loss": 3.2372, "step": 20230 }, { "epoch": 1.3748471259682022, "grad_norm": 2.23604416847229, "learning_rate": 0.0008282120532681071, "loss": 3.5497, "step": 20235 }, { "epoch": 1.375186846038864, "grad_norm": 1.933443307876587, "learning_rate": 0.0008281695882592743, "loss": 3.4797, "step": 20240 }, { "epoch": 1.3755265661095257, "grad_norm": 1.8039618730545044, "learning_rate": 0.0008281271232504417, "loss": 3.4678, "step": 20245 }, { "epoch": 1.3758662861801876, "grad_norm": 1.9597258567810059, "learning_rate": 0.000828084658241609, "loss": 3.4506, "step": 20250 }, { "epoch": 1.3762060062508494, "grad_norm": 2.2396552562713623, "learning_rate": 0.0008280421932327762, "loss": 3.5009, "step": 20255 }, { "epoch": 1.376545726321511, "grad_norm": 2.2099902629852295, "learning_rate": 0.0008279997282239435, "loss": 3.477, "step": 20260 }, { "epoch": 1.376885446392173, "grad_norm": 2.669534921646118, "learning_rate": 0.0008279572632151108, "loss": 3.5851, "step": 20265 }, { "epoch": 1.3772251664628345, "grad_norm": 2.087252378463745, "learning_rate": 0.000827914798206278, "loss": 3.8284, "step": 20270 }, { "epoch": 1.3775648865334964, 
"grad_norm": 2.661989212036133, "learning_rate": 0.0008278723331974453, "loss": 3.5995, "step": 20275 }, { "epoch": 1.3779046066041583, "grad_norm": 1.792147159576416, "learning_rate": 0.0008278298681886126, "loss": 3.5436, "step": 20280 }, { "epoch": 1.3782443266748199, "grad_norm": 2.217724323272705, "learning_rate": 0.0008277874031797799, "loss": 3.4004, "step": 20285 }, { "epoch": 1.3785840467454817, "grad_norm": 1.7551565170288086, "learning_rate": 0.0008277449381709472, "loss": 3.6365, "step": 20290 }, { "epoch": 1.3789237668161434, "grad_norm": 1.9251502752304077, "learning_rate": 0.0008277024731621145, "loss": 3.7318, "step": 20295 }, { "epoch": 1.3792634868868052, "grad_norm": 2.5167179107666016, "learning_rate": 0.0008276600081532817, "loss": 3.6776, "step": 20300 }, { "epoch": 1.379603206957467, "grad_norm": 1.9246922731399536, "learning_rate": 0.000827617543144449, "loss": 3.3238, "step": 20305 }, { "epoch": 1.3799429270281287, "grad_norm": 2.1428940296173096, "learning_rate": 0.0008275750781356162, "loss": 3.5686, "step": 20310 }, { "epoch": 1.3802826470987906, "grad_norm": 2.06229829788208, "learning_rate": 0.0008275326131267835, "loss": 3.6435, "step": 20315 }, { "epoch": 1.3806223671694524, "grad_norm": 2.1970231533050537, "learning_rate": 0.0008274901481179509, "loss": 3.683, "step": 20320 }, { "epoch": 1.380962087240114, "grad_norm": 2.7872154712677, "learning_rate": 0.0008274476831091181, "loss": 3.5195, "step": 20325 }, { "epoch": 1.381301807310776, "grad_norm": 1.6305205821990967, "learning_rate": 0.0008274052181002854, "loss": 3.6527, "step": 20330 }, { "epoch": 1.3816415273814378, "grad_norm": 1.8210043907165527, "learning_rate": 0.0008273627530914527, "loss": 3.4371, "step": 20335 }, { "epoch": 1.3819812474520994, "grad_norm": 1.7060292959213257, "learning_rate": 0.0008273202880826199, "loss": 3.6253, "step": 20340 }, { "epoch": 1.3823209675227612, "grad_norm": 1.9578542709350586, "learning_rate": 0.0008272778230737871, "loss": 3.5823, 
"step": 20345 }, { "epoch": 1.382660687593423, "grad_norm": 1.8940435647964478, "learning_rate": 0.0008272353580649546, "loss": 3.5498, "step": 20350 }, { "epoch": 1.3830004076640847, "grad_norm": 1.9179832935333252, "learning_rate": 0.0008271928930561218, "loss": 3.5336, "step": 20355 }, { "epoch": 1.3833401277347466, "grad_norm": 2.1557400226593018, "learning_rate": 0.000827150428047289, "loss": 3.4193, "step": 20360 }, { "epoch": 1.3836798478054084, "grad_norm": 1.9788281917572021, "learning_rate": 0.0008271079630384564, "loss": 3.7645, "step": 20365 }, { "epoch": 1.38401956787607, "grad_norm": 1.7780663967132568, "learning_rate": 0.0008270654980296236, "loss": 3.6437, "step": 20370 }, { "epoch": 1.384359287946732, "grad_norm": 1.9393073320388794, "learning_rate": 0.0008270230330207908, "loss": 3.6329, "step": 20375 }, { "epoch": 1.3846990080173938, "grad_norm": 1.9355976581573486, "learning_rate": 0.0008269805680119582, "loss": 3.8539, "step": 20380 }, { "epoch": 1.3850387280880554, "grad_norm": 1.9931862354278564, "learning_rate": 0.0008269381030031255, "loss": 3.7724, "step": 20385 }, { "epoch": 1.3853784481587172, "grad_norm": 1.765750765800476, "learning_rate": 0.0008268956379942927, "loss": 3.3573, "step": 20390 }, { "epoch": 1.385718168229379, "grad_norm": 2.058882474899292, "learning_rate": 0.0008268531729854601, "loss": 3.5904, "step": 20395 }, { "epoch": 1.3860578883000407, "grad_norm": 1.8346401453018188, "learning_rate": 0.0008268107079766273, "loss": 3.5524, "step": 20400 }, { "epoch": 1.3863976083707026, "grad_norm": 2.6825129985809326, "learning_rate": 0.0008267682429677945, "loss": 3.5305, "step": 20405 }, { "epoch": 1.3867373284413644, "grad_norm": 1.8743211030960083, "learning_rate": 0.0008267257779589618, "loss": 3.4724, "step": 20410 }, { "epoch": 1.387077048512026, "grad_norm": 2.4132096767425537, "learning_rate": 0.0008266833129501291, "loss": 3.6505, "step": 20415 }, { "epoch": 1.387416768582688, "grad_norm": 1.502934455871582, 
"learning_rate": 0.0008266408479412964, "loss": 3.7182, "step": 20420 }, { "epoch": 1.3877564886533498, "grad_norm": 1.740283727645874, "learning_rate": 0.0008265983829324637, "loss": 3.5187, "step": 20425 }, { "epoch": 1.3880962087240114, "grad_norm": 1.9448657035827637, "learning_rate": 0.000826555917923631, "loss": 3.5295, "step": 20430 }, { "epoch": 1.3884359287946733, "grad_norm": 2.0320639610290527, "learning_rate": 0.0008265134529147982, "loss": 3.6885, "step": 20435 }, { "epoch": 1.3887756488653349, "grad_norm": 1.9297077655792236, "learning_rate": 0.0008264709879059655, "loss": 3.5344, "step": 20440 }, { "epoch": 1.3891153689359967, "grad_norm": 1.853185772895813, "learning_rate": 0.0008264285228971327, "loss": 3.7506, "step": 20445 }, { "epoch": 1.3894550890066586, "grad_norm": 2.467832326889038, "learning_rate": 0.0008263860578883, "loss": 3.6326, "step": 20450 }, { "epoch": 1.3897948090773202, "grad_norm": 1.8064647912979126, "learning_rate": 0.0008263435928794674, "loss": 3.3717, "step": 20455 }, { "epoch": 1.390134529147982, "grad_norm": 1.7196629047393799, "learning_rate": 0.0008263011278706346, "loss": 3.8313, "step": 20460 }, { "epoch": 1.3904742492186437, "grad_norm": 1.6832661628723145, "learning_rate": 0.0008262586628618019, "loss": 3.5978, "step": 20465 }, { "epoch": 1.3908139692893056, "grad_norm": 2.034452199935913, "learning_rate": 0.0008262161978529692, "loss": 3.4325, "step": 20470 }, { "epoch": 1.3911536893599674, "grad_norm": 2.105921506881714, "learning_rate": 0.0008261737328441364, "loss": 3.5121, "step": 20475 }, { "epoch": 1.391493409430629, "grad_norm": 2.17626953125, "learning_rate": 0.0008261312678353037, "loss": 3.7487, "step": 20480 }, { "epoch": 1.391833129501291, "grad_norm": 1.9952800273895264, "learning_rate": 0.000826088802826471, "loss": 3.4155, "step": 20485 }, { "epoch": 1.3921728495719528, "grad_norm": 2.0503087043762207, "learning_rate": 0.0008260463378176383, "loss": 3.5784, "step": 20490 }, { "epoch": 
1.3925125696426144, "grad_norm": 2.213972330093384, "learning_rate": 0.0008260038728088055, "loss": 3.3998, "step": 20495 }, { "epoch": 1.3928522897132762, "grad_norm": 2.4342122077941895, "learning_rate": 0.0008259614077999729, "loss": 3.6266, "step": 20500 }, { "epoch": 1.393192009783938, "grad_norm": 2.189854145050049, "learning_rate": 0.0008259189427911401, "loss": 3.6051, "step": 20505 }, { "epoch": 1.3935317298545997, "grad_norm": 1.9494214057922363, "learning_rate": 0.0008258764777823073, "loss": 3.4807, "step": 20510 }, { "epoch": 1.3938714499252616, "grad_norm": 2.894514322280884, "learning_rate": 0.0008258340127734747, "loss": 3.4795, "step": 20515 }, { "epoch": 1.3942111699959234, "grad_norm": 2.415106773376465, "learning_rate": 0.0008257915477646419, "loss": 3.7413, "step": 20520 }, { "epoch": 1.394550890066585, "grad_norm": 2.382859230041504, "learning_rate": 0.0008257490827558092, "loss": 3.646, "step": 20525 }, { "epoch": 1.394890610137247, "grad_norm": 2.2766261100769043, "learning_rate": 0.0008257066177469766, "loss": 3.4149, "step": 20530 }, { "epoch": 1.3952303302079088, "grad_norm": 2.0107545852661133, "learning_rate": 0.0008256641527381438, "loss": 3.574, "step": 20535 }, { "epoch": 1.3955700502785704, "grad_norm": 1.5392698049545288, "learning_rate": 0.000825621687729311, "loss": 3.6912, "step": 20540 }, { "epoch": 1.3959097703492322, "grad_norm": 1.7835854291915894, "learning_rate": 0.0008255792227204783, "loss": 3.5646, "step": 20545 }, { "epoch": 1.396249490419894, "grad_norm": 2.2729525566101074, "learning_rate": 0.0008255367577116456, "loss": 3.3676, "step": 20550 }, { "epoch": 1.3965892104905557, "grad_norm": 1.5035099983215332, "learning_rate": 0.0008254942927028129, "loss": 3.3535, "step": 20555 }, { "epoch": 1.3969289305612176, "grad_norm": 1.5773344039916992, "learning_rate": 0.0008254518276939802, "loss": 3.4269, "step": 20560 }, { "epoch": 1.3972686506318794, "grad_norm": 1.8008272647857666, "learning_rate": 0.0008254093626851475, 
"loss": 3.6423, "step": 20565 }, { "epoch": 1.397608370702541, "grad_norm": 1.9697099924087524, "learning_rate": 0.0008253668976763148, "loss": 3.4706, "step": 20570 }, { "epoch": 1.397948090773203, "grad_norm": 1.784127950668335, "learning_rate": 0.000825324432667482, "loss": 3.2207, "step": 20575 }, { "epoch": 1.3982878108438648, "grad_norm": 2.3902359008789062, "learning_rate": 0.0008252819676586493, "loss": 3.6116, "step": 20580 }, { "epoch": 1.3986275309145264, "grad_norm": 2.3802123069763184, "learning_rate": 0.0008252395026498166, "loss": 3.4628, "step": 20585 }, { "epoch": 1.3989672509851883, "grad_norm": 2.0574450492858887, "learning_rate": 0.0008251970376409838, "loss": 3.636, "step": 20590 }, { "epoch": 1.3993069710558501, "grad_norm": 2.0733482837677, "learning_rate": 0.0008251545726321512, "loss": 3.4407, "step": 20595 }, { "epoch": 1.3996466911265117, "grad_norm": 2.4224441051483154, "learning_rate": 0.0008251121076233185, "loss": 3.5856, "step": 20600 }, { "epoch": 1.3999864111971736, "grad_norm": 2.0128886699676514, "learning_rate": 0.0008250696426144857, "loss": 3.6737, "step": 20605 }, { "epoch": 1.4003261312678352, "grad_norm": 2.239655017852783, "learning_rate": 0.0008250271776056529, "loss": 3.6835, "step": 20610 }, { "epoch": 1.400665851338497, "grad_norm": 1.4488029479980469, "learning_rate": 0.0008249847125968203, "loss": 3.7597, "step": 20615 }, { "epoch": 1.401005571409159, "grad_norm": 2.004549503326416, "learning_rate": 0.0008249422475879875, "loss": 3.5352, "step": 20620 }, { "epoch": 1.4013452914798206, "grad_norm": 2.1666364669799805, "learning_rate": 0.0008248997825791547, "loss": 3.4107, "step": 20625 }, { "epoch": 1.4016850115504824, "grad_norm": 1.366011619567871, "learning_rate": 0.0008248573175703222, "loss": 3.5318, "step": 20630 }, { "epoch": 1.402024731621144, "grad_norm": 1.9968743324279785, "learning_rate": 0.0008248148525614894, "loss": 3.4049, "step": 20635 }, { "epoch": 1.402364451691806, "grad_norm": 1.857835054397583, 
"learning_rate": 0.0008247723875526566, "loss": 3.9164, "step": 20640 }, { "epoch": 1.4027041717624678, "grad_norm": 2.4286725521087646, "learning_rate": 0.000824729922543824, "loss": 3.8654, "step": 20645 }, { "epoch": 1.4030438918331294, "grad_norm": 2.4834580421447754, "learning_rate": 0.0008246874575349912, "loss": 3.5665, "step": 20650 }, { "epoch": 1.4033836119037912, "grad_norm": 1.8415297269821167, "learning_rate": 0.0008246449925261584, "loss": 3.3237, "step": 20655 }, { "epoch": 1.403723331974453, "grad_norm": 1.8561662435531616, "learning_rate": 0.0008246025275173257, "loss": 3.5774, "step": 20660 }, { "epoch": 1.4040630520451147, "grad_norm": 1.9479914903640747, "learning_rate": 0.0008245600625084931, "loss": 3.7693, "step": 20665 }, { "epoch": 1.4044027721157766, "grad_norm": 2.003818988800049, "learning_rate": 0.0008245175974996603, "loss": 3.5396, "step": 20670 }, { "epoch": 1.4047424921864384, "grad_norm": 1.8602107763290405, "learning_rate": 0.0008244751324908276, "loss": 3.4446, "step": 20675 }, { "epoch": 1.4050822122571, "grad_norm": 1.9254742860794067, "learning_rate": 0.0008244326674819949, "loss": 3.7179, "step": 20680 }, { "epoch": 1.405421932327762, "grad_norm": 1.6462037563323975, "learning_rate": 0.0008243902024731621, "loss": 3.501, "step": 20685 }, { "epoch": 1.4057616523984238, "grad_norm": 1.761430025100708, "learning_rate": 0.0008243477374643294, "loss": 3.3807, "step": 20690 }, { "epoch": 1.4061013724690854, "grad_norm": 1.952024221420288, "learning_rate": 0.0008243052724554966, "loss": 3.5291, "step": 20695 }, { "epoch": 1.4064410925397473, "grad_norm": 2.314180850982666, "learning_rate": 0.000824262807446664, "loss": 3.6963, "step": 20700 }, { "epoch": 1.406780812610409, "grad_norm": 1.899109125137329, "learning_rate": 0.0008242203424378313, "loss": 3.5388, "step": 20705 }, { "epoch": 1.4071205326810707, "grad_norm": 1.9915330410003662, "learning_rate": 0.0008241778774289985, "loss": 3.8185, "step": 20710 }, { "epoch": 
1.4074602527517326, "grad_norm": 1.9845376014709473, "learning_rate": 0.0008241354124201658, "loss": 3.4692, "step": 20715 }, { "epoch": 1.4077999728223944, "grad_norm": 1.527084469795227, "learning_rate": 0.0008240929474113331, "loss": 3.2608, "step": 20720 }, { "epoch": 1.408139692893056, "grad_norm": 1.8069730997085571, "learning_rate": 0.0008240504824025003, "loss": 3.4447, "step": 20725 }, { "epoch": 1.408479412963718, "grad_norm": 2.203946590423584, "learning_rate": 0.0008240080173936675, "loss": 3.3887, "step": 20730 }, { "epoch": 1.4088191330343798, "grad_norm": 1.9014595746994019, "learning_rate": 0.000823965552384835, "loss": 3.6758, "step": 20735 }, { "epoch": 1.4091588531050414, "grad_norm": 2.485318899154663, "learning_rate": 0.0008239230873760022, "loss": 3.4185, "step": 20740 }, { "epoch": 1.4094985731757033, "grad_norm": 1.9093072414398193, "learning_rate": 0.0008238806223671694, "loss": 3.471, "step": 20745 }, { "epoch": 1.4098382932463651, "grad_norm": 2.6945960521698, "learning_rate": 0.0008238381573583368, "loss": 3.6236, "step": 20750 }, { "epoch": 1.4101780133170267, "grad_norm": 2.647829532623291, "learning_rate": 0.000823795692349504, "loss": 3.4279, "step": 20755 }, { "epoch": 1.4105177333876886, "grad_norm": 1.847754955291748, "learning_rate": 0.0008237532273406712, "loss": 3.4587, "step": 20760 }, { "epoch": 1.4108574534583505, "grad_norm": 1.998573660850525, "learning_rate": 0.0008237107623318386, "loss": 3.6146, "step": 20765 }, { "epoch": 1.411197173529012, "grad_norm": 1.9420814514160156, "learning_rate": 0.0008236682973230059, "loss": 3.6358, "step": 20770 }, { "epoch": 1.411536893599674, "grad_norm": 2.526139974594116, "learning_rate": 0.0008236258323141731, "loss": 3.4733, "step": 20775 }, { "epoch": 1.4118766136703356, "grad_norm": 1.8666788339614868, "learning_rate": 0.0008235833673053405, "loss": 3.7084, "step": 20780 }, { "epoch": 1.4122163337409974, "grad_norm": 2.2567315101623535, "learning_rate": 0.0008235409022965077, 
"loss": 3.6474, "step": 20785 }, { "epoch": 1.4125560538116593, "grad_norm": 2.5347421169281006, "learning_rate": 0.0008234984372876749, "loss": 3.5188, "step": 20790 }, { "epoch": 1.412895773882321, "grad_norm": 1.7067657709121704, "learning_rate": 0.0008234559722788422, "loss": 3.6462, "step": 20795 }, { "epoch": 1.4132354939529828, "grad_norm": 2.1528286933898926, "learning_rate": 0.0008234135072700095, "loss": 3.5538, "step": 20800 }, { "epoch": 1.4135752140236444, "grad_norm": 1.8030632734298706, "learning_rate": 0.0008233710422611768, "loss": 3.55, "step": 20805 }, { "epoch": 1.4139149340943062, "grad_norm": 1.6686018705368042, "learning_rate": 0.0008233285772523441, "loss": 3.3863, "step": 20810 }, { "epoch": 1.414254654164968, "grad_norm": 2.3682007789611816, "learning_rate": 0.0008232861122435114, "loss": 3.7917, "step": 20815 }, { "epoch": 1.4145943742356297, "grad_norm": 2.8847572803497314, "learning_rate": 0.0008232436472346786, "loss": 3.6693, "step": 20820 }, { "epoch": 1.4149340943062916, "grad_norm": 1.9409481287002563, "learning_rate": 0.0008232011822258459, "loss": 3.6332, "step": 20825 }, { "epoch": 1.4152738143769534, "grad_norm": 1.9790875911712646, "learning_rate": 0.0008231587172170132, "loss": 3.5864, "step": 20830 }, { "epoch": 1.415613534447615, "grad_norm": 1.6567002534866333, "learning_rate": 0.0008231162522081804, "loss": 3.4859, "step": 20835 }, { "epoch": 1.415953254518277, "grad_norm": 1.8380496501922607, "learning_rate": 0.0008230737871993478, "loss": 3.6394, "step": 20840 }, { "epoch": 1.4162929745889388, "grad_norm": 2.422773838043213, "learning_rate": 0.000823031322190515, "loss": 3.8754, "step": 20845 }, { "epoch": 1.4166326946596004, "grad_norm": 1.8621824979782104, "learning_rate": 0.0008229888571816823, "loss": 3.4298, "step": 20850 }, { "epoch": 1.4169724147302623, "grad_norm": 1.8576674461364746, "learning_rate": 0.0008229463921728496, "loss": 3.3607, "step": 20855 }, { "epoch": 1.417312134800924, "grad_norm": 
1.9567008018493652, "learning_rate": 0.0008229039271640168, "loss": 3.5219, "step": 20860 }, { "epoch": 1.4176518548715857, "grad_norm": 2.3595430850982666, "learning_rate": 0.0008228614621551841, "loss": 3.5409, "step": 20865 }, { "epoch": 1.4179915749422476, "grad_norm": 1.861998438835144, "learning_rate": 0.0008228189971463514, "loss": 3.6108, "step": 20870 }, { "epoch": 1.4183312950129094, "grad_norm": 1.9270273447036743, "learning_rate": 0.0008227765321375187, "loss": 3.5949, "step": 20875 }, { "epoch": 1.418671015083571, "grad_norm": 2.0015525817871094, "learning_rate": 0.000822734067128686, "loss": 3.7138, "step": 20880 }, { "epoch": 1.419010735154233, "grad_norm": 1.9379183053970337, "learning_rate": 0.0008226916021198533, "loss": 3.5523, "step": 20885 }, { "epoch": 1.4193504552248948, "grad_norm": 1.985959529876709, "learning_rate": 0.0008226491371110205, "loss": 3.6246, "step": 20890 }, { "epoch": 1.4196901752955564, "grad_norm": 1.9603816270828247, "learning_rate": 0.0008226066721021878, "loss": 3.7373, "step": 20895 }, { "epoch": 1.4200298953662183, "grad_norm": 1.8062885999679565, "learning_rate": 0.0008225642070933551, "loss": 3.6054, "step": 20900 }, { "epoch": 1.4203696154368801, "grad_norm": 2.3290669918060303, "learning_rate": 0.0008225217420845223, "loss": 3.3672, "step": 20905 }, { "epoch": 1.4207093355075417, "grad_norm": 2.284879207611084, "learning_rate": 0.0008224792770756897, "loss": 3.8339, "step": 20910 }, { "epoch": 1.4210490555782036, "grad_norm": 1.8868943452835083, "learning_rate": 0.000822436812066857, "loss": 3.5418, "step": 20915 }, { "epoch": 1.4213887756488655, "grad_norm": 1.993499517440796, "learning_rate": 0.0008223943470580242, "loss": 3.7588, "step": 20920 }, { "epoch": 1.421728495719527, "grad_norm": 2.3413619995117188, "learning_rate": 0.0008223518820491915, "loss": 3.56, "step": 20925 }, { "epoch": 1.422068215790189, "grad_norm": 1.7981276512145996, "learning_rate": 0.0008223094170403588, "loss": 3.5423, "step": 20930 }, 
{ "epoch": 1.4224079358608508, "grad_norm": 2.2612080574035645, "learning_rate": 0.000822266952031526, "loss": 3.6158, "step": 20935 }, { "epoch": 1.4227476559315124, "grad_norm": 1.7744181156158447, "learning_rate": 0.0008222244870226934, "loss": 3.3367, "step": 20940 }, { "epoch": 1.4230873760021743, "grad_norm": 1.7577515840530396, "learning_rate": 0.0008221820220138606, "loss": 3.4915, "step": 20945 }, { "epoch": 1.423427096072836, "grad_norm": 2.6690618991851807, "learning_rate": 0.0008221395570050279, "loss": 3.4423, "step": 20950 }, { "epoch": 1.4237668161434978, "grad_norm": 1.6761106252670288, "learning_rate": 0.0008220970919961952, "loss": 3.7573, "step": 20955 }, { "epoch": 1.4241065362141596, "grad_norm": 2.15740704536438, "learning_rate": 0.0008220546269873624, "loss": 3.6878, "step": 20960 }, { "epoch": 1.4244462562848212, "grad_norm": 1.6548311710357666, "learning_rate": 0.0008220121619785297, "loss": 3.4862, "step": 20965 }, { "epoch": 1.424785976355483, "grad_norm": 2.1478381156921387, "learning_rate": 0.000821969696969697, "loss": 3.6155, "step": 20970 }, { "epoch": 1.4251256964261447, "grad_norm": 2.034367799758911, "learning_rate": 0.0008219272319608643, "loss": 3.5056, "step": 20975 }, { "epoch": 1.4254654164968066, "grad_norm": 1.995638370513916, "learning_rate": 0.0008218847669520316, "loss": 3.745, "step": 20980 }, { "epoch": 1.4258051365674684, "grad_norm": 1.9879459142684937, "learning_rate": 0.0008218423019431989, "loss": 3.5248, "step": 20985 }, { "epoch": 1.42614485663813, "grad_norm": 2.4555468559265137, "learning_rate": 0.0008217998369343661, "loss": 3.4378, "step": 20990 }, { "epoch": 1.426484576708792, "grad_norm": 1.6315034627914429, "learning_rate": 0.0008217573719255333, "loss": 3.3613, "step": 20995 }, { "epoch": 1.4268242967794538, "grad_norm": 1.704077959060669, "learning_rate": 0.0008217149069167007, "loss": 3.5118, "step": 21000 }, { "epoch": 1.4271640168501154, "grad_norm": 2.1232941150665283, "learning_rate": 
0.0008216724419078679, "loss": 3.7779, "step": 21005 }, { "epoch": 1.4275037369207773, "grad_norm": 2.085385322570801, "learning_rate": 0.0008216299768990352, "loss": 3.6927, "step": 21010 }, { "epoch": 1.427843456991439, "grad_norm": 1.9137909412384033, "learning_rate": 0.0008215875118902026, "loss": 3.805, "step": 21015 }, { "epoch": 1.4281831770621007, "grad_norm": 1.8834259510040283, "learning_rate": 0.0008215450468813698, "loss": 3.3926, "step": 21020 }, { "epoch": 1.4285228971327626, "grad_norm": 2.1120150089263916, "learning_rate": 0.000821502581872537, "loss": 3.6262, "step": 21025 }, { "epoch": 1.4288626172034244, "grad_norm": 1.9816522598266602, "learning_rate": 0.0008214601168637044, "loss": 3.6533, "step": 21030 }, { "epoch": 1.429202337274086, "grad_norm": 1.7638511657714844, "learning_rate": 0.0008214176518548716, "loss": 3.6554, "step": 21035 }, { "epoch": 1.429542057344748, "grad_norm": 1.6368736028671265, "learning_rate": 0.0008213751868460388, "loss": 3.6639, "step": 21040 }, { "epoch": 1.4298817774154098, "grad_norm": 1.6608259677886963, "learning_rate": 0.0008213327218372062, "loss": 3.5365, "step": 21045 }, { "epoch": 1.4302214974860714, "grad_norm": 1.7252144813537598, "learning_rate": 0.0008212902568283735, "loss": 3.7155, "step": 21050 }, { "epoch": 1.4305612175567333, "grad_norm": 2.0800533294677734, "learning_rate": 0.0008212477918195407, "loss": 3.6875, "step": 21055 }, { "epoch": 1.4309009376273951, "grad_norm": 1.9842891693115234, "learning_rate": 0.000821205326810708, "loss": 3.6207, "step": 21060 }, { "epoch": 1.4312406576980568, "grad_norm": 1.6174840927124023, "learning_rate": 0.0008211628618018753, "loss": 3.6291, "step": 21065 }, { "epoch": 1.4315803777687186, "grad_norm": 1.8980998992919922, "learning_rate": 0.0008211203967930425, "loss": 3.4835, "step": 21070 }, { "epoch": 1.4319200978393805, "grad_norm": 1.776246428489685, "learning_rate": 0.0008210779317842098, "loss": 3.414, "step": 21075 }, { "epoch": 1.432259817910042, 
"grad_norm": 2.161036491394043, "learning_rate": 0.0008210354667753772, "loss": 3.4108, "step": 21080 }, { "epoch": 1.432599537980704, "grad_norm": 2.3793997764587402, "learning_rate": 0.0008209930017665444, "loss": 3.7678, "step": 21085 }, { "epoch": 1.4329392580513658, "grad_norm": 2.109483242034912, "learning_rate": 0.0008209505367577117, "loss": 3.5266, "step": 21090 }, { "epoch": 1.4332789781220274, "grad_norm": 1.7944260835647583, "learning_rate": 0.0008209080717488789, "loss": 3.4691, "step": 21095 }, { "epoch": 1.4336186981926893, "grad_norm": 2.044189691543579, "learning_rate": 0.0008208656067400462, "loss": 3.1874, "step": 21100 }, { "epoch": 1.4339584182633511, "grad_norm": 2.0117456912994385, "learning_rate": 0.0008208231417312135, "loss": 3.4548, "step": 21105 }, { "epoch": 1.4342981383340128, "grad_norm": 1.7488019466400146, "learning_rate": 0.0008207806767223807, "loss": 3.6023, "step": 21110 }, { "epoch": 1.4346378584046746, "grad_norm": 1.6693031787872314, "learning_rate": 0.0008207382117135481, "loss": 3.4283, "step": 21115 }, { "epoch": 1.4349775784753362, "grad_norm": 2.0622901916503906, "learning_rate": 0.0008206957467047154, "loss": 3.6158, "step": 21120 }, { "epoch": 1.435317298545998, "grad_norm": 1.536251187324524, "learning_rate": 0.0008206532816958826, "loss": 3.5517, "step": 21125 }, { "epoch": 1.43565701861666, "grad_norm": 2.148108959197998, "learning_rate": 0.0008206108166870498, "loss": 3.6615, "step": 21130 }, { "epoch": 1.4359967386873216, "grad_norm": 1.5426220893859863, "learning_rate": 0.0008205683516782172, "loss": 3.652, "step": 21135 }, { "epoch": 1.4363364587579834, "grad_norm": 2.0334408283233643, "learning_rate": 0.0008205258866693844, "loss": 3.4981, "step": 21140 }, { "epoch": 1.436676178828645, "grad_norm": 1.910610556602478, "learning_rate": 0.0008204919146623183, "loss": 3.6248, "step": 21145 }, { "epoch": 1.437015898899307, "grad_norm": 2.3079142570495605, "learning_rate": 0.0008204494496534856, "loss": 3.6179, 
"step": 21150 }, { "epoch": 1.4373556189699688, "grad_norm": 1.7205005884170532, "learning_rate": 0.0008204069846446528, "loss": 3.4568, "step": 21155 }, { "epoch": 1.4376953390406304, "grad_norm": 2.1678810119628906, "learning_rate": 0.0008203645196358201, "loss": 3.343, "step": 21160 }, { "epoch": 1.4380350591112923, "grad_norm": 2.385842800140381, "learning_rate": 0.0008203220546269874, "loss": 3.6703, "step": 21165 }, { "epoch": 1.438374779181954, "grad_norm": 1.6977307796478271, "learning_rate": 0.0008202795896181546, "loss": 3.8979, "step": 21170 }, { "epoch": 1.4387144992526157, "grad_norm": 2.3533596992492676, "learning_rate": 0.000820237124609322, "loss": 3.6215, "step": 21175 }, { "epoch": 1.4390542193232776, "grad_norm": 1.4748166799545288, "learning_rate": 0.0008201946596004892, "loss": 3.4591, "step": 21180 }, { "epoch": 1.4393939393939394, "grad_norm": 1.755553960800171, "learning_rate": 0.0008201521945916565, "loss": 3.7343, "step": 21185 }, { "epoch": 1.439733659464601, "grad_norm": 2.589813232421875, "learning_rate": 0.0008201097295828238, "loss": 3.5489, "step": 21190 }, { "epoch": 1.440073379535263, "grad_norm": 2.071423053741455, "learning_rate": 0.000820067264573991, "loss": 3.6557, "step": 21195 }, { "epoch": 1.4404130996059248, "grad_norm": 2.024665117263794, "learning_rate": 0.0008200247995651583, "loss": 3.4912, "step": 21200 }, { "epoch": 1.4407528196765864, "grad_norm": 2.024566411972046, "learning_rate": 0.0008199823345563257, "loss": 3.9488, "step": 21205 }, { "epoch": 1.4410925397472483, "grad_norm": 1.813926100730896, "learning_rate": 0.0008199398695474929, "loss": 3.5842, "step": 21210 }, { "epoch": 1.4414322598179101, "grad_norm": 1.9188120365142822, "learning_rate": 0.0008198974045386602, "loss": 3.7133, "step": 21215 }, { "epoch": 1.4417719798885718, "grad_norm": 2.0997345447540283, "learning_rate": 0.0008198549395298275, "loss": 3.5338, "step": 21220 }, { "epoch": 1.4421116999592336, "grad_norm": 2.639848470687866, 
"learning_rate": 0.0008198124745209947, "loss": 3.7847, "step": 21225 }, { "epoch": 1.4424514200298955, "grad_norm": 1.9865617752075195, "learning_rate": 0.0008197700095121619, "loss": 3.5732, "step": 21230 }, { "epoch": 1.442791140100557, "grad_norm": 2.6164543628692627, "learning_rate": 0.0008197275445033293, "loss": 3.5995, "step": 21235 }, { "epoch": 1.443130860171219, "grad_norm": 1.8227885961532593, "learning_rate": 0.0008196850794944966, "loss": 3.4082, "step": 21240 }, { "epoch": 1.4434705802418808, "grad_norm": 1.910162329673767, "learning_rate": 0.0008196426144856638, "loss": 3.6917, "step": 21245 }, { "epoch": 1.4438103003125424, "grad_norm": 1.9617167711257935, "learning_rate": 0.0008196001494768312, "loss": 3.3544, "step": 21250 }, { "epoch": 1.4441500203832043, "grad_norm": 2.4598145484924316, "learning_rate": 0.0008195576844679984, "loss": 3.6715, "step": 21255 }, { "epoch": 1.4444897404538661, "grad_norm": 1.93595290184021, "learning_rate": 0.0008195152194591656, "loss": 3.3857, "step": 21260 }, { "epoch": 1.4448294605245278, "grad_norm": 2.153637170791626, "learning_rate": 0.000819472754450333, "loss": 3.3636, "step": 21265 }, { "epoch": 1.4451691805951896, "grad_norm": 1.876846194267273, "learning_rate": 0.0008194302894415002, "loss": 3.5357, "step": 21270 }, { "epoch": 1.4455089006658515, "grad_norm": 3.3554627895355225, "learning_rate": 0.0008193878244326675, "loss": 3.438, "step": 21275 }, { "epoch": 1.445848620736513, "grad_norm": 1.6118967533111572, "learning_rate": 0.0008193453594238348, "loss": 3.4827, "step": 21280 }, { "epoch": 1.446188340807175, "grad_norm": 2.025271415710449, "learning_rate": 0.0008193028944150021, "loss": 3.7359, "step": 21285 }, { "epoch": 1.4465280608778366, "grad_norm": 1.5400316715240479, "learning_rate": 0.0008192604294061693, "loss": 3.8176, "step": 21290 }, { "epoch": 1.4468677809484984, "grad_norm": 2.447354555130005, "learning_rate": 0.0008192179643973366, "loss": 3.6659, "step": 21295 }, { "epoch": 
1.4472075010191603, "grad_norm": 2.0164220333099365, "learning_rate": 0.0008191754993885039, "loss": 3.6038, "step": 21300 }, { "epoch": 1.447547221089822, "grad_norm": 2.185429096221924, "learning_rate": 0.0008191330343796711, "loss": 3.4824, "step": 21305 }, { "epoch": 1.4478869411604838, "grad_norm": 2.111337661743164, "learning_rate": 0.0008190905693708385, "loss": 3.5661, "step": 21310 }, { "epoch": 1.4482266612311454, "grad_norm": 2.201200246810913, "learning_rate": 0.0008190481043620058, "loss": 3.5803, "step": 21315 }, { "epoch": 1.4485663813018073, "grad_norm": 1.9399479627609253, "learning_rate": 0.000819005639353173, "loss": 3.4974, "step": 21320 }, { "epoch": 1.4489061013724691, "grad_norm": 2.069608688354492, "learning_rate": 0.0008189631743443403, "loss": 3.688, "step": 21325 }, { "epoch": 1.4492458214431307, "grad_norm": 2.0622262954711914, "learning_rate": 0.0008189207093355075, "loss": 3.5479, "step": 21330 }, { "epoch": 1.4495855415137926, "grad_norm": 1.790718913078308, "learning_rate": 0.0008188782443266748, "loss": 3.7438, "step": 21335 }, { "epoch": 1.4499252615844545, "grad_norm": 1.9960354566574097, "learning_rate": 0.0008188357793178421, "loss": 3.5208, "step": 21340 }, { "epoch": 1.450264981655116, "grad_norm": 1.85335111618042, "learning_rate": 0.0008187933143090094, "loss": 3.7386, "step": 21345 }, { "epoch": 1.450604701725778, "grad_norm": 3.191141366958618, "learning_rate": 0.0008187508493001767, "loss": 3.5647, "step": 21350 }, { "epoch": 1.4509444217964398, "grad_norm": 2.183319330215454, "learning_rate": 0.000818708384291344, "loss": 3.7666, "step": 21355 }, { "epoch": 1.4512841418671014, "grad_norm": 1.9056901931762695, "learning_rate": 0.0008186659192825112, "loss": 3.5183, "step": 21360 }, { "epoch": 1.4516238619377633, "grad_norm": 1.7130544185638428, "learning_rate": 0.0008186234542736784, "loss": 3.4414, "step": 21365 }, { "epoch": 1.4519635820084251, "grad_norm": 1.6768193244934082, "learning_rate": 0.0008185809892648458, 
"loss": 3.8571, "step": 21370 }, { "epoch": 1.4523033020790868, "grad_norm": 1.7786977291107178, "learning_rate": 0.000818538524256013, "loss": 3.5233, "step": 21375 }, { "epoch": 1.4526430221497486, "grad_norm": 2.8728621006011963, "learning_rate": 0.0008184960592471803, "loss": 3.2932, "step": 21380 }, { "epoch": 1.4529827422204105, "grad_norm": 2.028078317642212, "learning_rate": 0.0008184535942383477, "loss": 3.7471, "step": 21385 }, { "epoch": 1.453322462291072, "grad_norm": 2.0609993934631348, "learning_rate": 0.0008184111292295149, "loss": 3.6257, "step": 21390 }, { "epoch": 1.453662182361734, "grad_norm": 2.5297818183898926, "learning_rate": 0.0008183686642206821, "loss": 3.2801, "step": 21395 }, { "epoch": 1.4540019024323958, "grad_norm": 2.029083490371704, "learning_rate": 0.0008183261992118495, "loss": 3.3961, "step": 21400 }, { "epoch": 1.4543416225030574, "grad_norm": 1.823486566543579, "learning_rate": 0.0008182837342030167, "loss": 3.6423, "step": 21405 }, { "epoch": 1.4546813425737193, "grad_norm": 2.3106722831726074, "learning_rate": 0.0008182412691941839, "loss": 3.662, "step": 21410 }, { "epoch": 1.4550210626443811, "grad_norm": 2.1413698196411133, "learning_rate": 0.0008181988041853514, "loss": 3.6598, "step": 21415 }, { "epoch": 1.4553607827150428, "grad_norm": 2.1998345851898193, "learning_rate": 0.0008181563391765186, "loss": 3.4296, "step": 21420 }, { "epoch": 1.4557005027857046, "grad_norm": 1.6632771492004395, "learning_rate": 0.0008181138741676858, "loss": 3.5224, "step": 21425 }, { "epoch": 1.4560402228563665, "grad_norm": 2.8682737350463867, "learning_rate": 0.0008180714091588531, "loss": 3.5506, "step": 21430 }, { "epoch": 1.456379942927028, "grad_norm": 1.8010164499282837, "learning_rate": 0.0008180289441500204, "loss": 3.4334, "step": 21435 }, { "epoch": 1.45671966299769, "grad_norm": 2.2170326709747314, "learning_rate": 0.0008179864791411877, "loss": 3.6841, "step": 21440 }, { "epoch": 1.4570593830683518, "grad_norm": 
2.0164742469787598, "learning_rate": 0.0008179440141323549, "loss": 3.5601, "step": 21445 }, { "epoch": 1.4573991031390134, "grad_norm": 2.022794246673584, "learning_rate": 0.0008179015491235223, "loss": 3.8147, "step": 21450 }, { "epoch": 1.4577388232096753, "grad_norm": 1.7467037439346313, "learning_rate": 0.0008178590841146896, "loss": 3.5926, "step": 21455 }, { "epoch": 1.458078543280337, "grad_norm": 2.4260690212249756, "learning_rate": 0.0008178166191058568, "loss": 3.326, "step": 21460 }, { "epoch": 1.4584182633509988, "grad_norm": 2.2505686283111572, "learning_rate": 0.000817774154097024, "loss": 3.7102, "step": 21465 }, { "epoch": 1.4587579834216606, "grad_norm": 1.607245922088623, "learning_rate": 0.0008177316890881914, "loss": 3.3833, "step": 21470 }, { "epoch": 1.4590977034923223, "grad_norm": 2.0183281898498535, "learning_rate": 0.0008176892240793586, "loss": 3.4834, "step": 21475 }, { "epoch": 1.4594374235629841, "grad_norm": 1.9167125225067139, "learning_rate": 0.0008176467590705258, "loss": 3.7628, "step": 21480 }, { "epoch": 1.4597771436336457, "grad_norm": 2.0080678462982178, "learning_rate": 0.0008176042940616933, "loss": 3.6203, "step": 21485 }, { "epoch": 1.4601168637043076, "grad_norm": 2.757197856903076, "learning_rate": 0.0008175618290528605, "loss": 3.9315, "step": 21490 }, { "epoch": 1.4604565837749695, "grad_norm": 1.9970637559890747, "learning_rate": 0.0008175193640440277, "loss": 3.7609, "step": 21495 }, { "epoch": 1.460796303845631, "grad_norm": 2.020254373550415, "learning_rate": 0.0008174768990351951, "loss": 3.3572, "step": 21500 }, { "epoch": 1.461136023916293, "grad_norm": 2.176140069961548, "learning_rate": 0.0008174344340263623, "loss": 3.507, "step": 21505 }, { "epoch": 1.4614757439869548, "grad_norm": 1.6765880584716797, "learning_rate": 0.0008173919690175295, "loss": 3.7316, "step": 21510 }, { "epoch": 1.4618154640576164, "grad_norm": 2.3149144649505615, "learning_rate": 0.0008173495040086968, "loss": 3.6188, "step": 21515 }, 
{ "epoch": 1.4621551841282783, "grad_norm": 3.2331013679504395, "learning_rate": 0.0008173070389998642, "loss": 3.3186, "step": 21520 }, { "epoch": 1.4624949041989401, "grad_norm": 1.842803955078125, "learning_rate": 0.0008172645739910314, "loss": 3.7943, "step": 21525 }, { "epoch": 1.4628346242696018, "grad_norm": 1.8642452955245972, "learning_rate": 0.0008172221089821987, "loss": 3.5096, "step": 21530 }, { "epoch": 1.4631743443402636, "grad_norm": 2.2249817848205566, "learning_rate": 0.000817179643973366, "loss": 3.6649, "step": 21535 }, { "epoch": 1.4635140644109255, "grad_norm": 2.4430785179138184, "learning_rate": 0.0008171371789645332, "loss": 3.6002, "step": 21540 }, { "epoch": 1.463853784481587, "grad_norm": 1.8193508386611938, "learning_rate": 0.0008170947139557005, "loss": 3.4019, "step": 21545 }, { "epoch": 1.464193504552249, "grad_norm": 2.1025102138519287, "learning_rate": 0.0008170522489468678, "loss": 3.76, "step": 21550 }, { "epoch": 1.4645332246229108, "grad_norm": 2.4054858684539795, "learning_rate": 0.0008170097839380351, "loss": 3.4942, "step": 21555 }, { "epoch": 1.4648729446935724, "grad_norm": 1.7655266523361206, "learning_rate": 0.0008169673189292024, "loss": 3.5008, "step": 21560 }, { "epoch": 1.4652126647642343, "grad_norm": 1.7062625885009766, "learning_rate": 0.0008169248539203696, "loss": 3.7796, "step": 21565 }, { "epoch": 1.4655523848348961, "grad_norm": 1.9982576370239258, "learning_rate": 0.0008168823889115369, "loss": 3.6852, "step": 21570 }, { "epoch": 1.4658921049055578, "grad_norm": 1.769953966140747, "learning_rate": 0.0008168399239027042, "loss": 3.7291, "step": 21575 }, { "epoch": 1.4662318249762196, "grad_norm": 2.220759391784668, "learning_rate": 0.0008167974588938714, "loss": 3.8805, "step": 21580 }, { "epoch": 1.4665715450468815, "grad_norm": 1.9060474634170532, "learning_rate": 0.0008167549938850387, "loss": 3.4027, "step": 21585 }, { "epoch": 1.466911265117543, "grad_norm": 1.7252304553985596, "learning_rate": 
0.0008167125288762061, "loss": 3.4393, "step": 21590 }, { "epoch": 1.467250985188205, "grad_norm": 1.865749478340149, "learning_rate": 0.0008166700638673733, "loss": 3.6136, "step": 21595 }, { "epoch": 1.4675907052588668, "grad_norm": 1.9822112321853638, "learning_rate": 0.0008166275988585406, "loss": 3.6973, "step": 21600 }, { "epoch": 1.4679304253295284, "grad_norm": 1.7648169994354248, "learning_rate": 0.0008165851338497079, "loss": 3.5975, "step": 21605 }, { "epoch": 1.4682701454001903, "grad_norm": 1.9816360473632812, "learning_rate": 0.0008165426688408751, "loss": 3.6354, "step": 21610 }, { "epoch": 1.4686098654708521, "grad_norm": 1.9469165802001953, "learning_rate": 0.0008165002038320423, "loss": 3.4587, "step": 21615 }, { "epoch": 1.4689495855415138, "grad_norm": 2.714731454849243, "learning_rate": 0.0008164577388232097, "loss": 3.5658, "step": 21620 }, { "epoch": 1.4692893056121756, "grad_norm": 2.093950033187866, "learning_rate": 0.000816415273814377, "loss": 3.2805, "step": 21625 }, { "epoch": 1.4696290256828373, "grad_norm": 1.9798853397369385, "learning_rate": 0.0008163728088055442, "loss": 3.265, "step": 21630 }, { "epoch": 1.4699687457534991, "grad_norm": 2.235020875930786, "learning_rate": 0.0008163303437967116, "loss": 3.7449, "step": 21635 }, { "epoch": 1.470308465824161, "grad_norm": 2.153658151626587, "learning_rate": 0.0008162878787878788, "loss": 3.3332, "step": 21640 }, { "epoch": 1.4706481858948226, "grad_norm": 1.9614136219024658, "learning_rate": 0.000816245413779046, "loss": 3.4862, "step": 21645 }, { "epoch": 1.4709879059654845, "grad_norm": 2.3473358154296875, "learning_rate": 0.0008162029487702134, "loss": 3.5268, "step": 21650 }, { "epoch": 1.471327626036146, "grad_norm": 2.6706507205963135, "learning_rate": 0.0008161604837613806, "loss": 3.5437, "step": 21655 }, { "epoch": 1.471667346106808, "grad_norm": 2.011841297149658, "learning_rate": 0.0008161180187525479, "loss": 3.4352, "step": 21660 }, { "epoch": 1.4720070661774698, 
"grad_norm": 2.464024543762207, "learning_rate": 0.0008160755537437153, "loss": 3.6791, "step": 21665 }, { "epoch": 1.4723467862481314, "grad_norm": 2.098721742630005, "learning_rate": 0.0008160330887348825, "loss": 3.8113, "step": 21670 }, { "epoch": 1.4726865063187933, "grad_norm": 1.9020265340805054, "learning_rate": 0.0008159906237260497, "loss": 3.5689, "step": 21675 }, { "epoch": 1.4730262263894551, "grad_norm": 1.783903956413269, "learning_rate": 0.000815948158717217, "loss": 3.539, "step": 21680 }, { "epoch": 1.4733659464601168, "grad_norm": 1.7969120740890503, "learning_rate": 0.0008159056937083843, "loss": 3.5644, "step": 21685 }, { "epoch": 1.4737056665307786, "grad_norm": 1.8986393213272095, "learning_rate": 0.0008158632286995515, "loss": 3.5519, "step": 21690 }, { "epoch": 1.4740453866014405, "grad_norm": 2.1746091842651367, "learning_rate": 0.0008158207636907189, "loss": 3.8075, "step": 21695 }, { "epoch": 1.474385106672102, "grad_norm": 1.8793202638626099, "learning_rate": 0.0008157782986818862, "loss": 3.4406, "step": 21700 }, { "epoch": 1.474724826742764, "grad_norm": 1.6801910400390625, "learning_rate": 0.0008157358336730534, "loss": 3.5371, "step": 21705 }, { "epoch": 1.4750645468134258, "grad_norm": 2.06140398979187, "learning_rate": 0.0008156933686642207, "loss": 3.5425, "step": 21710 }, { "epoch": 1.4754042668840874, "grad_norm": 1.676571011543274, "learning_rate": 0.0008156509036553879, "loss": 3.6142, "step": 21715 }, { "epoch": 1.4757439869547493, "grad_norm": 2.22513747215271, "learning_rate": 0.0008156084386465552, "loss": 3.5491, "step": 21720 }, { "epoch": 1.4760837070254111, "grad_norm": 2.157200813293457, "learning_rate": 0.0008155659736377226, "loss": 3.7012, "step": 21725 }, { "epoch": 1.4764234270960728, "grad_norm": 1.6941591501235962, "learning_rate": 0.0008155235086288898, "loss": 3.4967, "step": 21730 }, { "epoch": 1.4767631471667346, "grad_norm": 1.92744779586792, "learning_rate": 0.0008154810436200571, "loss": 3.5045, "step": 
21735 }, { "epoch": 1.4771028672373965, "grad_norm": 2.051060438156128, "learning_rate": 0.0008154385786112244, "loss": 3.5379, "step": 21740 }, { "epoch": 1.477442587308058, "grad_norm": 2.1411731243133545, "learning_rate": 0.0008153961136023916, "loss": 3.3389, "step": 21745 }, { "epoch": 1.47778230737872, "grad_norm": 2.008814573287964, "learning_rate": 0.0008153536485935588, "loss": 3.5256, "step": 21750 }, { "epoch": 1.4781220274493818, "grad_norm": 1.607235312461853, "learning_rate": 0.0008153111835847262, "loss": 3.5784, "step": 21755 }, { "epoch": 1.4784617475200434, "grad_norm": 1.6266652345657349, "learning_rate": 0.0008152687185758935, "loss": 3.5729, "step": 21760 }, { "epoch": 1.4788014675907053, "grad_norm": 2.2045671939849854, "learning_rate": 0.0008152262535670607, "loss": 3.8576, "step": 21765 }, { "epoch": 1.4791411876613672, "grad_norm": 1.8832954168319702, "learning_rate": 0.0008151837885582281, "loss": 3.4473, "step": 21770 }, { "epoch": 1.4794809077320288, "grad_norm": 2.0365798473358154, "learning_rate": 0.0008151413235493953, "loss": 3.607, "step": 21775 }, { "epoch": 1.4798206278026906, "grad_norm": 1.7580002546310425, "learning_rate": 0.0008150988585405626, "loss": 3.3927, "step": 21780 }, { "epoch": 1.4801603478733525, "grad_norm": 1.5988521575927734, "learning_rate": 0.0008150563935317299, "loss": 3.7819, "step": 21785 }, { "epoch": 1.4805000679440141, "grad_norm": 2.1016762256622314, "learning_rate": 0.0008150139285228971, "loss": 3.621, "step": 21790 }, { "epoch": 1.480839788014676, "grad_norm": 2.1259894371032715, "learning_rate": 0.0008149714635140645, "loss": 3.6914, "step": 21795 }, { "epoch": 1.4811795080853376, "grad_norm": 2.3303942680358887, "learning_rate": 0.0008149289985052318, "loss": 3.6567, "step": 21800 }, { "epoch": 1.4815192281559995, "grad_norm": 1.4423158168792725, "learning_rate": 0.000814886533496399, "loss": 3.7908, "step": 21805 }, { "epoch": 1.4818589482266613, "grad_norm": 1.5443652868270874, "learning_rate": 
0.0008148440684875663, "loss": 3.6331, "step": 21810 }, { "epoch": 1.482198668297323, "grad_norm": 2.4545438289642334, "learning_rate": 0.0008148016034787335, "loss": 3.6759, "step": 21815 }, { "epoch": 1.4825383883679848, "grad_norm": 1.5371301174163818, "learning_rate": 0.0008147591384699008, "loss": 3.3319, "step": 21820 }, { "epoch": 1.4828781084386464, "grad_norm": 1.8540623188018799, "learning_rate": 0.0008147166734610681, "loss": 3.9413, "step": 21825 }, { "epoch": 1.4832178285093083, "grad_norm": 2.0054917335510254, "learning_rate": 0.0008146742084522354, "loss": 3.596, "step": 21830 }, { "epoch": 1.4835575485799701, "grad_norm": 2.6623241901397705, "learning_rate": 0.0008146317434434027, "loss": 3.7536, "step": 21835 }, { "epoch": 1.4838972686506318, "grad_norm": 2.297361373901367, "learning_rate": 0.00081458927843457, "loss": 3.5656, "step": 21840 }, { "epoch": 1.4842369887212936, "grad_norm": 1.9972542524337769, "learning_rate": 0.0008145468134257372, "loss": 3.5644, "step": 21845 }, { "epoch": 1.4845767087919555, "grad_norm": 2.2383012771606445, "learning_rate": 0.0008145043484169045, "loss": 3.5299, "step": 21850 }, { "epoch": 1.484916428862617, "grad_norm": 2.3624672889709473, "learning_rate": 0.0008144618834080718, "loss": 3.7406, "step": 21855 }, { "epoch": 1.485256148933279, "grad_norm": 2.1952826976776123, "learning_rate": 0.000814419418399239, "loss": 3.5696, "step": 21860 }, { "epoch": 1.4855958690039408, "grad_norm": 1.8502761125564575, "learning_rate": 0.0008143769533904063, "loss": 3.7619, "step": 21865 }, { "epoch": 1.4859355890746024, "grad_norm": 2.2759130001068115, "learning_rate": 0.0008143344883815737, "loss": 3.4336, "step": 21870 }, { "epoch": 1.4862753091452643, "grad_norm": 1.8644014596939087, "learning_rate": 0.0008142920233727409, "loss": 3.4927, "step": 21875 }, { "epoch": 1.4866150292159261, "grad_norm": 2.108442544937134, "learning_rate": 0.0008142495583639081, "loss": 3.5896, "step": 21880 }, { "epoch": 1.4869547492865878, 
"grad_norm": 1.539209246635437, "learning_rate": 0.0008142070933550755, "loss": 3.3042, "step": 21885 }, { "epoch": 1.4872944693572496, "grad_norm": 2.450396776199341, "learning_rate": 0.0008141646283462427, "loss": 3.3833, "step": 21890 }, { "epoch": 1.4876341894279115, "grad_norm": 1.9892385005950928, "learning_rate": 0.0008141221633374099, "loss": 3.6473, "step": 21895 }, { "epoch": 1.487973909498573, "grad_norm": 2.026686906814575, "learning_rate": 0.0008140796983285774, "loss": 3.4312, "step": 21900 }, { "epoch": 1.488313629569235, "grad_norm": 2.02937388420105, "learning_rate": 0.0008140372333197446, "loss": 3.6872, "step": 21905 }, { "epoch": 1.4886533496398968, "grad_norm": 1.771664023399353, "learning_rate": 0.0008139947683109118, "loss": 3.2402, "step": 21910 }, { "epoch": 1.4889930697105584, "grad_norm": 2.3555800914764404, "learning_rate": 0.0008139523033020791, "loss": 3.4955, "step": 21915 }, { "epoch": 1.4893327897812203, "grad_norm": 2.2921483516693115, "learning_rate": 0.0008139098382932464, "loss": 3.4468, "step": 21920 }, { "epoch": 1.4896725098518822, "grad_norm": 3.2681171894073486, "learning_rate": 0.0008138673732844136, "loss": 3.3557, "step": 21925 }, { "epoch": 1.4900122299225438, "grad_norm": 2.044222831726074, "learning_rate": 0.0008138249082755809, "loss": 3.7313, "step": 21930 }, { "epoch": 1.4903519499932056, "grad_norm": 1.4820705652236938, "learning_rate": 0.0008137824432667483, "loss": 3.5258, "step": 21935 }, { "epoch": 1.4906916700638675, "grad_norm": 1.8747202157974243, "learning_rate": 0.0008137399782579155, "loss": 3.7247, "step": 21940 }, { "epoch": 1.4910313901345291, "grad_norm": 2.001749277114868, "learning_rate": 0.0008136975132490828, "loss": 3.6287, "step": 21945 }, { "epoch": 1.491371110205191, "grad_norm": 2.115784168243408, "learning_rate": 0.00081365504824025, "loss": 3.6922, "step": 21950 }, { "epoch": 1.4917108302758528, "grad_norm": 1.79408860206604, "learning_rate": 0.0008136125832314173, "loss": 3.5362, "step": 
21955 }, { "epoch": 1.4920505503465145, "grad_norm": 1.6803104877471924, "learning_rate": 0.0008135701182225846, "loss": 3.5284, "step": 21960 }, { "epoch": 1.4923902704171763, "grad_norm": 2.3763747215270996, "learning_rate": 0.0008135276532137518, "loss": 3.4458, "step": 21965 }, { "epoch": 1.492729990487838, "grad_norm": 2.117032766342163, "learning_rate": 0.0008134851882049192, "loss": 3.3794, "step": 21970 }, { "epoch": 1.4930697105584998, "grad_norm": 2.3199939727783203, "learning_rate": 0.0008134427231960865, "loss": 3.6959, "step": 21975 }, { "epoch": 1.4934094306291616, "grad_norm": 1.9181023836135864, "learning_rate": 0.0008134002581872537, "loss": 3.6522, "step": 21980 }, { "epoch": 1.4937491506998233, "grad_norm": 2.5282535552978516, "learning_rate": 0.000813357793178421, "loss": 3.3597, "step": 21985 }, { "epoch": 1.4940888707704851, "grad_norm": 1.6340575218200684, "learning_rate": 0.0008133153281695883, "loss": 3.5092, "step": 21990 }, { "epoch": 1.4944285908411468, "grad_norm": 1.9217201471328735, "learning_rate": 0.0008132728631607555, "loss": 3.5507, "step": 21995 }, { "epoch": 1.4947683109118086, "grad_norm": 1.6013096570968628, "learning_rate": 0.0008132303981519227, "loss": 3.6763, "step": 22000 }, { "epoch": 1.4951080309824705, "grad_norm": 2.12450909614563, "learning_rate": 0.0008131879331430902, "loss": 3.7735, "step": 22005 }, { "epoch": 1.495447751053132, "grad_norm": 1.9750933647155762, "learning_rate": 0.0008131454681342574, "loss": 3.4755, "step": 22010 }, { "epoch": 1.495787471123794, "grad_norm": 2.157233476638794, "learning_rate": 0.0008131030031254246, "loss": 3.6712, "step": 22015 }, { "epoch": 1.4961271911944558, "grad_norm": 1.7335560321807861, "learning_rate": 0.000813060538116592, "loss": 3.6182, "step": 22020 }, { "epoch": 1.4964669112651174, "grad_norm": 1.7547643184661865, "learning_rate": 0.0008130180731077592, "loss": 3.3969, "step": 22025 }, { "epoch": 1.4968066313357793, "grad_norm": 1.935941219329834, "learning_rate": 
0.0008129756080989264, "loss": 3.3939, "step": 22030 }, { "epoch": 1.4971463514064411, "grad_norm": 1.9763280153274536, "learning_rate": 0.0008129331430900938, "loss": 3.7618, "step": 22035 }, { "epoch": 1.4974860714771028, "grad_norm": 1.906431794166565, "learning_rate": 0.0008128906780812611, "loss": 3.5065, "step": 22040 }, { "epoch": 1.4978257915477646, "grad_norm": 1.8448200225830078, "learning_rate": 0.0008128482130724283, "loss": 3.4918, "step": 22045 }, { "epoch": 1.4981655116184265, "grad_norm": 1.9974302053451538, "learning_rate": 0.0008128057480635957, "loss": 3.6102, "step": 22050 }, { "epoch": 1.4985052316890881, "grad_norm": 1.831827998161316, "learning_rate": 0.0008127632830547629, "loss": 3.416, "step": 22055 }, { "epoch": 1.49884495175975, "grad_norm": 1.7509565353393555, "learning_rate": 0.0008127208180459301, "loss": 3.7111, "step": 22060 }, { "epoch": 1.4991846718304118, "grad_norm": 1.677467942237854, "learning_rate": 0.0008126783530370974, "loss": 3.8879, "step": 22065 }, { "epoch": 1.4995243919010735, "grad_norm": 1.75126314163208, "learning_rate": 0.0008126358880282647, "loss": 3.437, "step": 22070 }, { "epoch": 1.4998641119717353, "grad_norm": 1.9610830545425415, "learning_rate": 0.000812593423019432, "loss": 3.6867, "step": 22075 }, { "epoch": 1.5002038320423972, "grad_norm": 1.998129963874817, "learning_rate": 0.0008125509580105993, "loss": 3.5782, "step": 22080 }, { "epoch": 1.5005435521130588, "grad_norm": 1.8608635663986206, "learning_rate": 0.0008125084930017666, "loss": 3.395, "step": 22085 }, { "epoch": 1.5008832721837206, "grad_norm": 2.1591687202453613, "learning_rate": 0.0008124660279929338, "loss": 3.4565, "step": 22090 }, { "epoch": 1.5012229922543825, "grad_norm": 2.394547462463379, "learning_rate": 0.0008124235629841011, "loss": 3.5205, "step": 22095 }, { "epoch": 1.5015627123250441, "grad_norm": 2.0240533351898193, "learning_rate": 0.0008123810979752683, "loss": 3.3889, "step": 22100 }, { "epoch": 1.501902432395706, 
"grad_norm": 2.025813102722168, "learning_rate": 0.0008123386329664356, "loss": 3.565, "step": 22105 }, { "epoch": 1.5022421524663678, "grad_norm": 1.9731045961380005, "learning_rate": 0.000812296167957603, "loss": 3.71, "step": 22110 }, { "epoch": 1.5025818725370295, "grad_norm": 2.1531028747558594, "learning_rate": 0.0008122537029487702, "loss": 3.6519, "step": 22115 }, { "epoch": 1.5029215926076913, "grad_norm": 1.8579922914505005, "learning_rate": 0.0008122112379399375, "loss": 3.4526, "step": 22120 }, { "epoch": 1.5032613126783532, "grad_norm": 1.9329519271850586, "learning_rate": 0.0008121687729311048, "loss": 3.5543, "step": 22125 }, { "epoch": 1.5036010327490148, "grad_norm": 1.987195372581482, "learning_rate": 0.000812126307922272, "loss": 3.5177, "step": 22130 }, { "epoch": 1.5039407528196764, "grad_norm": 2.876424789428711, "learning_rate": 0.0008120838429134394, "loss": 3.5845, "step": 22135 }, { "epoch": 1.5042804728903385, "grad_norm": 2.2717783451080322, "learning_rate": 0.0008120413779046066, "loss": 3.5889, "step": 22140 }, { "epoch": 1.5046201929610001, "grad_norm": 1.9590967893600464, "learning_rate": 0.0008119989128957739, "loss": 3.666, "step": 22145 }, { "epoch": 1.5049599130316618, "grad_norm": 2.2846062183380127, "learning_rate": 0.0008119564478869413, "loss": 3.7276, "step": 22150 }, { "epoch": 1.5052996331023238, "grad_norm": 1.9924860000610352, "learning_rate": 0.0008119139828781085, "loss": 3.5535, "step": 22155 }, { "epoch": 1.5056393531729855, "grad_norm": 2.300318956375122, "learning_rate": 0.0008118715178692757, "loss": 3.6749, "step": 22160 }, { "epoch": 1.505979073243647, "grad_norm": 2.52394700050354, "learning_rate": 0.000811829052860443, "loss": 3.6939, "step": 22165 }, { "epoch": 1.506318793314309, "grad_norm": 2.408440589904785, "learning_rate": 0.0008117865878516103, "loss": 3.4544, "step": 22170 }, { "epoch": 1.5066585133849708, "grad_norm": 2.3713173866271973, "learning_rate": 0.0008117441228427775, "loss": 3.532, "step": 
22175 }, { "epoch": 1.5069982334556324, "grad_norm": 2.0477843284606934, "learning_rate": 0.0008117016578339449, "loss": 3.6266, "step": 22180 }, { "epoch": 1.5073379535262943, "grad_norm": 1.830551028251648, "learning_rate": 0.0008116591928251122, "loss": 3.8, "step": 22185 }, { "epoch": 1.5076776735969561, "grad_norm": 2.4603114128112793, "learning_rate": 0.0008116167278162794, "loss": 3.2743, "step": 22190 }, { "epoch": 1.5080173936676178, "grad_norm": 1.9524027109146118, "learning_rate": 0.0008115742628074467, "loss": 3.6106, "step": 22195 }, { "epoch": 1.5083571137382796, "grad_norm": 1.7562544345855713, "learning_rate": 0.000811531797798614, "loss": 3.5514, "step": 22200 }, { "epoch": 1.5086968338089415, "grad_norm": 2.2327163219451904, "learning_rate": 0.0008114893327897812, "loss": 3.8027, "step": 22205 }, { "epoch": 1.5090365538796031, "grad_norm": 2.232172966003418, "learning_rate": 0.0008114468677809485, "loss": 3.4494, "step": 22210 }, { "epoch": 1.509376273950265, "grad_norm": 1.5522677898406982, "learning_rate": 0.0008114044027721158, "loss": 3.6431, "step": 22215 }, { "epoch": 1.5097159940209268, "grad_norm": 2.9260168075561523, "learning_rate": 0.0008113619377632831, "loss": 3.4643, "step": 22220 }, { "epoch": 1.5100557140915885, "grad_norm": 2.0596628189086914, "learning_rate": 0.0008113194727544504, "loss": 3.52, "step": 22225 }, { "epoch": 1.5103954341622503, "grad_norm": 2.032020330429077, "learning_rate": 0.0008112770077456176, "loss": 3.459, "step": 22230 }, { "epoch": 1.5107351542329122, "grad_norm": 2.0872647762298584, "learning_rate": 0.0008112345427367849, "loss": 3.3955, "step": 22235 }, { "epoch": 1.5110748743035738, "grad_norm": 1.7737336158752441, "learning_rate": 0.0008111920777279522, "loss": 3.6024, "step": 22240 }, { "epoch": 1.5114145943742356, "grad_norm": 1.6619017124176025, "learning_rate": 0.0008111496127191194, "loss": 3.3535, "step": 22245 }, { "epoch": 1.5117543144448975, "grad_norm": 2.0028533935546875, "learning_rate": 
0.0008111071477102867, "loss": 3.6857, "step": 22250 }, { "epoch": 1.5120940345155591, "grad_norm": 2.2204816341400146, "learning_rate": 0.0008110646827014541, "loss": 3.7382, "step": 22255 }, { "epoch": 1.512433754586221, "grad_norm": 1.9354393482208252, "learning_rate": 0.0008110222176926213, "loss": 3.3855, "step": 22260 }, { "epoch": 1.5127734746568828, "grad_norm": 1.7026071548461914, "learning_rate": 0.0008109797526837885, "loss": 3.4972, "step": 22265 }, { "epoch": 1.5131131947275445, "grad_norm": 1.8766762018203735, "learning_rate": 0.0008109372876749559, "loss": 3.4851, "step": 22270 }, { "epoch": 1.5134529147982063, "grad_norm": 1.918897271156311, "learning_rate": 0.0008108948226661231, "loss": 3.385, "step": 22275 }, { "epoch": 1.5137926348688682, "grad_norm": 2.854515552520752, "learning_rate": 0.0008108523576572903, "loss": 3.4601, "step": 22280 }, { "epoch": 1.5141323549395298, "grad_norm": 2.012726306915283, "learning_rate": 0.0008108098926484578, "loss": 3.6083, "step": 22285 }, { "epoch": 1.5144720750101917, "grad_norm": 1.9514278173446655, "learning_rate": 0.000810767427639625, "loss": 3.6467, "step": 22290 }, { "epoch": 1.5148117950808535, "grad_norm": 2.4605417251586914, "learning_rate": 0.0008107249626307922, "loss": 3.4675, "step": 22295 }, { "epoch": 1.5151515151515151, "grad_norm": 1.8188761472702026, "learning_rate": 0.0008106824976219596, "loss": 3.7029, "step": 22300 }, { "epoch": 1.5154912352221768, "grad_norm": 1.701676607131958, "learning_rate": 0.0008106400326131268, "loss": 3.7499, "step": 22305 }, { "epoch": 1.5158309552928388, "grad_norm": 2.1593692302703857, "learning_rate": 0.000810597567604294, "loss": 3.5408, "step": 22310 }, { "epoch": 1.5161706753635005, "grad_norm": 2.031914710998535, "learning_rate": 0.0008105551025954614, "loss": 3.6828, "step": 22315 }, { "epoch": 1.516510395434162, "grad_norm": 1.7940248250961304, "learning_rate": 0.0008105126375866287, "loss": 3.6824, "step": 22320 }, { "epoch": 1.5168501155048242, 
"grad_norm": 2.5083394050598145, "learning_rate": 0.0008104701725777959, "loss": 3.5315, "step": 22325 }, { "epoch": 1.5171898355754858, "grad_norm": 2.302177906036377, "learning_rate": 0.0008104277075689632, "loss": 3.6497, "step": 22330 }, { "epoch": 1.5175295556461474, "grad_norm": 1.638199806213379, "learning_rate": 0.0008103852425601305, "loss": 3.5425, "step": 22335 }, { "epoch": 1.5178692757168093, "grad_norm": 1.9415283203125, "learning_rate": 0.0008103427775512977, "loss": 3.3747, "step": 22340 }, { "epoch": 1.5182089957874711, "grad_norm": 1.9189614057540894, "learning_rate": 0.000810300312542465, "loss": 3.435, "step": 22345 }, { "epoch": 1.5185487158581328, "grad_norm": 1.9706419706344604, "learning_rate": 0.0008102578475336324, "loss": 3.47, "step": 22350 }, { "epoch": 1.5188884359287946, "grad_norm": 1.898728609085083, "learning_rate": 0.0008102153825247996, "loss": 3.3439, "step": 22355 }, { "epoch": 1.5192281559994565, "grad_norm": 1.5313750505447388, "learning_rate": 0.0008101729175159669, "loss": 3.6737, "step": 22360 }, { "epoch": 1.5195678760701181, "grad_norm": 2.107032299041748, "learning_rate": 0.0008101304525071341, "loss": 3.3514, "step": 22365 }, { "epoch": 1.51990759614078, "grad_norm": 1.8630167245864868, "learning_rate": 0.0008100879874983014, "loss": 3.3954, "step": 22370 }, { "epoch": 1.5202473162114418, "grad_norm": 1.7611973285675049, "learning_rate": 0.0008100455224894687, "loss": 3.6689, "step": 22375 }, { "epoch": 1.5205870362821035, "grad_norm": 4.1480631828308105, "learning_rate": 0.0008100030574806359, "loss": 3.6621, "step": 22380 }, { "epoch": 1.5209267563527653, "grad_norm": 1.7615424394607544, "learning_rate": 0.0008099605924718033, "loss": 3.6693, "step": 22385 }, { "epoch": 1.5212664764234272, "grad_norm": 2.193552017211914, "learning_rate": 0.0008099181274629706, "loss": 3.5836, "step": 22390 }, { "epoch": 1.5216061964940888, "grad_norm": 2.3057873249053955, "learning_rate": 0.0008098756624541378, "loss": 3.6793, 
"step": 22395 }, { "epoch": 1.5219459165647506, "grad_norm": 1.885317087173462, "learning_rate": 0.000809833197445305, "loss": 3.7124, "step": 22400 }, { "epoch": 1.5222856366354125, "grad_norm": 2.422820568084717, "learning_rate": 0.0008097907324364724, "loss": 3.5847, "step": 22405 }, { "epoch": 1.5226253567060741, "grad_norm": 1.668961763381958, "learning_rate": 0.0008097482674276396, "loss": 3.9168, "step": 22410 }, { "epoch": 1.522965076776736, "grad_norm": 1.5841201543807983, "learning_rate": 0.0008097058024188068, "loss": 3.667, "step": 22415 }, { "epoch": 1.5233047968473978, "grad_norm": 2.265428304672241, "learning_rate": 0.0008096633374099743, "loss": 3.5906, "step": 22420 }, { "epoch": 1.5236445169180595, "grad_norm": 1.634851336479187, "learning_rate": 0.0008096208724011415, "loss": 3.646, "step": 22425 }, { "epoch": 1.5239842369887213, "grad_norm": 2.083467721939087, "learning_rate": 0.0008095784073923087, "loss": 3.3492, "step": 22430 }, { "epoch": 1.5243239570593832, "grad_norm": 2.0325052738189697, "learning_rate": 0.0008095359423834761, "loss": 3.4033, "step": 22435 }, { "epoch": 1.5246636771300448, "grad_norm": 2.253502130508423, "learning_rate": 0.0008094934773746433, "loss": 3.6163, "step": 22440 }, { "epoch": 1.5250033972007067, "grad_norm": 2.0456442832946777, "learning_rate": 0.0008094510123658105, "loss": 3.6859, "step": 22445 }, { "epoch": 1.5253431172713685, "grad_norm": 2.726410388946533, "learning_rate": 0.0008094085473569778, "loss": 3.5167, "step": 22450 }, { "epoch": 1.5256828373420301, "grad_norm": 2.068819046020508, "learning_rate": 0.0008093660823481452, "loss": 3.5679, "step": 22455 }, { "epoch": 1.526022557412692, "grad_norm": 1.9112718105316162, "learning_rate": 0.0008093236173393124, "loss": 3.7026, "step": 22460 }, { "epoch": 1.5263622774833538, "grad_norm": 1.8180731534957886, "learning_rate": 0.0008092811523304797, "loss": 3.6465, "step": 22465 }, { "epoch": 1.5267019975540155, "grad_norm": 1.7412792444229126, 
"learning_rate": 0.000809238687321647, "loss": 3.5068, "step": 22470 }, { "epoch": 1.527041717624677, "grad_norm": 2.02544903755188, "learning_rate": 0.0008091962223128143, "loss": 3.839, "step": 22475 }, { "epoch": 1.5273814376953392, "grad_norm": 2.412651538848877, "learning_rate": 0.0008091537573039815, "loss": 3.4915, "step": 22480 }, { "epoch": 1.5277211577660008, "grad_norm": 1.613718867301941, "learning_rate": 0.0008091112922951488, "loss": 3.6378, "step": 22485 }, { "epoch": 1.5280608778366624, "grad_norm": 2.5328497886657715, "learning_rate": 0.0008090688272863162, "loss": 3.6208, "step": 22490 }, { "epoch": 1.5284005979073245, "grad_norm": 2.1242482662200928, "learning_rate": 0.0008090263622774834, "loss": 3.637, "step": 22495 }, { "epoch": 1.5287403179779862, "grad_norm": 2.477501630783081, "learning_rate": 0.0008089838972686506, "loss": 3.5206, "step": 22500 }, { "epoch": 1.5290800380486478, "grad_norm": 2.021376609802246, "learning_rate": 0.000808941432259818, "loss": 3.44, "step": 22505 }, { "epoch": 1.5294197581193096, "grad_norm": 2.0619564056396484, "learning_rate": 0.0008088989672509852, "loss": 3.6108, "step": 22510 }, { "epoch": 1.5297594781899715, "grad_norm": 2.018454074859619, "learning_rate": 0.0008088565022421524, "loss": 3.3956, "step": 22515 }, { "epoch": 1.5300991982606331, "grad_norm": 2.295484781265259, "learning_rate": 0.0008088140372333198, "loss": 3.3592, "step": 22520 }, { "epoch": 1.530438918331295, "grad_norm": 2.0279555320739746, "learning_rate": 0.0008087715722244871, "loss": 3.6313, "step": 22525 }, { "epoch": 1.5307786384019568, "grad_norm": 1.891229271888733, "learning_rate": 0.0008087291072156543, "loss": 3.4625, "step": 22530 }, { "epoch": 1.5311183584726185, "grad_norm": 1.8785299062728882, "learning_rate": 0.0008086866422068217, "loss": 3.7592, "step": 22535 }, { "epoch": 1.5314580785432803, "grad_norm": 2.2332873344421387, "learning_rate": 0.0008086441771979889, "loss": 3.5597, "step": 22540 }, { "epoch": 
1.5317977986139422, "grad_norm": 1.531746506690979, "learning_rate": 0.0008086017121891561, "loss": 3.4729, "step": 22545 }, { "epoch": 1.5321375186846038, "grad_norm": 2.253300189971924, "learning_rate": 0.0008085592471803234, "loss": 3.5591, "step": 22550 }, { "epoch": 1.5324772387552656, "grad_norm": 2.0839526653289795, "learning_rate": 0.0008085167821714907, "loss": 3.6456, "step": 22555 }, { "epoch": 1.5328169588259275, "grad_norm": 1.7334197759628296, "learning_rate": 0.000808474317162658, "loss": 3.5789, "step": 22560 }, { "epoch": 1.5331566788965891, "grad_norm": 2.6386163234710693, "learning_rate": 0.0008084318521538253, "loss": 3.5221, "step": 22565 }, { "epoch": 1.533496398967251, "grad_norm": 1.8966366052627563, "learning_rate": 0.0008083893871449926, "loss": 3.7309, "step": 22570 }, { "epoch": 1.5338361190379128, "grad_norm": 1.9586222171783447, "learning_rate": 0.0008083469221361598, "loss": 3.481, "step": 22575 }, { "epoch": 1.5341758391085745, "grad_norm": 2.740286111831665, "learning_rate": 0.0008083044571273271, "loss": 3.639, "step": 22580 }, { "epoch": 1.5345155591792363, "grad_norm": 2.6330373287200928, "learning_rate": 0.0008082619921184944, "loss": 3.7516, "step": 22585 }, { "epoch": 1.5348552792498982, "grad_norm": 2.1927294731140137, "learning_rate": 0.0008082195271096616, "loss": 3.8481, "step": 22590 }, { "epoch": 1.5351949993205598, "grad_norm": 1.9770712852478027, "learning_rate": 0.000808177062100829, "loss": 3.8573, "step": 22595 }, { "epoch": 1.5355347193912217, "grad_norm": 1.8687083721160889, "learning_rate": 0.0008081345970919962, "loss": 3.5771, "step": 22600 }, { "epoch": 1.5358744394618835, "grad_norm": 1.54700767993927, "learning_rate": 0.0008080921320831635, "loss": 3.5306, "step": 22605 }, { "epoch": 1.5362141595325451, "grad_norm": 2.5362298488616943, "learning_rate": 0.0008080496670743308, "loss": 3.5341, "step": 22610 }, { "epoch": 1.536553879603207, "grad_norm": 1.6495487689971924, "learning_rate": 0.000808007202065498, 
"loss": 3.8231, "step": 22615 }, { "epoch": 1.5368935996738688, "grad_norm": 1.7076336145401, "learning_rate": 0.0008079647370566653, "loss": 3.5739, "step": 22620 }, { "epoch": 1.5372333197445305, "grad_norm": 1.6922165155410767, "learning_rate": 0.0008079222720478326, "loss": 3.5564, "step": 22625 }, { "epoch": 1.5375730398151923, "grad_norm": 1.9567989110946655, "learning_rate": 0.0008078798070389999, "loss": 3.5897, "step": 22630 }, { "epoch": 1.5379127598858542, "grad_norm": 1.9561666250228882, "learning_rate": 0.0008078373420301672, "loss": 3.6739, "step": 22635 }, { "epoch": 1.5382524799565158, "grad_norm": 1.6078495979309082, "learning_rate": 0.0008077948770213345, "loss": 3.4883, "step": 22640 }, { "epoch": 1.5385922000271774, "grad_norm": 1.9302716255187988, "learning_rate": 0.0008077524120125017, "loss": 3.5587, "step": 22645 }, { "epoch": 1.5389319200978395, "grad_norm": 1.9403886795043945, "learning_rate": 0.0008077099470036689, "loss": 3.5571, "step": 22650 }, { "epoch": 1.5392716401685012, "grad_norm": 1.8927186727523804, "learning_rate": 0.0008076674819948363, "loss": 3.5335, "step": 22655 }, { "epoch": 1.5396113602391628, "grad_norm": 2.025461196899414, "learning_rate": 0.0008076250169860035, "loss": 3.5498, "step": 22660 }, { "epoch": 1.5399510803098249, "grad_norm": 2.2804272174835205, "learning_rate": 0.0008075825519771708, "loss": 3.4983, "step": 22665 }, { "epoch": 1.5402908003804865, "grad_norm": 1.728312611579895, "learning_rate": 0.0008075400869683382, "loss": 3.5517, "step": 22670 }, { "epoch": 1.5406305204511481, "grad_norm": 1.7266201972961426, "learning_rate": 0.0008074976219595054, "loss": 3.6224, "step": 22675 }, { "epoch": 1.54097024052181, "grad_norm": 2.2603440284729004, "learning_rate": 0.0008074551569506726, "loss": 3.6549, "step": 22680 }, { "epoch": 1.5413099605924718, "grad_norm": 1.7375622987747192, "learning_rate": 0.00080741269194184, "loss": 3.6532, "step": 22685 }, { "epoch": 1.5416496806631335, "grad_norm": 
1.7503305673599243, "learning_rate": 0.0008073702269330072, "loss": 3.3766, "step": 22690 }, { "epoch": 1.5419894007337953, "grad_norm": 2.074916124343872, "learning_rate": 0.0008073277619241744, "loss": 3.7255, "step": 22695 }, { "epoch": 1.5423291208044572, "grad_norm": 2.2191104888916016, "learning_rate": 0.0008072852969153418, "loss": 3.4455, "step": 22700 }, { "epoch": 1.5426688408751188, "grad_norm": 1.4216032028198242, "learning_rate": 0.0008072428319065091, "loss": 3.5719, "step": 22705 }, { "epoch": 1.5430085609457806, "grad_norm": 2.398455858230591, "learning_rate": 0.0008072003668976763, "loss": 3.4544, "step": 22710 }, { "epoch": 1.5433482810164425, "grad_norm": 2.0315463542938232, "learning_rate": 0.0008071579018888436, "loss": 3.5453, "step": 22715 }, { "epoch": 1.5436880010871041, "grad_norm": 1.840645670890808, "learning_rate": 0.0008071154368800109, "loss": 3.3881, "step": 22720 }, { "epoch": 1.544027721157766, "grad_norm": 1.9088776111602783, "learning_rate": 0.0008070729718711781, "loss": 3.5867, "step": 22725 }, { "epoch": 1.5443674412284278, "grad_norm": 2.7887651920318604, "learning_rate": 0.0008070305068623454, "loss": 3.7628, "step": 22730 }, { "epoch": 1.5447071612990895, "grad_norm": 1.6975140571594238, "learning_rate": 0.0008069880418535128, "loss": 3.4597, "step": 22735 }, { "epoch": 1.5450468813697513, "grad_norm": 1.4544389247894287, "learning_rate": 0.00080694557684468, "loss": 3.6895, "step": 22740 }, { "epoch": 1.5453866014404132, "grad_norm": 1.858152985572815, "learning_rate": 0.0008069031118358473, "loss": 3.4682, "step": 22745 }, { "epoch": 1.5457263215110748, "grad_norm": 1.946869134902954, "learning_rate": 0.0008068606468270145, "loss": 3.7746, "step": 22750 }, { "epoch": 1.5460660415817367, "grad_norm": 1.7532154321670532, "learning_rate": 0.0008068181818181818, "loss": 3.4838, "step": 22755 }, { "epoch": 1.5464057616523985, "grad_norm": 2.1532297134399414, "learning_rate": 0.0008067757168093491, "loss": 3.3495, "step": 22760 
}, { "epoch": 1.5467454817230601, "grad_norm": 1.6621699333190918, "learning_rate": 0.0008067332518005163, "loss": 3.4442, "step": 22765 }, { "epoch": 1.547085201793722, "grad_norm": 2.1185593605041504, "learning_rate": 0.0008066907867916837, "loss": 3.5056, "step": 22770 }, { "epoch": 1.5474249218643839, "grad_norm": 2.0713894367218018, "learning_rate": 0.000806648321782851, "loss": 3.7953, "step": 22775 }, { "epoch": 1.5477646419350455, "grad_norm": 2.3451974391937256, "learning_rate": 0.0008066058567740182, "loss": 3.4991, "step": 22780 }, { "epoch": 1.5481043620057073, "grad_norm": 1.9724652767181396, "learning_rate": 0.0008065633917651854, "loss": 3.6784, "step": 22785 }, { "epoch": 1.5484440820763692, "grad_norm": 2.384814500808716, "learning_rate": 0.0008065209267563528, "loss": 3.6558, "step": 22790 }, { "epoch": 1.5487838021470308, "grad_norm": 2.0722458362579346, "learning_rate": 0.00080647846174752, "loss": 3.8514, "step": 22795 }, { "epoch": 1.5491235222176927, "grad_norm": 1.7892355918884277, "learning_rate": 0.0008064359967386872, "loss": 3.336, "step": 22800 }, { "epoch": 1.5494632422883545, "grad_norm": 2.045621633529663, "learning_rate": 0.0008063935317298547, "loss": 3.3888, "step": 22805 }, { "epoch": 1.5498029623590162, "grad_norm": 2.0010437965393066, "learning_rate": 0.0008063510667210219, "loss": 3.4346, "step": 22810 }, { "epoch": 1.5501426824296778, "grad_norm": 1.915148138999939, "learning_rate": 0.0008063086017121892, "loss": 3.6596, "step": 22815 }, { "epoch": 1.5504824025003399, "grad_norm": 1.5862075090408325, "learning_rate": 0.0008062661367033565, "loss": 3.4499, "step": 22820 }, { "epoch": 1.5508221225710015, "grad_norm": 2.384075880050659, "learning_rate": 0.0008062236716945237, "loss": 3.5619, "step": 22825 }, { "epoch": 1.5511618426416631, "grad_norm": 2.15838360786438, "learning_rate": 0.000806181206685691, "loss": 3.7122, "step": 22830 }, { "epoch": 1.5515015627123252, "grad_norm": 2.1626265048980713, "learning_rate": 
0.0008061387416768582, "loss": 3.7778, "step": 22835 }, { "epoch": 1.5518412827829868, "grad_norm": 2.2619287967681885, "learning_rate": 0.0008060962766680256, "loss": 3.6233, "step": 22840 }, { "epoch": 1.5521810028536485, "grad_norm": 2.1010324954986572, "learning_rate": 0.0008060538116591929, "loss": 3.6426, "step": 22845 }, { "epoch": 1.5525207229243103, "grad_norm": 1.6838980913162231, "learning_rate": 0.0008060113466503601, "loss": 3.5473, "step": 22850 }, { "epoch": 1.5528604429949722, "grad_norm": 2.4602622985839844, "learning_rate": 0.0008059688816415274, "loss": 3.4745, "step": 22855 }, { "epoch": 1.5532001630656338, "grad_norm": 2.02461576461792, "learning_rate": 0.0008059264166326947, "loss": 3.5703, "step": 22860 }, { "epoch": 1.5535398831362957, "grad_norm": 1.8596128225326538, "learning_rate": 0.0008058839516238619, "loss": 3.7425, "step": 22865 }, { "epoch": 1.5538796032069575, "grad_norm": 2.263514280319214, "learning_rate": 0.0008058414866150292, "loss": 3.5505, "step": 22870 }, { "epoch": 1.5542193232776191, "grad_norm": 1.8337799310684204, "learning_rate": 0.0008057990216061966, "loss": 3.5161, "step": 22875 }, { "epoch": 1.554559043348281, "grad_norm": 1.5120456218719482, "learning_rate": 0.0008057565565973638, "loss": 3.5567, "step": 22880 }, { "epoch": 1.5548987634189428, "grad_norm": 2.0465762615203857, "learning_rate": 0.000805714091588531, "loss": 3.5883, "step": 22885 }, { "epoch": 1.5552384834896045, "grad_norm": 2.6862144470214844, "learning_rate": 0.0008056716265796984, "loss": 3.6943, "step": 22890 }, { "epoch": 1.5555782035602663, "grad_norm": 1.7768833637237549, "learning_rate": 0.0008056291615708656, "loss": 3.6712, "step": 22895 }, { "epoch": 1.5559179236309282, "grad_norm": 2.1972455978393555, "learning_rate": 0.0008055866965620328, "loss": 3.5456, "step": 22900 }, { "epoch": 1.5562576437015898, "grad_norm": 1.6156269311904907, "learning_rate": 0.0008055442315532003, "loss": 3.4767, "step": 22905 }, { "epoch": 1.5565973637722517, 
"grad_norm": 1.9243316650390625, "learning_rate": 0.0008055017665443675, "loss": 3.6375, "step": 22910 }, { "epoch": 1.5569370838429135, "grad_norm": 2.0702719688415527, "learning_rate": 0.0008054593015355347, "loss": 3.6653, "step": 22915 }, { "epoch": 1.5572768039135751, "grad_norm": 1.6640349626541138, "learning_rate": 0.0008054168365267021, "loss": 3.4942, "step": 22920 }, { "epoch": 1.557616523984237, "grad_norm": 1.696498990058899, "learning_rate": 0.0008053743715178693, "loss": 3.5948, "step": 22925 }, { "epoch": 1.5579562440548989, "grad_norm": 1.8350646495819092, "learning_rate": 0.0008053319065090365, "loss": 3.5437, "step": 22930 }, { "epoch": 1.5582959641255605, "grad_norm": 1.8170053958892822, "learning_rate": 0.0008052894415002038, "loss": 3.5156, "step": 22935 }, { "epoch": 1.5586356841962223, "grad_norm": 1.5956627130508423, "learning_rate": 0.0008052469764913712, "loss": 3.6249, "step": 22940 }, { "epoch": 1.5589754042668842, "grad_norm": 2.1653201580047607, "learning_rate": 0.0008052045114825384, "loss": 3.399, "step": 22945 }, { "epoch": 1.5593151243375458, "grad_norm": 1.8230693340301514, "learning_rate": 0.0008051620464737057, "loss": 3.5429, "step": 22950 }, { "epoch": 1.5596548444082077, "grad_norm": 1.8250477313995361, "learning_rate": 0.000805119581464873, "loss": 3.5677, "step": 22955 }, { "epoch": 1.5599945644788695, "grad_norm": 2.2031753063201904, "learning_rate": 0.0008050771164560402, "loss": 3.5672, "step": 22960 }, { "epoch": 1.5603342845495312, "grad_norm": 2.5282795429229736, "learning_rate": 0.0008050346514472075, "loss": 3.5929, "step": 22965 }, { "epoch": 1.560674004620193, "grad_norm": 2.11785888671875, "learning_rate": 0.0008049921864383748, "loss": 3.7284, "step": 22970 }, { "epoch": 1.5610137246908549, "grad_norm": 1.7575472593307495, "learning_rate": 0.0008049497214295421, "loss": 3.4005, "step": 22975 }, { "epoch": 1.5613534447615165, "grad_norm": 2.4142298698425293, "learning_rate": 0.0008049072564207094, "loss": 3.4138, 
"step": 22980 }, { "epoch": 1.5616931648321781, "grad_norm": 2.2150087356567383, "learning_rate": 0.0008048647914118766, "loss": 3.564, "step": 22985 }, { "epoch": 1.5620328849028402, "grad_norm": 2.2000248432159424, "learning_rate": 0.0008048223264030439, "loss": 3.8156, "step": 22990 }, { "epoch": 1.5623726049735018, "grad_norm": 1.4965729713439941, "learning_rate": 0.0008047798613942112, "loss": 3.4675, "step": 22995 }, { "epoch": 1.5627123250441635, "grad_norm": 2.9305260181427, "learning_rate": 0.0008047373963853784, "loss": 3.4851, "step": 23000 }, { "epoch": 1.5630520451148255, "grad_norm": 2.3418734073638916, "learning_rate": 0.0008046949313765457, "loss": 3.587, "step": 23005 }, { "epoch": 1.5633917651854872, "grad_norm": 2.022758960723877, "learning_rate": 0.0008046524663677131, "loss": 3.4321, "step": 23010 }, { "epoch": 1.5637314852561488, "grad_norm": 2.1269822120666504, "learning_rate": 0.0008046100013588803, "loss": 3.932, "step": 23015 }, { "epoch": 1.5640712053268107, "grad_norm": 1.995998501777649, "learning_rate": 0.0008045675363500476, "loss": 3.7424, "step": 23020 }, { "epoch": 1.5644109253974725, "grad_norm": 1.9974274635314941, "learning_rate": 0.0008045250713412149, "loss": 3.5347, "step": 23025 }, { "epoch": 1.5647506454681341, "grad_norm": 2.3760101795196533, "learning_rate": 0.0008044826063323821, "loss": 3.4536, "step": 23030 }, { "epoch": 1.565090365538796, "grad_norm": 1.951073408126831, "learning_rate": 0.0008044401413235493, "loss": 3.826, "step": 23035 }, { "epoch": 1.5654300856094578, "grad_norm": 1.820180058479309, "learning_rate": 0.0008043976763147167, "loss": 3.5353, "step": 23040 }, { "epoch": 1.5657698056801195, "grad_norm": 1.7664583921432495, "learning_rate": 0.000804355211305884, "loss": 3.4872, "step": 23045 }, { "epoch": 1.5661095257507813, "grad_norm": 1.716953992843628, "learning_rate": 0.0008043127462970512, "loss": 3.5731, "step": 23050 }, { "epoch": 1.5664492458214432, "grad_norm": 2.5742619037628174, 
"learning_rate": 0.0008042702812882186, "loss": 3.6383, "step": 23055 }, { "epoch": 1.5667889658921048, "grad_norm": 1.6743345260620117, "learning_rate": 0.0008042278162793858, "loss": 3.7139, "step": 23060 }, { "epoch": 1.5671286859627667, "grad_norm": 1.9897360801696777, "learning_rate": 0.000804185351270553, "loss": 3.8571, "step": 23065 }, { "epoch": 1.5674684060334285, "grad_norm": 1.9221439361572266, "learning_rate": 0.0008041428862617204, "loss": 3.8105, "step": 23070 }, { "epoch": 1.5678081261040901, "grad_norm": 2.78865122795105, "learning_rate": 0.0008041004212528876, "loss": 3.5144, "step": 23075 }, { "epoch": 1.568147846174752, "grad_norm": 1.8160665035247803, "learning_rate": 0.0008040579562440549, "loss": 3.6445, "step": 23080 }, { "epoch": 1.5684875662454139, "grad_norm": 2.201864242553711, "learning_rate": 0.0008040154912352223, "loss": 3.5739, "step": 23085 }, { "epoch": 1.5688272863160755, "grad_norm": 1.9878965616226196, "learning_rate": 0.0008039730262263895, "loss": 3.5166, "step": 23090 }, { "epoch": 1.5691670063867373, "grad_norm": 2.1397674083709717, "learning_rate": 0.0008039305612175567, "loss": 3.4767, "step": 23095 }, { "epoch": 1.5695067264573992, "grad_norm": 1.7483055591583252, "learning_rate": 0.000803888096208724, "loss": 3.5804, "step": 23100 }, { "epoch": 1.5698464465280608, "grad_norm": 1.9744129180908203, "learning_rate": 0.0008038456311998913, "loss": 3.6381, "step": 23105 }, { "epoch": 1.5701861665987227, "grad_norm": 1.6878126859664917, "learning_rate": 0.0008038031661910585, "loss": 3.4674, "step": 23110 }, { "epoch": 1.5705258866693845, "grad_norm": 2.3550045490264893, "learning_rate": 0.0008037607011822259, "loss": 3.5596, "step": 23115 }, { "epoch": 1.5708656067400462, "grad_norm": 2.026339054107666, "learning_rate": 0.0008037182361733932, "loss": 3.4781, "step": 23120 }, { "epoch": 1.571205326810708, "grad_norm": 1.8421343564987183, "learning_rate": 0.0008036757711645604, "loss": 3.6177, "step": 23125 }, { "epoch": 
1.5715450468813699, "grad_norm": 1.6100283861160278, "learning_rate": 0.0008036333061557277, "loss": 3.4976, "step": 23130 }, { "epoch": 1.5718847669520315, "grad_norm": 1.5846879482269287, "learning_rate": 0.0008035908411468949, "loss": 3.7161, "step": 23135 }, { "epoch": 1.5722244870226934, "grad_norm": 1.986141324043274, "learning_rate": 0.0008035483761380622, "loss": 3.5981, "step": 23140 }, { "epoch": 1.5725642070933552, "grad_norm": 1.7982501983642578, "learning_rate": 0.0008035059111292295, "loss": 3.5932, "step": 23145 }, { "epoch": 1.5729039271640168, "grad_norm": 1.7617653608322144, "learning_rate": 0.0008034634461203968, "loss": 3.6576, "step": 23150 }, { "epoch": 1.5732436472346785, "grad_norm": 2.8327670097351074, "learning_rate": 0.0008034209811115642, "loss": 3.7519, "step": 23155 }, { "epoch": 1.5735833673053405, "grad_norm": 1.8763477802276611, "learning_rate": 0.0008033785161027314, "loss": 3.3896, "step": 23160 }, { "epoch": 1.5739230873760022, "grad_norm": 1.6474637985229492, "learning_rate": 0.0008033360510938986, "loss": 3.5339, "step": 23165 }, { "epoch": 1.5742628074466638, "grad_norm": 2.2579152584075928, "learning_rate": 0.000803293586085066, "loss": 3.5893, "step": 23170 }, { "epoch": 1.5746025275173259, "grad_norm": 2.3970000743865967, "learning_rate": 0.0008032511210762332, "loss": 3.712, "step": 23175 }, { "epoch": 1.5749422475879875, "grad_norm": 2.2037160396575928, "learning_rate": 0.0008032086560674004, "loss": 3.7302, "step": 23180 }, { "epoch": 1.5752819676586491, "grad_norm": 1.959623098373413, "learning_rate": 0.0008031661910585679, "loss": 3.5832, "step": 23185 }, { "epoch": 1.575621687729311, "grad_norm": 2.2642405033111572, "learning_rate": 0.0008031237260497351, "loss": 3.6863, "step": 23190 }, { "epoch": 1.5759614077999728, "grad_norm": 1.9343316555023193, "learning_rate": 0.0008030812610409023, "loss": 3.8625, "step": 23195 }, { "epoch": 1.5763011278706345, "grad_norm": 1.8544597625732422, "learning_rate": 
0.0008030387960320696, "loss": 3.3241, "step": 23200 }, { "epoch": 1.5766408479412963, "grad_norm": 2.0047850608825684, "learning_rate": 0.0008029963310232369, "loss": 3.4471, "step": 23205 }, { "epoch": 1.5769805680119582, "grad_norm": 3.2819623947143555, "learning_rate": 0.0008029538660144041, "loss": 3.5454, "step": 23210 }, { "epoch": 1.5773202880826198, "grad_norm": 1.8410292863845825, "learning_rate": 0.0008029114010055714, "loss": 3.5572, "step": 23215 }, { "epoch": 1.5776600081532817, "grad_norm": 1.8944833278656006, "learning_rate": 0.0008028689359967388, "loss": 3.558, "step": 23220 }, { "epoch": 1.5779997282239435, "grad_norm": 1.6465057134628296, "learning_rate": 0.000802826470987906, "loss": 3.6984, "step": 23225 }, { "epoch": 1.5783394482946052, "grad_norm": 1.955043077468872, "learning_rate": 0.0008027840059790733, "loss": 3.4006, "step": 23230 }, { "epoch": 1.578679168365267, "grad_norm": 2.1535937786102295, "learning_rate": 0.0008027415409702405, "loss": 3.5382, "step": 23235 }, { "epoch": 1.5790188884359289, "grad_norm": 1.9891213178634644, "learning_rate": 0.0008026990759614078, "loss": 3.5647, "step": 23240 }, { "epoch": 1.5793586085065905, "grad_norm": 2.1322429180145264, "learning_rate": 0.0008026566109525751, "loss": 3.3557, "step": 23245 }, { "epoch": 1.5796983285772523, "grad_norm": 1.7771694660186768, "learning_rate": 0.0008026141459437423, "loss": 3.565, "step": 23250 }, { "epoch": 1.5800380486479142, "grad_norm": 2.2041847705841064, "learning_rate": 0.0008025716809349097, "loss": 3.7804, "step": 23255 }, { "epoch": 1.5803777687185758, "grad_norm": 1.970436930656433, "learning_rate": 0.000802529215926077, "loss": 3.6027, "step": 23260 }, { "epoch": 1.5807174887892377, "grad_norm": 2.334804058074951, "learning_rate": 0.0008024867509172442, "loss": 3.4888, "step": 23265 }, { "epoch": 1.5810572088598995, "grad_norm": 1.988473892211914, "learning_rate": 0.0008024442859084115, "loss": 3.5975, "step": 23270 }, { "epoch": 1.5813969289305612, 
"grad_norm": 1.8480181694030762, "learning_rate": 0.0008024018208995788, "loss": 3.8416, "step": 23275 }, { "epoch": 1.581736649001223, "grad_norm": 2.118663787841797, "learning_rate": 0.000802359355890746, "loss": 3.7667, "step": 23280 }, { "epoch": 1.5820763690718849, "grad_norm": 2.009413480758667, "learning_rate": 0.0008023168908819132, "loss": 3.4957, "step": 23285 }, { "epoch": 1.5824160891425465, "grad_norm": 2.8943519592285156, "learning_rate": 0.0008022744258730807, "loss": 3.229, "step": 23290 }, { "epoch": 1.5827558092132084, "grad_norm": 2.2433996200561523, "learning_rate": 0.0008022319608642479, "loss": 3.4786, "step": 23295 }, { "epoch": 1.5830955292838702, "grad_norm": 2.174924373626709, "learning_rate": 0.0008021894958554151, "loss": 3.6064, "step": 23300 }, { "epoch": 1.5834352493545318, "grad_norm": 2.523085832595825, "learning_rate": 0.0008021470308465825, "loss": 3.6004, "step": 23305 }, { "epoch": 1.5837749694251937, "grad_norm": 2.374117136001587, "learning_rate": 0.0008021045658377497, "loss": 3.468, "step": 23310 }, { "epoch": 1.5841146894958555, "grad_norm": 1.7930357456207275, "learning_rate": 0.0008020621008289169, "loss": 3.2933, "step": 23315 }, { "epoch": 1.5844544095665172, "grad_norm": 2.3486101627349854, "learning_rate": 0.0008020196358200843, "loss": 3.3696, "step": 23320 }, { "epoch": 1.5847941296371788, "grad_norm": 2.087818145751953, "learning_rate": 0.0008019771708112516, "loss": 3.4955, "step": 23325 }, { "epoch": 1.5851338497078409, "grad_norm": 1.8286938667297363, "learning_rate": 0.0008019347058024188, "loss": 3.5473, "step": 23330 }, { "epoch": 1.5854735697785025, "grad_norm": 1.8569823503494263, "learning_rate": 0.0008018922407935861, "loss": 3.5777, "step": 23335 }, { "epoch": 1.5858132898491641, "grad_norm": 2.386308431625366, "learning_rate": 0.0008018497757847534, "loss": 3.4189, "step": 23340 }, { "epoch": 1.5861530099198262, "grad_norm": 2.3880155086517334, "learning_rate": 0.0008018073107759206, "loss": 3.6138, 
"step": 23345 }, { "epoch": 1.5864927299904878, "grad_norm": 1.748098611831665, "learning_rate": 0.0008017648457670879, "loss": 3.9085, "step": 23350 }, { "epoch": 1.5868324500611495, "grad_norm": 2.520569086074829, "learning_rate": 0.0008017223807582552, "loss": 3.6247, "step": 23355 }, { "epoch": 1.5871721701318113, "grad_norm": 2.343965530395508, "learning_rate": 0.0008016799157494225, "loss": 3.4763, "step": 23360 }, { "epoch": 1.5875118902024732, "grad_norm": 1.733153223991394, "learning_rate": 0.0008016374507405898, "loss": 3.6899, "step": 23365 }, { "epoch": 1.5878516102731348, "grad_norm": 1.6736160516738892, "learning_rate": 0.000801594985731757, "loss": 3.6193, "step": 23370 }, { "epoch": 1.5881913303437967, "grad_norm": 1.6181707382202148, "learning_rate": 0.0008015525207229243, "loss": 3.5547, "step": 23375 }, { "epoch": 1.5885310504144585, "grad_norm": 1.8714230060577393, "learning_rate": 0.0008015100557140916, "loss": 3.6227, "step": 23380 }, { "epoch": 1.5888707704851202, "grad_norm": 2.4700872898101807, "learning_rate": 0.0008014675907052588, "loss": 3.652, "step": 23385 }, { "epoch": 1.589210490555782, "grad_norm": 2.6204044818878174, "learning_rate": 0.0008014251256964261, "loss": 3.453, "step": 23390 }, { "epoch": 1.5895502106264439, "grad_norm": 1.858385682106018, "learning_rate": 0.0008013826606875935, "loss": 3.6222, "step": 23395 }, { "epoch": 1.5898899306971055, "grad_norm": 2.260377883911133, "learning_rate": 0.0008013401956787607, "loss": 3.6152, "step": 23400 }, { "epoch": 1.5902296507677673, "grad_norm": 1.9654605388641357, "learning_rate": 0.000801297730669928, "loss": 3.7736, "step": 23405 }, { "epoch": 1.5905693708384292, "grad_norm": 2.124427556991577, "learning_rate": 0.0008012552656610953, "loss": 3.779, "step": 23410 }, { "epoch": 1.5909090909090908, "grad_norm": 1.785689115524292, "learning_rate": 0.0008012128006522625, "loss": 3.6072, "step": 23415 }, { "epoch": 1.5912488109797527, "grad_norm": 2.514857053756714, 
"learning_rate": 0.0008011703356434297, "loss": 3.3823, "step": 23420 }, { "epoch": 1.5915885310504145, "grad_norm": 2.227313280105591, "learning_rate": 0.0008011278706345971, "loss": 3.744, "step": 23425 }, { "epoch": 1.5919282511210762, "grad_norm": 1.8490328788757324, "learning_rate": 0.0008010854056257644, "loss": 3.4554, "step": 23430 }, { "epoch": 1.592267971191738, "grad_norm": 2.2202913761138916, "learning_rate": 0.0008010429406169316, "loss": 3.2591, "step": 23435 }, { "epoch": 1.5926076912623999, "grad_norm": 3.403351306915283, "learning_rate": 0.000801000475608099, "loss": 3.2486, "step": 23440 }, { "epoch": 1.5929474113330615, "grad_norm": 1.9921786785125732, "learning_rate": 0.0008009580105992662, "loss": 3.4909, "step": 23445 }, { "epoch": 1.5932871314037234, "grad_norm": 1.7096933126449585, "learning_rate": 0.0008009155455904334, "loss": 3.8501, "step": 23450 }, { "epoch": 1.5936268514743852, "grad_norm": 2.2573435306549072, "learning_rate": 0.0008008730805816008, "loss": 3.3966, "step": 23455 }, { "epoch": 1.5939665715450468, "grad_norm": 1.816704511642456, "learning_rate": 0.000800830615572768, "loss": 3.698, "step": 23460 }, { "epoch": 1.5943062916157087, "grad_norm": 1.9074623584747314, "learning_rate": 0.0008007881505639353, "loss": 3.4244, "step": 23465 }, { "epoch": 1.5946460116863705, "grad_norm": 2.9278271198272705, "learning_rate": 0.0008007456855551027, "loss": 3.7405, "step": 23470 }, { "epoch": 1.5949857317570322, "grad_norm": 2.0310428142547607, "learning_rate": 0.0008007032205462699, "loss": 3.215, "step": 23475 }, { "epoch": 1.595325451827694, "grad_norm": 1.788256287574768, "learning_rate": 0.0008006607555374371, "loss": 3.5203, "step": 23480 }, { "epoch": 1.5956651718983559, "grad_norm": 2.366382598876953, "learning_rate": 0.0008006182905286044, "loss": 3.5203, "step": 23485 }, { "epoch": 1.5960048919690175, "grad_norm": 1.8832132816314697, "learning_rate": 0.0008005758255197717, "loss": 3.434, "step": 23490 }, { "epoch": 
1.5963446120396791, "grad_norm": 2.0188584327697754, "learning_rate": 0.0008005333605109391, "loss": 3.4768, "step": 23495 }, { "epoch": 1.5966843321103412, "grad_norm": 2.4632556438446045, "learning_rate": 0.0008004908955021063, "loss": 3.6785, "step": 23500 }, { "epoch": 1.5970240521810029, "grad_norm": 2.282167434692383, "learning_rate": 0.0008004484304932736, "loss": 3.4139, "step": 23505 }, { "epoch": 1.5973637722516645, "grad_norm": 2.1626381874084473, "learning_rate": 0.0008004059654844409, "loss": 3.6487, "step": 23510 }, { "epoch": 1.5977034923223266, "grad_norm": 1.9685248136520386, "learning_rate": 0.0008003635004756081, "loss": 3.7285, "step": 23515 }, { "epoch": 1.5980432123929882, "grad_norm": 2.0456366539001465, "learning_rate": 0.0008003210354667753, "loss": 3.5244, "step": 23520 }, { "epoch": 1.5983829324636498, "grad_norm": 1.923074722290039, "learning_rate": 0.0008002785704579427, "loss": 3.7499, "step": 23525 }, { "epoch": 1.5987226525343117, "grad_norm": 1.7883930206298828, "learning_rate": 0.00080023610544911, "loss": 3.463, "step": 23530 }, { "epoch": 1.5990623726049735, "grad_norm": 1.8733271360397339, "learning_rate": 0.0008001936404402772, "loss": 3.2886, "step": 23535 }, { "epoch": 1.5994020926756352, "grad_norm": 2.3830959796905518, "learning_rate": 0.0008001511754314446, "loss": 3.8316, "step": 23540 }, { "epoch": 1.599741812746297, "grad_norm": 2.5252432823181152, "learning_rate": 0.0008001087104226118, "loss": 3.4027, "step": 23545 }, { "epoch": 1.6000815328169589, "grad_norm": 2.024040937423706, "learning_rate": 0.000800066245413779, "loss": 3.7422, "step": 23550 }, { "epoch": 1.6004212528876205, "grad_norm": 1.8453738689422607, "learning_rate": 0.0008000237804049464, "loss": 3.649, "step": 23555 }, { "epoch": 1.6007609729582823, "grad_norm": 1.892744541168213, "learning_rate": 0.0007999813153961136, "loss": 3.706, "step": 23560 }, { "epoch": 1.6011006930289442, "grad_norm": 2.1559133529663086, "learning_rate": 0.0007999388503872809, 
"loss": 3.4437, "step": 23565 }, { "epoch": 1.6014404130996058, "grad_norm": 1.9625253677368164, "learning_rate": 0.0007998963853784483, "loss": 3.6227, "step": 23570 }, { "epoch": 1.6017801331702677, "grad_norm": 1.9630391597747803, "learning_rate": 0.0007998539203696155, "loss": 3.469, "step": 23575 }, { "epoch": 1.6021198532409295, "grad_norm": 3.0554862022399902, "learning_rate": 0.0007998114553607827, "loss": 3.4331, "step": 23580 }, { "epoch": 1.6024595733115912, "grad_norm": 2.1482253074645996, "learning_rate": 0.00079976899035195, "loss": 3.8436, "step": 23585 }, { "epoch": 1.602799293382253, "grad_norm": 1.6919513940811157, "learning_rate": 0.0007997265253431173, "loss": 3.6795, "step": 23590 }, { "epoch": 1.6031390134529149, "grad_norm": 1.812232255935669, "learning_rate": 0.0007996840603342845, "loss": 3.4265, "step": 23595 }, { "epoch": 1.6034787335235765, "grad_norm": 2.2339024543762207, "learning_rate": 0.0007996415953254519, "loss": 3.727, "step": 23600 }, { "epoch": 1.6038184535942384, "grad_norm": 1.7486634254455566, "learning_rate": 0.0007995991303166192, "loss": 3.7462, "step": 23605 }, { "epoch": 1.6041581736649002, "grad_norm": 1.8539268970489502, "learning_rate": 0.0007995566653077864, "loss": 3.5518, "step": 23610 }, { "epoch": 1.6044978937355618, "grad_norm": 2.627089262008667, "learning_rate": 0.0007995142002989537, "loss": 3.325, "step": 23615 }, { "epoch": 1.6048376138062237, "grad_norm": 1.7640658617019653, "learning_rate": 0.000799471735290121, "loss": 3.6421, "step": 23620 }, { "epoch": 1.6051773338768855, "grad_norm": 1.7949042320251465, "learning_rate": 0.0007994292702812882, "loss": 3.6141, "step": 23625 }, { "epoch": 1.6055170539475472, "grad_norm": 1.5470350980758667, "learning_rate": 0.0007993868052724555, "loss": 3.5277, "step": 23630 }, { "epoch": 1.605856774018209, "grad_norm": 2.210371971130371, "learning_rate": 0.0007993443402636228, "loss": 3.4863, "step": 23635 }, { "epoch": 1.6061964940888709, "grad_norm": 
1.9346649646759033, "learning_rate": 0.0007993018752547901, "loss": 3.6851, "step": 23640 }, { "epoch": 1.6065362141595325, "grad_norm": 1.564705491065979, "learning_rate": 0.0007992594102459574, "loss": 3.3798, "step": 23645 }, { "epoch": 1.6068759342301944, "grad_norm": 1.801560401916504, "learning_rate": 0.0007992169452371246, "loss": 3.4945, "step": 23650 }, { "epoch": 1.6072156543008562, "grad_norm": 2.0754711627960205, "learning_rate": 0.0007991744802282919, "loss": 3.588, "step": 23655 }, { "epoch": 1.6075553743715179, "grad_norm": 1.5391955375671387, "learning_rate": 0.0007991320152194592, "loss": 3.5754, "step": 23660 }, { "epoch": 1.6078950944421795, "grad_norm": 1.7240411043167114, "learning_rate": 0.0007990895502106264, "loss": 3.3891, "step": 23665 }, { "epoch": 1.6082348145128416, "grad_norm": 2.0450689792633057, "learning_rate": 0.0007990470852017937, "loss": 3.4122, "step": 23670 }, { "epoch": 1.6085745345835032, "grad_norm": 2.068056583404541, "learning_rate": 0.0007990046201929611, "loss": 3.5745, "step": 23675 }, { "epoch": 1.6089142546541648, "grad_norm": 1.889237403869629, "learning_rate": 0.0007989621551841283, "loss": 3.5684, "step": 23680 }, { "epoch": 1.609253974724827, "grad_norm": 1.8931483030319214, "learning_rate": 0.0007989196901752955, "loss": 3.5366, "step": 23685 }, { "epoch": 1.6095936947954885, "grad_norm": 1.528354287147522, "learning_rate": 0.0007988772251664629, "loss": 3.6147, "step": 23690 }, { "epoch": 1.6099334148661502, "grad_norm": 2.2113168239593506, "learning_rate": 0.0007988347601576301, "loss": 3.2773, "step": 23695 }, { "epoch": 1.610273134936812, "grad_norm": 1.8544247150421143, "learning_rate": 0.0007987922951487973, "loss": 3.5018, "step": 23700 }, { "epoch": 1.6106128550074739, "grad_norm": 1.6287355422973633, "learning_rate": 0.0007987498301399648, "loss": 3.6005, "step": 23705 }, { "epoch": 1.6109525750781355, "grad_norm": 1.9182679653167725, "learning_rate": 0.000798707365131132, "loss": 3.6336, "step": 23710 
}, { "epoch": 1.6112922951487973, "grad_norm": 1.852592945098877, "learning_rate": 0.0007986649001222992, "loss": 3.6472, "step": 23715 }, { "epoch": 1.6116320152194592, "grad_norm": 1.9010018110275269, "learning_rate": 0.0007986224351134665, "loss": 3.8596, "step": 23720 }, { "epoch": 1.6119717352901208, "grad_norm": 1.9254025220870972, "learning_rate": 0.0007985799701046338, "loss": 3.5243, "step": 23725 }, { "epoch": 1.6123114553607827, "grad_norm": 2.2326838970184326, "learning_rate": 0.000798537505095801, "loss": 3.7733, "step": 23730 }, { "epoch": 1.6126511754314445, "grad_norm": 2.258931875228882, "learning_rate": 0.0007984950400869683, "loss": 3.6074, "step": 23735 }, { "epoch": 1.6129908955021062, "grad_norm": 1.666810393333435, "learning_rate": 0.0007984525750781357, "loss": 3.3189, "step": 23740 }, { "epoch": 1.613330615572768, "grad_norm": 1.9280235767364502, "learning_rate": 0.0007984101100693029, "loss": 3.6282, "step": 23745 }, { "epoch": 1.6136703356434299, "grad_norm": 2.0575332641601562, "learning_rate": 0.0007983676450604702, "loss": 3.5983, "step": 23750 }, { "epoch": 1.6140100557140915, "grad_norm": 1.9889447689056396, "learning_rate": 0.0007983251800516375, "loss": 3.7232, "step": 23755 }, { "epoch": 1.6143497757847534, "grad_norm": 1.7026108503341675, "learning_rate": 0.0007982827150428047, "loss": 3.6025, "step": 23760 }, { "epoch": 1.6146894958554152, "grad_norm": 2.122793197631836, "learning_rate": 0.000798240250033972, "loss": 3.4928, "step": 23765 }, { "epoch": 1.6150292159260768, "grad_norm": 1.854821801185608, "learning_rate": 0.0007981977850251392, "loss": 3.3167, "step": 23770 }, { "epoch": 1.6153689359967387, "grad_norm": 1.6385107040405273, "learning_rate": 0.0007981553200163066, "loss": 3.5394, "step": 23775 }, { "epoch": 1.6157086560674006, "grad_norm": 2.0411922931671143, "learning_rate": 0.0007981128550074739, "loss": 3.588, "step": 23780 }, { "epoch": 1.6160483761380622, "grad_norm": 1.7207335233688354, "learning_rate": 
0.0007980703899986411, "loss": 3.4617, "step": 23785 }, { "epoch": 1.616388096208724, "grad_norm": 2.047630548477173, "learning_rate": 0.0007980279249898084, "loss": 3.3915, "step": 23790 }, { "epoch": 1.6167278162793859, "grad_norm": 2.4536731243133545, "learning_rate": 0.0007979854599809757, "loss": 3.3437, "step": 23795 }, { "epoch": 1.6170675363500475, "grad_norm": 2.013047933578491, "learning_rate": 0.0007979429949721429, "loss": 3.5815, "step": 23800 }, { "epoch": 1.6174072564207094, "grad_norm": 1.7972873449325562, "learning_rate": 0.0007979005299633101, "loss": 3.4259, "step": 23805 }, { "epoch": 1.6177469764913712, "grad_norm": 2.085606575012207, "learning_rate": 0.0007978580649544776, "loss": 3.5681, "step": 23810 }, { "epoch": 1.6180866965620329, "grad_norm": 1.6960458755493164, "learning_rate": 0.0007978155999456448, "loss": 3.8336, "step": 23815 }, { "epoch": 1.6184264166326947, "grad_norm": 1.7848408222198486, "learning_rate": 0.000797773134936812, "loss": 3.4106, "step": 23820 }, { "epoch": 1.6187661367033566, "grad_norm": 2.513399600982666, "learning_rate": 0.0007977306699279794, "loss": 3.602, "step": 23825 }, { "epoch": 1.6191058567740182, "grad_norm": 1.7005014419555664, "learning_rate": 0.0007976882049191466, "loss": 3.507, "step": 23830 }, { "epoch": 1.6194455768446798, "grad_norm": 1.9712989330291748, "learning_rate": 0.0007976457399103139, "loss": 3.5862, "step": 23835 }, { "epoch": 1.619785296915342, "grad_norm": 1.5181459188461304, "learning_rate": 0.0007976032749014812, "loss": 3.3231, "step": 23840 }, { "epoch": 1.6201250169860035, "grad_norm": 1.9257140159606934, "learning_rate": 0.0007975608098926485, "loss": 3.2926, "step": 23845 }, { "epoch": 1.6204647370566652, "grad_norm": 1.896662950515747, "learning_rate": 0.0007975183448838158, "loss": 3.7182, "step": 23850 }, { "epoch": 1.6208044571273272, "grad_norm": 1.8487941026687622, "learning_rate": 0.0007974758798749831, "loss": 3.6554, "step": 23855 }, { "epoch": 1.6211441771979889, 
"grad_norm": 1.7869564294815063, "learning_rate": 0.0007974334148661503, "loss": 3.4366, "step": 23860 }, { "epoch": 1.6214838972686505, "grad_norm": 1.7511417865753174, "learning_rate": 0.0007973909498573176, "loss": 3.4297, "step": 23865 }, { "epoch": 1.6218236173393124, "grad_norm": 1.8228095769882202, "learning_rate": 0.0007973484848484848, "loss": 3.4333, "step": 23870 }, { "epoch": 1.6221633374099742, "grad_norm": 2.2559289932250977, "learning_rate": 0.0007973060198396521, "loss": 3.5801, "step": 23875 }, { "epoch": 1.6225030574806358, "grad_norm": 1.8751248121261597, "learning_rate": 0.0007972635548308195, "loss": 3.6467, "step": 23880 }, { "epoch": 1.6228427775512977, "grad_norm": 1.7869665622711182, "learning_rate": 0.0007972210898219867, "loss": 3.5675, "step": 23885 }, { "epoch": 1.6231824976219595, "grad_norm": 1.9327740669250488, "learning_rate": 0.000797178624813154, "loss": 3.6746, "step": 23890 }, { "epoch": 1.6235222176926212, "grad_norm": 2.319798707962036, "learning_rate": 0.0007971361598043213, "loss": 3.4661, "step": 23895 }, { "epoch": 1.623861937763283, "grad_norm": 1.809572458267212, "learning_rate": 0.0007970936947954885, "loss": 3.5538, "step": 23900 }, { "epoch": 1.6242016578339449, "grad_norm": 1.8331800699234009, "learning_rate": 0.0007970512297866558, "loss": 3.5747, "step": 23905 }, { "epoch": 1.6245413779046065, "grad_norm": 2.0994439125061035, "learning_rate": 0.0007970087647778231, "loss": 3.5661, "step": 23910 }, { "epoch": 1.6248810979752684, "grad_norm": 2.1342031955718994, "learning_rate": 0.0007969662997689904, "loss": 3.5233, "step": 23915 }, { "epoch": 1.6252208180459302, "grad_norm": 1.8148807287216187, "learning_rate": 0.0007969238347601576, "loss": 3.6007, "step": 23920 }, { "epoch": 1.6255605381165918, "grad_norm": 1.5569798946380615, "learning_rate": 0.000796881369751325, "loss": 3.5494, "step": 23925 }, { "epoch": 1.6259002581872537, "grad_norm": 1.5634794235229492, "learning_rate": 0.0007968389047424922, "loss": 
3.5889, "step": 23930 }, { "epoch": 1.6262399782579156, "grad_norm": 2.3132452964782715, "learning_rate": 0.0007967964397336594, "loss": 3.3849, "step": 23935 }, { "epoch": 1.6265796983285772, "grad_norm": 1.5428545475006104, "learning_rate": 0.0007967539747248268, "loss": 3.6077, "step": 23940 }, { "epoch": 1.626919418399239, "grad_norm": 2.128164052963257, "learning_rate": 0.000796711509715994, "loss": 3.544, "step": 23945 }, { "epoch": 1.627259138469901, "grad_norm": 2.1017091274261475, "learning_rate": 0.0007966690447071613, "loss": 3.2877, "step": 23950 }, { "epoch": 1.6275988585405625, "grad_norm": 1.9261200428009033, "learning_rate": 0.0007966265796983287, "loss": 3.6068, "step": 23955 }, { "epoch": 1.6279385786112244, "grad_norm": 1.8712985515594482, "learning_rate": 0.0007965841146894959, "loss": 3.7506, "step": 23960 }, { "epoch": 1.6282782986818862, "grad_norm": 2.3877055644989014, "learning_rate": 0.0007965416496806631, "loss": 3.5453, "step": 23965 }, { "epoch": 1.6286180187525479, "grad_norm": 1.8540763854980469, "learning_rate": 0.0007964991846718304, "loss": 3.7312, "step": 23970 }, { "epoch": 1.6289577388232097, "grad_norm": 1.8484796285629272, "learning_rate": 0.0007964567196629977, "loss": 3.5355, "step": 23975 }, { "epoch": 1.6292974588938716, "grad_norm": 2.521103858947754, "learning_rate": 0.0007964142546541649, "loss": 3.5338, "step": 23980 }, { "epoch": 1.6296371789645332, "grad_norm": 1.9471737146377563, "learning_rate": 0.0007963717896453323, "loss": 3.7573, "step": 23985 }, { "epoch": 1.629976899035195, "grad_norm": 2.32014536857605, "learning_rate": 0.0007963293246364996, "loss": 3.6948, "step": 23990 }, { "epoch": 1.630316619105857, "grad_norm": 1.7932029962539673, "learning_rate": 0.0007962868596276668, "loss": 3.5883, "step": 23995 }, { "epoch": 1.6306563391765185, "grad_norm": 1.6462286710739136, "learning_rate": 0.0007962443946188341, "loss": 3.6293, "step": 24000 }, { "epoch": 1.6309960592471802, "grad_norm": 2.317420482635498, 
"learning_rate": 0.0007962019296100014, "loss": 3.4961, "step": 24005 }, { "epoch": 1.6313357793178422, "grad_norm": 2.1464388370513916, "learning_rate": 0.0007961594646011686, "loss": 3.399, "step": 24010 }, { "epoch": 1.6316754993885039, "grad_norm": 2.0239920616149902, "learning_rate": 0.000796116999592336, "loss": 3.4647, "step": 24015 }, { "epoch": 1.6320152194591655, "grad_norm": 1.8248363733291626, "learning_rate": 0.0007960745345835032, "loss": 3.7185, "step": 24020 }, { "epoch": 1.6323549395298276, "grad_norm": 1.9532426595687866, "learning_rate": 0.0007960320695746705, "loss": 3.6832, "step": 24025 }, { "epoch": 1.6326946596004892, "grad_norm": 1.5817792415618896, "learning_rate": 0.0007959896045658378, "loss": 3.6425, "step": 24030 }, { "epoch": 1.6330343796711508, "grad_norm": 1.7207059860229492, "learning_rate": 0.000795947139557005, "loss": 3.6574, "step": 24035 }, { "epoch": 1.633374099741813, "grad_norm": 1.7166489362716675, "learning_rate": 0.0007959046745481723, "loss": 3.5438, "step": 24040 }, { "epoch": 1.6337138198124745, "grad_norm": 1.8451863527297974, "learning_rate": 0.0007958622095393396, "loss": 3.3191, "step": 24045 }, { "epoch": 1.6340535398831362, "grad_norm": 2.4694578647613525, "learning_rate": 0.0007958197445305069, "loss": 3.4618, "step": 24050 }, { "epoch": 1.634393259953798, "grad_norm": 2.4596900939941406, "learning_rate": 0.0007957772795216742, "loss": 3.274, "step": 24055 }, { "epoch": 1.6347329800244599, "grad_norm": 1.7361284494400024, "learning_rate": 0.0007957348145128415, "loss": 3.5281, "step": 24060 }, { "epoch": 1.6350727000951215, "grad_norm": 1.907127857208252, "learning_rate": 0.0007956923495040087, "loss": 3.7541, "step": 24065 }, { "epoch": 1.6354124201657834, "grad_norm": 2.3364086151123047, "learning_rate": 0.0007956498844951759, "loss": 3.7256, "step": 24070 }, { "epoch": 1.6357521402364452, "grad_norm": 1.5819717645645142, "learning_rate": 0.0007956074194863433, "loss": 3.5585, "step": 24075 }, { "epoch": 
1.6360918603071068, "grad_norm": 1.931229829788208, "learning_rate": 0.0007955649544775105, "loss": 3.7583, "step": 24080 }, { "epoch": 1.6364315803777687, "grad_norm": 2.2059261798858643, "learning_rate": 0.0007955224894686778, "loss": 3.9592, "step": 24085 }, { "epoch": 1.6367713004484306, "grad_norm": 2.726163148880005, "learning_rate": 0.0007954800244598452, "loss": 3.5794, "step": 24090 }, { "epoch": 1.6371110205190922, "grad_norm": 2.0671801567077637, "learning_rate": 0.0007954375594510124, "loss": 3.4662, "step": 24095 }, { "epoch": 1.637450740589754, "grad_norm": 1.8827009201049805, "learning_rate": 0.0007953950944421796, "loss": 3.5794, "step": 24100 }, { "epoch": 1.637790460660416, "grad_norm": 2.031947374343872, "learning_rate": 0.000795352629433347, "loss": 3.4428, "step": 24105 }, { "epoch": 1.6381301807310775, "grad_norm": 2.2988109588623047, "learning_rate": 0.0007953101644245142, "loss": 3.3866, "step": 24110 }, { "epoch": 1.6384699008017394, "grad_norm": 2.23116135597229, "learning_rate": 0.0007952676994156814, "loss": 3.4451, "step": 24115 }, { "epoch": 1.6388096208724012, "grad_norm": 2.117631435394287, "learning_rate": 0.0007952252344068488, "loss": 3.4496, "step": 24120 }, { "epoch": 1.6391493409430629, "grad_norm": 1.927133321762085, "learning_rate": 0.0007951827693980161, "loss": 3.4378, "step": 24125 }, { "epoch": 1.6394890610137247, "grad_norm": 2.0762813091278076, "learning_rate": 0.0007951403043891833, "loss": 3.5371, "step": 24130 }, { "epoch": 1.6398287810843866, "grad_norm": 1.5337300300598145, "learning_rate": 0.0007950978393803506, "loss": 3.489, "step": 24135 }, { "epoch": 1.6401685011550482, "grad_norm": 2.2535600662231445, "learning_rate": 0.0007950553743715179, "loss": 3.6349, "step": 24140 }, { "epoch": 1.64050822122571, "grad_norm": 2.231065273284912, "learning_rate": 0.0007950129093626851, "loss": 3.3725, "step": 24145 }, { "epoch": 1.640847941296372, "grad_norm": 1.7382160425186157, "learning_rate": 0.0007949704443538524, 
"loss": 3.6965, "step": 24150 }, { "epoch": 1.6411876613670335, "grad_norm": 2.001635789871216, "learning_rate": 0.0007949279793450198, "loss": 3.491, "step": 24155 }, { "epoch": 1.6415273814376954, "grad_norm": 1.8695929050445557, "learning_rate": 0.000794885514336187, "loss": 3.2463, "step": 24160 }, { "epoch": 1.6418671015083572, "grad_norm": 1.8337949514389038, "learning_rate": 0.0007948430493273543, "loss": 3.6959, "step": 24165 }, { "epoch": 1.6422068215790189, "grad_norm": 2.119460344314575, "learning_rate": 0.0007948005843185215, "loss": 3.2981, "step": 24170 }, { "epoch": 1.6425465416496805, "grad_norm": 1.9903111457824707, "learning_rate": 0.0007947581193096889, "loss": 3.5279, "step": 24175 }, { "epoch": 1.6428862617203426, "grad_norm": 1.986141562461853, "learning_rate": 0.0007947156543008561, "loss": 3.5084, "step": 24180 }, { "epoch": 1.6432259817910042, "grad_norm": 2.3444125652313232, "learning_rate": 0.0007946731892920233, "loss": 3.5894, "step": 24185 }, { "epoch": 1.6435657018616658, "grad_norm": 2.1740829944610596, "learning_rate": 0.0007946307242831908, "loss": 3.2835, "step": 24190 }, { "epoch": 1.643905421932328, "grad_norm": 1.7916820049285889, "learning_rate": 0.000794588259274358, "loss": 3.5313, "step": 24195 }, { "epoch": 1.6442451420029895, "grad_norm": 2.4999303817749023, "learning_rate": 0.0007945457942655252, "loss": 3.8262, "step": 24200 }, { "epoch": 1.6445848620736512, "grad_norm": 1.9952300786972046, "learning_rate": 0.0007945033292566926, "loss": 3.588, "step": 24205 }, { "epoch": 1.6449245821443133, "grad_norm": 1.4121063947677612, "learning_rate": 0.0007944608642478598, "loss": 3.4064, "step": 24210 }, { "epoch": 1.6452643022149749, "grad_norm": 2.062267541885376, "learning_rate": 0.000794418399239027, "loss": 3.5465, "step": 24215 }, { "epoch": 1.6456040222856365, "grad_norm": 2.1489670276641846, "learning_rate": 0.0007943759342301943, "loss": 3.5615, "step": 24220 }, { "epoch": 1.6459437423562984, "grad_norm": 
1.7662482261657715, "learning_rate": 0.0007943334692213617, "loss": 3.5382, "step": 24225 }, { "epoch": 1.6462834624269602, "grad_norm": 2.6238656044006348, "learning_rate": 0.0007942994972142954, "loss": 3.577, "step": 24230 }, { "epoch": 1.6466231824976219, "grad_norm": 2.03196120262146, "learning_rate": 0.0007942570322054627, "loss": 3.4638, "step": 24235 }, { "epoch": 1.6469629025682837, "grad_norm": 1.791159749031067, "learning_rate": 0.00079421456719663, "loss": 3.6655, "step": 24240 }, { "epoch": 1.6473026226389456, "grad_norm": 1.8618749380111694, "learning_rate": 0.0007941721021877972, "loss": 3.4601, "step": 24245 }, { "epoch": 1.6476423427096072, "grad_norm": 1.6997971534729004, "learning_rate": 0.0007941296371789646, "loss": 3.6729, "step": 24250 }, { "epoch": 1.647982062780269, "grad_norm": 2.308969020843506, "learning_rate": 0.0007940871721701318, "loss": 3.4766, "step": 24255 }, { "epoch": 1.648321782850931, "grad_norm": 2.098604917526245, "learning_rate": 0.0007940447071612991, "loss": 3.6149, "step": 24260 }, { "epoch": 1.6486615029215925, "grad_norm": 1.8513532876968384, "learning_rate": 0.0007940022421524664, "loss": 3.7163, "step": 24265 }, { "epoch": 1.6490012229922544, "grad_norm": 2.810676097869873, "learning_rate": 0.0007939597771436336, "loss": 3.4854, "step": 24270 }, { "epoch": 1.6493409430629162, "grad_norm": 1.7040265798568726, "learning_rate": 0.0007939173121348009, "loss": 3.5139, "step": 24275 }, { "epoch": 1.6496806631335779, "grad_norm": 2.400364875793457, "learning_rate": 0.0007938748471259683, "loss": 3.7643, "step": 24280 }, { "epoch": 1.6500203832042397, "grad_norm": 1.8095325231552124, "learning_rate": 0.0007938323821171355, "loss": 3.4729, "step": 24285 }, { "epoch": 1.6503601032749016, "grad_norm": 2.677304983139038, "learning_rate": 0.0007937899171083028, "loss": 3.4792, "step": 24290 }, { "epoch": 1.6506998233455632, "grad_norm": 2.2844626903533936, "learning_rate": 0.0007937474520994701, "loss": 3.7095, "step": 24295 }, { 
"epoch": 1.651039543416225, "grad_norm": 1.7172207832336426, "learning_rate": 0.0007937049870906373, "loss": 3.3896, "step": 24300 }, { "epoch": 1.651379263486887, "grad_norm": 1.777043342590332, "learning_rate": 0.0007936625220818045, "loss": 3.5717, "step": 24305 }, { "epoch": 1.6517189835575485, "grad_norm": 2.483088731765747, "learning_rate": 0.0007936200570729719, "loss": 3.638, "step": 24310 }, { "epoch": 1.6520587036282104, "grad_norm": 2.090301036834717, "learning_rate": 0.0007935775920641392, "loss": 3.6306, "step": 24315 }, { "epoch": 1.6523984236988722, "grad_norm": 2.4011037349700928, "learning_rate": 0.0007935351270553064, "loss": 3.4701, "step": 24320 }, { "epoch": 1.6527381437695339, "grad_norm": 1.85023033618927, "learning_rate": 0.0007934926620464738, "loss": 3.4888, "step": 24325 }, { "epoch": 1.6530778638401957, "grad_norm": 2.388315439224243, "learning_rate": 0.000793450197037641, "loss": 3.423, "step": 24330 }, { "epoch": 1.6534175839108576, "grad_norm": 1.9776300191879272, "learning_rate": 0.0007934077320288082, "loss": 3.601, "step": 24335 }, { "epoch": 1.6537573039815192, "grad_norm": 1.8268394470214844, "learning_rate": 0.0007933652670199756, "loss": 3.2793, "step": 24340 }, { "epoch": 1.6540970240521808, "grad_norm": 1.823991298675537, "learning_rate": 0.0007933228020111428, "loss": 3.5813, "step": 24345 }, { "epoch": 1.654436744122843, "grad_norm": 2.2161502838134766, "learning_rate": 0.0007932803370023101, "loss": 3.5903, "step": 24350 }, { "epoch": 1.6547764641935045, "grad_norm": 2.0380771160125732, "learning_rate": 0.0007932378719934774, "loss": 3.5517, "step": 24355 }, { "epoch": 1.6551161842641662, "grad_norm": 1.8352357149124146, "learning_rate": 0.0007931954069846447, "loss": 3.594, "step": 24360 }, { "epoch": 1.6554559043348283, "grad_norm": 2.308397054672241, "learning_rate": 0.0007931529419758119, "loss": 3.6103, "step": 24365 }, { "epoch": 1.6557956244054899, "grad_norm": 1.9953728914260864, "learning_rate": 
0.0007931104769669792, "loss": 3.598, "step": 24370 }, { "epoch": 1.6561353444761515, "grad_norm": 2.583231210708618, "learning_rate": 0.0007930680119581465, "loss": 3.4608, "step": 24375 }, { "epoch": 1.6564750645468136, "grad_norm": 1.91532564163208, "learning_rate": 0.0007930255469493138, "loss": 3.3518, "step": 24380 }, { "epoch": 1.6568147846174752, "grad_norm": 1.920981526374817, "learning_rate": 0.0007929830819404811, "loss": 3.8116, "step": 24385 }, { "epoch": 1.6571545046881369, "grad_norm": 2.1637561321258545, "learning_rate": 0.0007929406169316484, "loss": 3.3922, "step": 24390 }, { "epoch": 1.6574942247587987, "grad_norm": 2.043303966522217, "learning_rate": 0.0007928981519228157, "loss": 3.7445, "step": 24395 }, { "epoch": 1.6578339448294606, "grad_norm": 1.6901911497116089, "learning_rate": 0.0007928556869139829, "loss": 3.7048, "step": 24400 }, { "epoch": 1.6581736649001222, "grad_norm": 2.098311185836792, "learning_rate": 0.0007928132219051501, "loss": 3.4226, "step": 24405 }, { "epoch": 1.658513384970784, "grad_norm": 2.0772922039031982, "learning_rate": 0.0007927707568963175, "loss": 3.6256, "step": 24410 }, { "epoch": 1.658853105041446, "grad_norm": 2.1021738052368164, "learning_rate": 0.0007927282918874847, "loss": 3.5624, "step": 24415 }, { "epoch": 1.6591928251121075, "grad_norm": 2.1544911861419678, "learning_rate": 0.000792685826878652, "loss": 3.474, "step": 24420 }, { "epoch": 1.6595325451827694, "grad_norm": 1.878689169883728, "learning_rate": 0.0007926433618698194, "loss": 3.5374, "step": 24425 }, { "epoch": 1.6598722652534312, "grad_norm": 2.5099711418151855, "learning_rate": 0.0007926008968609866, "loss": 3.7166, "step": 24430 }, { "epoch": 1.6602119853240929, "grad_norm": 1.9018545150756836, "learning_rate": 0.0007925584318521538, "loss": 3.546, "step": 24435 }, { "epoch": 1.6605517053947547, "grad_norm": 2.870206832885742, "learning_rate": 0.0007925159668433212, "loss": 3.6868, "step": 24440 }, { "epoch": 1.6608914254654166, 
"grad_norm": 1.8528332710266113, "learning_rate": 0.0007924735018344884, "loss": 3.5622, "step": 24445 }, { "epoch": 1.6612311455360782, "grad_norm": 1.7006596326828003, "learning_rate": 0.0007924310368256556, "loss": 3.7253, "step": 24450 }, { "epoch": 1.66157086560674, "grad_norm": 2.727696180343628, "learning_rate": 0.000792388571816823, "loss": 3.4679, "step": 24455 }, { "epoch": 1.661910585677402, "grad_norm": 1.6900042295455933, "learning_rate": 0.0007923461068079903, "loss": 3.6044, "step": 24460 }, { "epoch": 1.6622503057480635, "grad_norm": 2.5807595252990723, "learning_rate": 0.0007923036417991575, "loss": 3.5891, "step": 24465 }, { "epoch": 1.6625900258187254, "grad_norm": 2.6300318241119385, "learning_rate": 0.0007922611767903248, "loss": 3.7492, "step": 24470 }, { "epoch": 1.6629297458893872, "grad_norm": 2.2370493412017822, "learning_rate": 0.0007922187117814921, "loss": 3.7359, "step": 24475 }, { "epoch": 1.6632694659600489, "grad_norm": 1.7261353731155396, "learning_rate": 0.0007921762467726593, "loss": 3.6602, "step": 24480 }, { "epoch": 1.6636091860307107, "grad_norm": 2.4216158390045166, "learning_rate": 0.0007921337817638266, "loss": 3.3955, "step": 24485 }, { "epoch": 1.6639489061013726, "grad_norm": 2.219086170196533, "learning_rate": 0.000792091316754994, "loss": 3.6509, "step": 24490 }, { "epoch": 1.6642886261720342, "grad_norm": 3.0163638591766357, "learning_rate": 0.0007920488517461612, "loss": 3.5306, "step": 24495 }, { "epoch": 1.664628346242696, "grad_norm": 2.0596580505371094, "learning_rate": 0.0007920063867373285, "loss": 3.7552, "step": 24500 }, { "epoch": 1.664968066313358, "grad_norm": 2.071718454360962, "learning_rate": 0.0007919639217284957, "loss": 3.4594, "step": 24505 }, { "epoch": 1.6653077863840196, "grad_norm": 1.7129755020141602, "learning_rate": 0.000791921456719663, "loss": 3.5531, "step": 24510 }, { "epoch": 1.6656475064546812, "grad_norm": 1.6616554260253906, "learning_rate": 0.0007918789917108303, "loss": 3.3291, 
"step": 24515 }, { "epoch": 1.6659872265253433, "grad_norm": 2.1729156970977783, "learning_rate": 0.0007918365267019975, "loss": 3.4789, "step": 24520 }, { "epoch": 1.6663269465960049, "grad_norm": 1.6247426271438599, "learning_rate": 0.0007917940616931649, "loss": 3.5805, "step": 24525 }, { "epoch": 1.6666666666666665, "grad_norm": 1.8707493543624878, "learning_rate": 0.0007917515966843322, "loss": 3.6485, "step": 24530 }, { "epoch": 1.6670063867373286, "grad_norm": 2.1962368488311768, "learning_rate": 0.0007917091316754994, "loss": 3.7134, "step": 24535 }, { "epoch": 1.6673461068079902, "grad_norm": 1.9492689371109009, "learning_rate": 0.0007916666666666666, "loss": 3.5375, "step": 24540 }, { "epoch": 1.6676858268786519, "grad_norm": 2.2018651962280273, "learning_rate": 0.000791624201657834, "loss": 3.6234, "step": 24545 }, { "epoch": 1.668025546949314, "grad_norm": 2.546229839324951, "learning_rate": 0.0007915817366490012, "loss": 3.64, "step": 24550 }, { "epoch": 1.6683652670199756, "grad_norm": 2.0521280765533447, "learning_rate": 0.0007915392716401684, "loss": 3.7895, "step": 24555 }, { "epoch": 1.6687049870906372, "grad_norm": 1.741383671760559, "learning_rate": 0.0007914968066313359, "loss": 3.4733, "step": 24560 }, { "epoch": 1.669044707161299, "grad_norm": 1.4814565181732178, "learning_rate": 0.0007914543416225031, "loss": 3.6376, "step": 24565 }, { "epoch": 1.669384427231961, "grad_norm": 1.776001214981079, "learning_rate": 0.0007914118766136703, "loss": 3.7609, "step": 24570 }, { "epoch": 1.6697241473026225, "grad_norm": 1.897796869277954, "learning_rate": 0.0007913694116048377, "loss": 3.4652, "step": 24575 }, { "epoch": 1.6700638673732844, "grad_norm": 2.2653896808624268, "learning_rate": 0.0007913269465960049, "loss": 3.5282, "step": 24580 }, { "epoch": 1.6704035874439462, "grad_norm": 2.052097797393799, "learning_rate": 0.0007912844815871721, "loss": 3.3868, "step": 24585 }, { "epoch": 1.6707433075146079, "grad_norm": 2.0599687099456787, 
"learning_rate": 0.0007912420165783394, "loss": 3.3448, "step": 24590 }, { "epoch": 1.6710830275852697, "grad_norm": 1.5215051174163818, "learning_rate": 0.0007911995515695068, "loss": 3.4725, "step": 24595 }, { "epoch": 1.6714227476559316, "grad_norm": 1.6958731412887573, "learning_rate": 0.000791157086560674, "loss": 3.398, "step": 24600 }, { "epoch": 1.6717624677265932, "grad_norm": 2.498605489730835, "learning_rate": 0.0007911146215518413, "loss": 3.6109, "step": 24605 }, { "epoch": 1.672102187797255, "grad_norm": 2.1488654613494873, "learning_rate": 0.0007910721565430086, "loss": 3.6838, "step": 24610 }, { "epoch": 1.672441907867917, "grad_norm": 2.457561492919922, "learning_rate": 0.0007910296915341758, "loss": 3.7579, "step": 24615 }, { "epoch": 1.6727816279385785, "grad_norm": 2.4684267044067383, "learning_rate": 0.0007909872265253431, "loss": 3.3596, "step": 24620 }, { "epoch": 1.6731213480092404, "grad_norm": 2.1708669662475586, "learning_rate": 0.0007909447615165104, "loss": 3.5632, "step": 24625 }, { "epoch": 1.6734610680799022, "grad_norm": 2.0156407356262207, "learning_rate": 0.0007909022965076777, "loss": 3.6922, "step": 24630 }, { "epoch": 1.6738007881505639, "grad_norm": 1.7756153345108032, "learning_rate": 0.000790859831498845, "loss": 3.5302, "step": 24635 }, { "epoch": 1.6741405082212257, "grad_norm": 2.0076675415039062, "learning_rate": 0.0007908173664900122, "loss": 3.5863, "step": 24640 }, { "epoch": 1.6744802282918876, "grad_norm": 1.8187263011932373, "learning_rate": 0.0007907749014811795, "loss": 3.6127, "step": 24645 }, { "epoch": 1.6748199483625492, "grad_norm": 1.7318986654281616, "learning_rate": 0.0007907324364723468, "loss": 3.4061, "step": 24650 }, { "epoch": 1.675159668433211, "grad_norm": 1.8388981819152832, "learning_rate": 0.000790689971463514, "loss": 3.548, "step": 24655 }, { "epoch": 1.675499388503873, "grad_norm": 1.7362775802612305, "learning_rate": 0.0007906475064546813, "loss": 3.4504, "step": 24660 }, { "epoch": 
1.6758391085745346, "grad_norm": 2.1612229347229004, "learning_rate": 0.0007906050414458487, "loss": 3.6724, "step": 24665 }, { "epoch": 1.6761788286451964, "grad_norm": 2.4986653327941895, "learning_rate": 0.0007905625764370159, "loss": 3.5537, "step": 24670 }, { "epoch": 1.6765185487158583, "grad_norm": 1.7758082151412964, "learning_rate": 0.0007905201114281832, "loss": 3.6281, "step": 24675 }, { "epoch": 1.67685826878652, "grad_norm": 1.8167518377304077, "learning_rate": 0.0007904776464193505, "loss": 3.4499, "step": 24680 }, { "epoch": 1.6771979888571815, "grad_norm": 2.112382411956787, "learning_rate": 0.0007904351814105177, "loss": 3.7794, "step": 24685 }, { "epoch": 1.6775377089278436, "grad_norm": 1.9546887874603271, "learning_rate": 0.0007903927164016849, "loss": 3.5491, "step": 24690 }, { "epoch": 1.6778774289985052, "grad_norm": 2.411344528198242, "learning_rate": 0.0007903502513928523, "loss": 3.7108, "step": 24695 }, { "epoch": 1.6782171490691669, "grad_norm": 1.9498889446258545, "learning_rate": 0.0007903077863840196, "loss": 3.5003, "step": 24700 }, { "epoch": 1.678556869139829, "grad_norm": 2.0596697330474854, "learning_rate": 0.0007902653213751868, "loss": 3.6521, "step": 24705 }, { "epoch": 1.6788965892104906, "grad_norm": 1.6427903175354004, "learning_rate": 0.0007902228563663542, "loss": 3.5203, "step": 24710 }, { "epoch": 1.6792363092811522, "grad_norm": 1.7853165864944458, "learning_rate": 0.0007901803913575214, "loss": 3.7926, "step": 24715 }, { "epoch": 1.6795760293518143, "grad_norm": 2.0875167846679688, "learning_rate": 0.0007901379263486887, "loss": 3.49, "step": 24720 }, { "epoch": 1.679915749422476, "grad_norm": 1.7648011445999146, "learning_rate": 0.000790095461339856, "loss": 3.4522, "step": 24725 }, { "epoch": 1.6802554694931375, "grad_norm": 2.3302204608917236, "learning_rate": 0.0007900529963310232, "loss": 3.4981, "step": 24730 }, { "epoch": 1.6805951895637994, "grad_norm": 2.3806772232055664, "learning_rate": 
0.0007900105313221906, "loss": 3.5245, "step": 24735 }, { "epoch": 1.6809349096344612, "grad_norm": 1.6205798387527466, "learning_rate": 0.0007899680663133579, "loss": 3.4639, "step": 24740 }, { "epoch": 1.6812746297051229, "grad_norm": 2.072728395462036, "learning_rate": 0.0007899256013045251, "loss": 3.7979, "step": 24745 }, { "epoch": 1.6816143497757847, "grad_norm": 1.922695517539978, "learning_rate": 0.0007898831362956924, "loss": 3.6204, "step": 24750 }, { "epoch": 1.6819540698464466, "grad_norm": 2.1165924072265625, "learning_rate": 0.0007898406712868596, "loss": 3.6032, "step": 24755 }, { "epoch": 1.6822937899171082, "grad_norm": 2.076054573059082, "learning_rate": 0.0007897982062780269, "loss": 3.4056, "step": 24760 }, { "epoch": 1.68263350998777, "grad_norm": 1.7630934715270996, "learning_rate": 0.0007897557412691942, "loss": 3.1151, "step": 24765 }, { "epoch": 1.682973230058432, "grad_norm": 2.159270763397217, "learning_rate": 0.0007897132762603615, "loss": 3.4632, "step": 24770 }, { "epoch": 1.6833129501290935, "grad_norm": 2.138312816619873, "learning_rate": 0.0007896708112515288, "loss": 3.574, "step": 24775 }, { "epoch": 1.6836526701997554, "grad_norm": 2.1367549896240234, "learning_rate": 0.0007896283462426961, "loss": 3.4649, "step": 24780 }, { "epoch": 1.6839923902704172, "grad_norm": 2.0493924617767334, "learning_rate": 0.0007895858812338633, "loss": 3.4677, "step": 24785 }, { "epoch": 1.6843321103410789, "grad_norm": 1.7783892154693604, "learning_rate": 0.0007895434162250305, "loss": 3.4132, "step": 24790 }, { "epoch": 1.6846718304117407, "grad_norm": 1.824668526649475, "learning_rate": 0.0007895009512161979, "loss": 3.6471, "step": 24795 }, { "epoch": 1.6850115504824026, "grad_norm": 1.8049981594085693, "learning_rate": 0.0007894584862073651, "loss": 3.4351, "step": 24800 }, { "epoch": 1.6853512705530642, "grad_norm": 3.0623490810394287, "learning_rate": 0.0007894160211985324, "loss": 3.3648, "step": 24805 }, { "epoch": 1.685690990623726, 
"grad_norm": 3.545280933380127, "learning_rate": 0.0007893735561896998, "loss": 3.3235, "step": 24810 }, { "epoch": 1.686030710694388, "grad_norm": 1.903737187385559, "learning_rate": 0.000789331091180867, "loss": 3.65, "step": 24815 }, { "epoch": 1.6863704307650496, "grad_norm": 1.8446170091629028, "learning_rate": 0.0007892886261720342, "loss": 3.4815, "step": 24820 }, { "epoch": 1.6867101508357114, "grad_norm": 1.6784025430679321, "learning_rate": 0.0007892461611632016, "loss": 3.5996, "step": 24825 }, { "epoch": 1.6870498709063733, "grad_norm": 1.9319669008255005, "learning_rate": 0.0007892036961543688, "loss": 3.7697, "step": 24830 }, { "epoch": 1.687389590977035, "grad_norm": 2.0123486518859863, "learning_rate": 0.000789161231145536, "loss": 3.5459, "step": 24835 }, { "epoch": 1.6877293110476967, "grad_norm": 1.8812702894210815, "learning_rate": 0.0007891187661367035, "loss": 3.6326, "step": 24840 }, { "epoch": 1.6880690311183586, "grad_norm": 2.056732654571533, "learning_rate": 0.0007890763011278707, "loss": 3.6657, "step": 24845 }, { "epoch": 1.6884087511890202, "grad_norm": 2.4263572692871094, "learning_rate": 0.0007890338361190379, "loss": 3.4616, "step": 24850 }, { "epoch": 1.6887484712596819, "grad_norm": 1.8560152053833008, "learning_rate": 0.0007889913711102052, "loss": 3.5127, "step": 24855 }, { "epoch": 1.689088191330344, "grad_norm": 2.0100042819976807, "learning_rate": 0.0007889489061013725, "loss": 3.589, "step": 24860 }, { "epoch": 1.6894279114010056, "grad_norm": 1.9713232517242432, "learning_rate": 0.0007889064410925397, "loss": 3.4531, "step": 24865 }, { "epoch": 1.6897676314716672, "grad_norm": 1.8516298532485962, "learning_rate": 0.0007888639760837071, "loss": 3.4005, "step": 24870 }, { "epoch": 1.6901073515423293, "grad_norm": 1.5573961734771729, "learning_rate": 0.0007888215110748744, "loss": 3.5731, "step": 24875 }, { "epoch": 1.690447071612991, "grad_norm": 1.5189427137374878, "learning_rate": 0.0007887790460660416, "loss": 3.4184, 
"step": 24880 }, { "epoch": 1.6907867916836525, "grad_norm": 2.14630389213562, "learning_rate": 0.0007887365810572089, "loss": 3.3661, "step": 24885 }, { "epoch": 1.6911265117543146, "grad_norm": 2.0582761764526367, "learning_rate": 0.0007886941160483761, "loss": 3.5324, "step": 24890 }, { "epoch": 1.6914662318249762, "grad_norm": 2.3543622493743896, "learning_rate": 0.0007886516510395434, "loss": 3.6311, "step": 24895 }, { "epoch": 1.6918059518956379, "grad_norm": 1.9913684129714966, "learning_rate": 0.0007886091860307107, "loss": 3.4003, "step": 24900 }, { "epoch": 1.6921456719662997, "grad_norm": 2.6919972896575928, "learning_rate": 0.000788566721021878, "loss": 3.4584, "step": 24905 }, { "epoch": 1.6924853920369616, "grad_norm": 1.7309629917144775, "learning_rate": 0.0007885242560130453, "loss": 3.6428, "step": 24910 }, { "epoch": 1.6928251121076232, "grad_norm": 2.2893059253692627, "learning_rate": 0.0007884817910042126, "loss": 3.483, "step": 24915 }, { "epoch": 1.693164832178285, "grad_norm": 1.7896175384521484, "learning_rate": 0.0007884393259953798, "loss": 3.4992, "step": 24920 }, { "epoch": 1.693504552248947, "grad_norm": 2.0043556690216064, "learning_rate": 0.000788396860986547, "loss": 3.8816, "step": 24925 }, { "epoch": 1.6938442723196085, "grad_norm": 2.8890931606292725, "learning_rate": 0.0007883543959777144, "loss": 3.238, "step": 24930 }, { "epoch": 1.6941839923902704, "grad_norm": 1.7954583168029785, "learning_rate": 0.0007883119309688816, "loss": 3.7566, "step": 24935 }, { "epoch": 1.6945237124609323, "grad_norm": 2.1598052978515625, "learning_rate": 0.0007882694659600489, "loss": 3.4805, "step": 24940 }, { "epoch": 1.6948634325315939, "grad_norm": 2.0292108058929443, "learning_rate": 0.0007882270009512163, "loss": 3.5387, "step": 24945 }, { "epoch": 1.6952031526022557, "grad_norm": 2.5030219554901123, "learning_rate": 0.0007881845359423835, "loss": 3.6284, "step": 24950 }, { "epoch": 1.6955428726729176, "grad_norm": 1.7599895000457764, 
"learning_rate": 0.0007881420709335507, "loss": 3.6266, "step": 24955 }, { "epoch": 1.6958825927435792, "grad_norm": 1.864117980003357, "learning_rate": 0.0007880996059247181, "loss": 3.3341, "step": 24960 }, { "epoch": 1.696222312814241, "grad_norm": 1.7615903615951538, "learning_rate": 0.0007880571409158853, "loss": 3.4617, "step": 24965 }, { "epoch": 1.696562032884903, "grad_norm": 1.7218034267425537, "learning_rate": 0.0007880146759070525, "loss": 3.6405, "step": 24970 }, { "epoch": 1.6969017529555646, "grad_norm": 1.592492699623108, "learning_rate": 0.00078797221089822, "loss": 3.5944, "step": 24975 }, { "epoch": 1.6972414730262264, "grad_norm": 2.0643763542175293, "learning_rate": 0.0007879297458893872, "loss": 3.5475, "step": 24980 }, { "epoch": 1.6975811930968883, "grad_norm": 2.5867135524749756, "learning_rate": 0.0007878872808805544, "loss": 3.3426, "step": 24985 }, { "epoch": 1.69792091316755, "grad_norm": 2.1453042030334473, "learning_rate": 0.0007878448158717217, "loss": 3.3957, "step": 24990 }, { "epoch": 1.6982606332382117, "grad_norm": 2.1982226371765137, "learning_rate": 0.000787802350862889, "loss": 3.7329, "step": 24995 }, { "epoch": 1.6986003533088736, "grad_norm": 1.7943212985992432, "learning_rate": 0.0007877598858540562, "loss": 3.6931, "step": 25000 }, { "epoch": 1.6989400733795352, "grad_norm": 1.775612235069275, "learning_rate": 0.0007877174208452235, "loss": 3.5054, "step": 25005 }, { "epoch": 1.699279793450197, "grad_norm": 2.2755215167999268, "learning_rate": 0.0007876749558363909, "loss": 3.7279, "step": 25010 }, { "epoch": 1.699619513520859, "grad_norm": 2.3985583782196045, "learning_rate": 0.0007876324908275581, "loss": 3.275, "step": 25015 }, { "epoch": 1.6999592335915206, "grad_norm": 2.183558702468872, "learning_rate": 0.0007875900258187254, "loss": 3.6991, "step": 25020 }, { "epoch": 1.7002989536621822, "grad_norm": 2.4577176570892334, "learning_rate": 0.0007875475608098927, "loss": 3.6072, "step": 25025 }, { "epoch": 
1.7006386737328443, "grad_norm": 1.8926854133605957, "learning_rate": 0.0007875050958010599, "loss": 3.4915, "step": 25030 }, { "epoch": 1.700978393803506, "grad_norm": 2.0705604553222656, "learning_rate": 0.0007874626307922272, "loss": 3.2594, "step": 25035 }, { "epoch": 1.7013181138741675, "grad_norm": 2.193310499191284, "learning_rate": 0.0007874201657833944, "loss": 3.6329, "step": 25040 }, { "epoch": 1.7016578339448296, "grad_norm": 2.0547807216644287, "learning_rate": 0.0007873777007745618, "loss": 3.4559, "step": 25045 }, { "epoch": 1.7019975540154912, "grad_norm": 2.393503189086914, "learning_rate": 0.0007873352357657291, "loss": 3.5719, "step": 25050 }, { "epoch": 1.7023372740861529, "grad_norm": 2.249399423599243, "learning_rate": 0.0007872927707568963, "loss": 3.574, "step": 25055 }, { "epoch": 1.702676994156815, "grad_norm": 2.0623276233673096, "learning_rate": 0.0007872503057480637, "loss": 3.4659, "step": 25060 }, { "epoch": 1.7030167142274766, "grad_norm": 1.6142363548278809, "learning_rate": 0.0007872078407392309, "loss": 3.2676, "step": 25065 }, { "epoch": 1.7033564342981382, "grad_norm": 1.8242721557617188, "learning_rate": 0.0007871653757303981, "loss": 3.6111, "step": 25070 }, { "epoch": 1.7036961543688, "grad_norm": 1.6033929586410522, "learning_rate": 0.0007871229107215655, "loss": 3.2894, "step": 25075 }, { "epoch": 1.704035874439462, "grad_norm": 1.8422309160232544, "learning_rate": 0.0007870804457127328, "loss": 3.5497, "step": 25080 }, { "epoch": 1.7043755945101235, "grad_norm": 2.511939764022827, "learning_rate": 0.0007870379807039, "loss": 3.7108, "step": 25085 }, { "epoch": 1.7047153145807854, "grad_norm": 1.7011046409606934, "learning_rate": 0.0007869955156950673, "loss": 3.5608, "step": 25090 }, { "epoch": 1.7050550346514473, "grad_norm": 1.9047589302062988, "learning_rate": 0.0007869530506862346, "loss": 3.6682, "step": 25095 }, { "epoch": 1.7053947547221089, "grad_norm": 1.702845811843872, "learning_rate": 0.0007869105856774018, 
"loss": 3.6223, "step": 25100 }, { "epoch": 1.7057344747927707, "grad_norm": 2.0251452922821045, "learning_rate": 0.0007868681206685691, "loss": 3.6318, "step": 25105 }, { "epoch": 1.7060741948634326, "grad_norm": 1.427959680557251, "learning_rate": 0.0007868256556597364, "loss": 3.5081, "step": 25110 }, { "epoch": 1.7064139149340942, "grad_norm": 1.6483527421951294, "learning_rate": 0.0007867831906509037, "loss": 3.4903, "step": 25115 }, { "epoch": 1.706753635004756, "grad_norm": 2.0400054454803467, "learning_rate": 0.000786740725642071, "loss": 3.4275, "step": 25120 }, { "epoch": 1.707093355075418, "grad_norm": 2.5995237827301025, "learning_rate": 0.0007866982606332383, "loss": 3.5439, "step": 25125 }, { "epoch": 1.7074330751460796, "grad_norm": 1.8108634948730469, "learning_rate": 0.0007866557956244055, "loss": 3.864, "step": 25130 }, { "epoch": 1.7077727952167414, "grad_norm": 1.6542426347732544, "learning_rate": 0.0007866133306155728, "loss": 3.4365, "step": 25135 }, { "epoch": 1.7081125152874033, "grad_norm": 1.825119972229004, "learning_rate": 0.00078657086560674, "loss": 3.7987, "step": 25140 }, { "epoch": 1.708452235358065, "grad_norm": 3.137856960296631, "learning_rate": 0.0007865284005979073, "loss": 3.5814, "step": 25145 }, { "epoch": 1.7087919554287267, "grad_norm": 2.157243490219116, "learning_rate": 0.0007864859355890747, "loss": 3.9177, "step": 25150 }, { "epoch": 1.7091316754993886, "grad_norm": 2.14719557762146, "learning_rate": 0.0007864434705802419, "loss": 3.6539, "step": 25155 }, { "epoch": 1.7094713955700502, "grad_norm": 2.8954877853393555, "learning_rate": 0.0007864010055714092, "loss": 3.5307, "step": 25160 }, { "epoch": 1.709811115640712, "grad_norm": 1.9995588064193726, "learning_rate": 0.0007863585405625765, "loss": 3.4248, "step": 25165 }, { "epoch": 1.710150835711374, "grad_norm": 2.4728918075561523, "learning_rate": 0.0007863160755537437, "loss": 3.6035, "step": 25170 }, { "epoch": 1.7104905557820356, "grad_norm": 1.9808955192565918, 
"learning_rate": 0.0007862736105449109, "loss": 3.7026, "step": 25175 }, { "epoch": 1.7108302758526974, "grad_norm": 2.067430257797241, "learning_rate": 0.0007862311455360783, "loss": 3.6671, "step": 25180 }, { "epoch": 1.7111699959233593, "grad_norm": 1.9426785707473755, "learning_rate": 0.0007861886805272456, "loss": 3.708, "step": 25185 }, { "epoch": 1.711509715994021, "grad_norm": 2.8325862884521484, "learning_rate": 0.0007861462155184128, "loss": 3.7235, "step": 25190 }, { "epoch": 1.7118494360646825, "grad_norm": 1.9589022397994995, "learning_rate": 0.0007861037505095802, "loss": 3.6703, "step": 25195 }, { "epoch": 1.7121891561353446, "grad_norm": 1.8201489448547363, "learning_rate": 0.0007860612855007474, "loss": 3.449, "step": 25200 }, { "epoch": 1.7125288762060062, "grad_norm": 2.068521738052368, "learning_rate": 0.0007860188204919146, "loss": 3.4531, "step": 25205 }, { "epoch": 1.7128685962766679, "grad_norm": 1.9802258014678955, "learning_rate": 0.000785976355483082, "loss": 3.5774, "step": 25210 }, { "epoch": 1.71320831634733, "grad_norm": 1.8421871662139893, "learning_rate": 0.0007859338904742492, "loss": 3.4703, "step": 25215 }, { "epoch": 1.7135480364179916, "grad_norm": 1.8757784366607666, "learning_rate": 0.0007858914254654165, "loss": 3.6814, "step": 25220 }, { "epoch": 1.7138877564886532, "grad_norm": 1.9633817672729492, "learning_rate": 0.0007858489604565839, "loss": 3.5744, "step": 25225 }, { "epoch": 1.7142274765593153, "grad_norm": 1.823642611503601, "learning_rate": 0.0007858064954477511, "loss": 3.2803, "step": 25230 }, { "epoch": 1.714567196629977, "grad_norm": 2.058093309402466, "learning_rate": 0.0007857640304389183, "loss": 3.5112, "step": 25235 }, { "epoch": 1.7149069167006386, "grad_norm": 2.744415760040283, "learning_rate": 0.0007857215654300856, "loss": 4.0263, "step": 25240 }, { "epoch": 1.7152466367713004, "grad_norm": 1.612244963645935, "learning_rate": 0.0007856791004212529, "loss": 3.4721, "step": 25245 }, { "epoch": 
1.7155863568419623, "grad_norm": 1.841460108757019, "learning_rate": 0.0007856366354124201, "loss": 3.6199, "step": 25250 }, { "epoch": 1.7159260769126239, "grad_norm": 1.8394899368286133, "learning_rate": 0.0007855941704035875, "loss": 3.4761, "step": 25255 }, { "epoch": 1.7162657969832857, "grad_norm": 2.1243245601654053, "learning_rate": 0.0007855517053947548, "loss": 3.5535, "step": 25260 }, { "epoch": 1.7166055170539476, "grad_norm": 2.0651116371154785, "learning_rate": 0.000785509240385922, "loss": 3.7882, "step": 25265 }, { "epoch": 1.7169452371246092, "grad_norm": 1.672230839729309, "learning_rate": 0.0007854667753770893, "loss": 3.6361, "step": 25270 }, { "epoch": 1.717284957195271, "grad_norm": 1.7171298265457153, "learning_rate": 0.0007854243103682565, "loss": 3.4812, "step": 25275 }, { "epoch": 1.717624677265933, "grad_norm": 2.357259750366211, "learning_rate": 0.0007853818453594238, "loss": 3.6576, "step": 25280 }, { "epoch": 1.7179643973365946, "grad_norm": 1.861465334892273, "learning_rate": 0.0007853393803505911, "loss": 3.7387, "step": 25285 }, { "epoch": 1.7183041174072564, "grad_norm": 1.7714899778366089, "learning_rate": 0.0007852969153417584, "loss": 3.6622, "step": 25290 }, { "epoch": 1.7186438374779183, "grad_norm": 2.196199655532837, "learning_rate": 0.0007852544503329257, "loss": 3.3228, "step": 25295 }, { "epoch": 1.71898355754858, "grad_norm": 1.5608468055725098, "learning_rate": 0.000785211985324093, "loss": 3.5939, "step": 25300 }, { "epoch": 1.7193232776192418, "grad_norm": 2.0835864543914795, "learning_rate": 0.0007851695203152602, "loss": 3.3605, "step": 25305 }, { "epoch": 1.7196629976899036, "grad_norm": 2.4996867179870605, "learning_rate": 0.0007851270553064275, "loss": 3.7441, "step": 25310 }, { "epoch": 1.7200027177605652, "grad_norm": 2.1697640419006348, "learning_rate": 0.0007850845902975948, "loss": 3.6423, "step": 25315 }, { "epoch": 1.720342437831227, "grad_norm": 1.8396254777908325, "learning_rate": 0.000785042125288762, 
"loss": 3.6234, "step": 25320 }, { "epoch": 1.720682157901889, "grad_norm": 1.9975824356079102, "learning_rate": 0.0007849996602799293, "loss": 3.5258, "step": 25325 }, { "epoch": 1.7210218779725506, "grad_norm": 1.7042887210845947, "learning_rate": 0.0007849571952710967, "loss": 3.622, "step": 25330 }, { "epoch": 1.7213615980432124, "grad_norm": 2.3808062076568604, "learning_rate": 0.0007849147302622639, "loss": 3.4078, "step": 25335 }, { "epoch": 1.7217013181138743, "grad_norm": 1.7496337890625, "learning_rate": 0.0007848722652534311, "loss": 3.5437, "step": 25340 }, { "epoch": 1.722041038184536, "grad_norm": 1.8692189455032349, "learning_rate": 0.0007848298002445985, "loss": 3.3537, "step": 25345 }, { "epoch": 1.7223807582551978, "grad_norm": 2.05462908744812, "learning_rate": 0.0007847873352357657, "loss": 3.2947, "step": 25350 }, { "epoch": 1.7227204783258596, "grad_norm": 2.0694496631622314, "learning_rate": 0.0007847448702269329, "loss": 3.4638, "step": 25355 }, { "epoch": 1.7230601983965212, "grad_norm": 2.47814679145813, "learning_rate": 0.0007847024052181004, "loss": 3.3619, "step": 25360 }, { "epoch": 1.7233999184671829, "grad_norm": 1.985935091972351, "learning_rate": 0.0007846599402092676, "loss": 3.3998, "step": 25365 }, { "epoch": 1.723739638537845, "grad_norm": 1.8481824398040771, "learning_rate": 0.0007846174752004348, "loss": 3.7797, "step": 25370 }, { "epoch": 1.7240793586085066, "grad_norm": 2.0172317028045654, "learning_rate": 0.0007845750101916021, "loss": 3.5471, "step": 25375 }, { "epoch": 1.7244190786791682, "grad_norm": 2.332138776779175, "learning_rate": 0.0007845325451827694, "loss": 3.4961, "step": 25380 }, { "epoch": 1.7247587987498303, "grad_norm": 2.2789652347564697, "learning_rate": 0.0007844900801739366, "loss": 3.6629, "step": 25385 }, { "epoch": 1.725098518820492, "grad_norm": 1.7012922763824463, "learning_rate": 0.000784447615165104, "loss": 3.6862, "step": 25390 }, { "epoch": 1.7254382388911536, "grad_norm": 2.162764549255371, 
"learning_rate": 0.0007844051501562713, "loss": 3.5042, "step": 25395 }, { "epoch": 1.7257779589618156, "grad_norm": 1.9768747091293335, "learning_rate": 0.0007843626851474386, "loss": 3.4918, "step": 25400 }, { "epoch": 1.7261176790324773, "grad_norm": 2.3521742820739746, "learning_rate": 0.0007843202201386058, "loss": 3.53, "step": 25405 }, { "epoch": 1.726457399103139, "grad_norm": 1.9608522653579712, "learning_rate": 0.0007842777551297731, "loss": 3.645, "step": 25410 }, { "epoch": 1.7267971191738007, "grad_norm": 1.889914631843567, "learning_rate": 0.0007842352901209404, "loss": 3.6206, "step": 25415 }, { "epoch": 1.7271368392444626, "grad_norm": 1.958311915397644, "learning_rate": 0.0007841928251121076, "loss": 3.7547, "step": 25420 }, { "epoch": 1.7274765593151242, "grad_norm": 1.9851317405700684, "learning_rate": 0.000784150360103275, "loss": 3.4838, "step": 25425 }, { "epoch": 1.727816279385786, "grad_norm": 2.124025821685791, "learning_rate": 0.0007841078950944423, "loss": 3.4539, "step": 25430 }, { "epoch": 1.728155999456448, "grad_norm": 2.631645441055298, "learning_rate": 0.0007840654300856095, "loss": 3.5799, "step": 25435 }, { "epoch": 1.7284957195271096, "grad_norm": 1.6689176559448242, "learning_rate": 0.0007840229650767767, "loss": 3.6137, "step": 25440 }, { "epoch": 1.7288354395977714, "grad_norm": 2.302030324935913, "learning_rate": 0.0007839805000679441, "loss": 3.4555, "step": 25445 }, { "epoch": 1.7291751596684333, "grad_norm": 2.2714622020721436, "learning_rate": 0.0007839380350591113, "loss": 3.5532, "step": 25450 }, { "epoch": 1.729514879739095, "grad_norm": 2.1682066917419434, "learning_rate": 0.0007838955700502785, "loss": 3.5842, "step": 25455 }, { "epoch": 1.7298545998097568, "grad_norm": 1.8763288259506226, "learning_rate": 0.000783853105041446, "loss": 3.7304, "step": 25460 }, { "epoch": 1.7301943198804186, "grad_norm": 2.101531744003296, "learning_rate": 0.0007838106400326132, "loss": 3.6533, "step": 25465 }, { "epoch": 
1.7305340399510802, "grad_norm": 2.4480302333831787, "learning_rate": 0.0007837681750237804, "loss": 3.444, "step": 25470 }, { "epoch": 1.730873760021742, "grad_norm": 1.8923609256744385, "learning_rate": 0.0007837257100149478, "loss": 3.5835, "step": 25475 }, { "epoch": 1.731213480092404, "grad_norm": 2.1186070442199707, "learning_rate": 0.000783683245006115, "loss": 3.3301, "step": 25480 }, { "epoch": 1.7315532001630656, "grad_norm": 2.9385769367218018, "learning_rate": 0.0007836407799972822, "loss": 3.5615, "step": 25485 }, { "epoch": 1.7318929202337274, "grad_norm": 2.3886237144470215, "learning_rate": 0.0007835983149884495, "loss": 3.5531, "step": 25490 }, { "epoch": 1.7322326403043893, "grad_norm": 2.0804660320281982, "learning_rate": 0.0007835558499796169, "loss": 3.4119, "step": 25495 }, { "epoch": 1.732572360375051, "grad_norm": 2.7706289291381836, "learning_rate": 0.0007835133849707841, "loss": 3.5812, "step": 25500 }, { "epoch": 1.7329120804457128, "grad_norm": 2.1392102241516113, "learning_rate": 0.0007834709199619514, "loss": 3.587, "step": 25505 }, { "epoch": 1.7332518005163746, "grad_norm": 2.1994831562042236, "learning_rate": 0.0007834284549531187, "loss": 3.7617, "step": 25510 }, { "epoch": 1.7335915205870362, "grad_norm": 1.9923841953277588, "learning_rate": 0.0007833859899442859, "loss": 3.3861, "step": 25515 }, { "epoch": 1.733931240657698, "grad_norm": 2.163722515106201, "learning_rate": 0.0007833435249354532, "loss": 3.5678, "step": 25520 }, { "epoch": 1.73427096072836, "grad_norm": 2.275738477706909, "learning_rate": 0.0007833010599266204, "loss": 3.2533, "step": 25525 }, { "epoch": 1.7346106807990216, "grad_norm": 1.9636945724487305, "learning_rate": 0.0007832585949177878, "loss": 3.2832, "step": 25530 }, { "epoch": 1.7349504008696832, "grad_norm": 1.6645368337631226, "learning_rate": 0.0007832161299089551, "loss": 3.4052, "step": 25535 }, { "epoch": 1.7352901209403453, "grad_norm": 1.7404865026474, "learning_rate": 0.0007831736649001223, 
"loss": 3.4503, "step": 25540 }, { "epoch": 1.735629841011007, "grad_norm": 2.4063119888305664, "learning_rate": 0.0007831311998912896, "loss": 3.5459, "step": 25545 }, { "epoch": 1.7359695610816686, "grad_norm": 2.394749402999878, "learning_rate": 0.0007830887348824569, "loss": 3.6982, "step": 25550 }, { "epoch": 1.7363092811523306, "grad_norm": 2.070873498916626, "learning_rate": 0.0007830462698736241, "loss": 3.6282, "step": 25555 }, { "epoch": 1.7366490012229923, "grad_norm": 1.612656831741333, "learning_rate": 0.0007830038048647913, "loss": 3.5115, "step": 25560 }, { "epoch": 1.736988721293654, "grad_norm": 2.3173577785491943, "learning_rate": 0.0007829613398559588, "loss": 3.5095, "step": 25565 }, { "epoch": 1.737328441364316, "grad_norm": 2.514810562133789, "learning_rate": 0.000782918874847126, "loss": 3.2684, "step": 25570 }, { "epoch": 1.7376681614349776, "grad_norm": 2.039429187774658, "learning_rate": 0.0007828764098382932, "loss": 3.494, "step": 25575 }, { "epoch": 1.7380078815056392, "grad_norm": 2.246990919113159, "learning_rate": 0.0007828339448294606, "loss": 3.6726, "step": 25580 }, { "epoch": 1.738347601576301, "grad_norm": 2.011967182159424, "learning_rate": 0.0007827914798206278, "loss": 3.4514, "step": 25585 }, { "epoch": 1.738687321646963, "grad_norm": 2.130028486251831, "learning_rate": 0.000782749014811795, "loss": 3.5282, "step": 25590 }, { "epoch": 1.7390270417176246, "grad_norm": 2.26773738861084, "learning_rate": 0.0007827065498029624, "loss": 3.671, "step": 25595 }, { "epoch": 1.7393667617882864, "grad_norm": 2.316812038421631, "learning_rate": 0.0007826640847941297, "loss": 3.8166, "step": 25600 }, { "epoch": 1.7397064818589483, "grad_norm": 2.0839858055114746, "learning_rate": 0.0007826216197852969, "loss": 3.5762, "step": 25605 }, { "epoch": 1.74004620192961, "grad_norm": 2.0432376861572266, "learning_rate": 0.0007825791547764643, "loss": 3.54, "step": 25610 }, { "epoch": 1.7403859220002718, "grad_norm": 2.3272223472595215, 
"learning_rate": 0.0007825366897676315, "loss": 3.8372, "step": 25615 }, { "epoch": 1.7407256420709336, "grad_norm": 2.385831832885742, "learning_rate": 0.0007824942247587987, "loss": 3.4841, "step": 25620 }, { "epoch": 1.7410653621415952, "grad_norm": 2.056164503097534, "learning_rate": 0.000782451759749966, "loss": 3.566, "step": 25625 }, { "epoch": 1.741405082212257, "grad_norm": 1.9770021438598633, "learning_rate": 0.0007824092947411333, "loss": 3.6452, "step": 25630 }, { "epoch": 1.741744802282919, "grad_norm": 2.1251978874206543, "learning_rate": 0.0007823668297323006, "loss": 3.544, "step": 25635 }, { "epoch": 1.7420845223535806, "grad_norm": 1.8251336812973022, "learning_rate": 0.0007823243647234679, "loss": 3.3254, "step": 25640 }, { "epoch": 1.7424242424242424, "grad_norm": 2.2392590045928955, "learning_rate": 0.0007822818997146352, "loss": 3.4338, "step": 25645 }, { "epoch": 1.7427639624949043, "grad_norm": 2.3061935901641846, "learning_rate": 0.0007822394347058024, "loss": 3.4073, "step": 25650 }, { "epoch": 1.743103682565566, "grad_norm": 1.6745692491531372, "learning_rate": 0.0007821969696969697, "loss": 3.5233, "step": 25655 }, { "epoch": 1.7434434026362278, "grad_norm": 2.816922187805176, "learning_rate": 0.000782154504688137, "loss": 3.399, "step": 25660 }, { "epoch": 1.7437831227068896, "grad_norm": 2.2864468097686768, "learning_rate": 0.0007821120396793042, "loss": 3.5463, "step": 25665 }, { "epoch": 1.7441228427775513, "grad_norm": 1.773720622062683, "learning_rate": 0.0007820695746704716, "loss": 3.8341, "step": 25670 }, { "epoch": 1.744462562848213, "grad_norm": 1.9244436025619507, "learning_rate": 0.0007820271096616388, "loss": 3.4002, "step": 25675 }, { "epoch": 1.744802282918875, "grad_norm": 1.8472152948379517, "learning_rate": 0.0007819846446528061, "loss": 3.6167, "step": 25680 }, { "epoch": 1.7451420029895366, "grad_norm": 2.493680000305176, "learning_rate": 0.0007819421796439734, "loss": 3.4762, "step": 25685 }, { "epoch": 
1.7454817230601984, "grad_norm": 2.1246490478515625, "learning_rate": 0.0007818997146351406, "loss": 3.5953, "step": 25690 }, { "epoch": 1.7458214431308603, "grad_norm": 1.901170253753662, "learning_rate": 0.0007818572496263079, "loss": 3.6311, "step": 25695 }, { "epoch": 1.746161163201522, "grad_norm": 1.9192404747009277, "learning_rate": 0.0007818147846174752, "loss": 3.4093, "step": 25700 }, { "epoch": 1.7465008832721836, "grad_norm": 2.2956435680389404, "learning_rate": 0.0007817723196086425, "loss": 3.7122, "step": 25705 }, { "epoch": 1.7468406033428456, "grad_norm": 2.5184059143066406, "learning_rate": 0.0007817298545998098, "loss": 3.4985, "step": 25710 }, { "epoch": 1.7471803234135073, "grad_norm": 2.669534921646118, "learning_rate": 0.0007816873895909771, "loss": 3.4815, "step": 25715 }, { "epoch": 1.747520043484169, "grad_norm": 1.8833541870117188, "learning_rate": 0.0007816449245821443, "loss": 3.5288, "step": 25720 }, { "epoch": 1.747859763554831, "grad_norm": 2.052539587020874, "learning_rate": 0.0007816024595733115, "loss": 3.4903, "step": 25725 }, { "epoch": 1.7481994836254926, "grad_norm": 1.6810287237167358, "learning_rate": 0.0007815599945644789, "loss": 3.5545, "step": 25730 }, { "epoch": 1.7485392036961542, "grad_norm": 1.7324599027633667, "learning_rate": 0.0007815175295556461, "loss": 3.2266, "step": 25735 }, { "epoch": 1.7488789237668163, "grad_norm": 1.7781243324279785, "learning_rate": 0.0007814750645468135, "loss": 3.6114, "step": 25740 }, { "epoch": 1.749218643837478, "grad_norm": 1.4304509162902832, "learning_rate": 0.0007814325995379808, "loss": 3.2214, "step": 25745 }, { "epoch": 1.7495583639081396, "grad_norm": 1.9658812284469604, "learning_rate": 0.000781390134529148, "loss": 3.6479, "step": 25750 }, { "epoch": 1.7498980839788014, "grad_norm": 2.8673791885375977, "learning_rate": 0.0007813476695203153, "loss": 3.518, "step": 25755 }, { "epoch": 1.7502378040494633, "grad_norm": 2.566286325454712, "learning_rate": 
0.0007813052045114826, "loss": 3.561, "step": 25760 }, { "epoch": 1.750577524120125, "grad_norm": 1.961592674255371, "learning_rate": 0.0007812627395026498, "loss": 3.5291, "step": 25765 }, { "epoch": 1.7509172441907868, "grad_norm": 1.5602104663848877, "learning_rate": 0.0007812202744938171, "loss": 3.3317, "step": 25770 }, { "epoch": 1.7512569642614486, "grad_norm": 1.9256736040115356, "learning_rate": 0.0007811778094849844, "loss": 3.6503, "step": 25775 }, { "epoch": 1.7515966843321102, "grad_norm": 1.9631811380386353, "learning_rate": 0.0007811353444761517, "loss": 3.5555, "step": 25780 }, { "epoch": 1.751936404402772, "grad_norm": 1.9925085306167603, "learning_rate": 0.000781092879467319, "loss": 3.3448, "step": 25785 }, { "epoch": 1.752276124473434, "grad_norm": 2.00445294380188, "learning_rate": 0.0007810504144584862, "loss": 3.7801, "step": 25790 }, { "epoch": 1.7526158445440956, "grad_norm": 1.6913527250289917, "learning_rate": 0.0007810079494496535, "loss": 3.4901, "step": 25795 }, { "epoch": 1.7529555646147574, "grad_norm": 2.424006938934326, "learning_rate": 0.0007809654844408208, "loss": 3.6241, "step": 25800 }, { "epoch": 1.7532952846854193, "grad_norm": 2.6800537109375, "learning_rate": 0.000780923019431988, "loss": 3.6012, "step": 25805 }, { "epoch": 1.753635004756081, "grad_norm": 1.6786895990371704, "learning_rate": 0.0007808805544231554, "loss": 3.7703, "step": 25810 }, { "epoch": 1.7539747248267428, "grad_norm": 2.027472496032715, "learning_rate": 0.0007808380894143227, "loss": 3.7815, "step": 25815 }, { "epoch": 1.7543144448974046, "grad_norm": 2.569924831390381, "learning_rate": 0.0007807956244054899, "loss": 3.814, "step": 25820 }, { "epoch": 1.7546541649680663, "grad_norm": 2.074516773223877, "learning_rate": 0.0007807531593966571, "loss": 3.497, "step": 25825 }, { "epoch": 1.754993885038728, "grad_norm": 2.5717835426330566, "learning_rate": 0.0007807106943878245, "loss": 3.7271, "step": 25830 }, { "epoch": 1.75533360510939, "grad_norm": 
2.0119094848632812, "learning_rate": 0.0007806682293789917, "loss": 3.6874, "step": 25835 }, { "epoch": 1.7556733251800516, "grad_norm": 1.7375222444534302, "learning_rate": 0.0007806257643701589, "loss": 3.5741, "step": 25840 }, { "epoch": 1.7560130452507134, "grad_norm": 2.009744644165039, "learning_rate": 0.0007805832993613264, "loss": 3.6423, "step": 25845 }, { "epoch": 1.7563527653213753, "grad_norm": 2.2322192192077637, "learning_rate": 0.0007805408343524936, "loss": 3.662, "step": 25850 }, { "epoch": 1.756692485392037, "grad_norm": 1.753854751586914, "learning_rate": 0.0007804983693436608, "loss": 3.5368, "step": 25855 }, { "epoch": 1.7570322054626988, "grad_norm": 2.2690930366516113, "learning_rate": 0.0007804559043348282, "loss": 3.6237, "step": 25860 }, { "epoch": 1.7573719255333606, "grad_norm": 2.4537532329559326, "learning_rate": 0.0007804134393259954, "loss": 3.6157, "step": 25865 }, { "epoch": 1.7577116456040223, "grad_norm": 1.5543171167373657, "learning_rate": 0.0007803709743171626, "loss": 3.3558, "step": 25870 }, { "epoch": 1.758051365674684, "grad_norm": 2.075808525085449, "learning_rate": 0.0007803285093083299, "loss": 3.3155, "step": 25875 }, { "epoch": 1.758391085745346, "grad_norm": 1.943849802017212, "learning_rate": 0.0007802860442994973, "loss": 3.3774, "step": 25880 }, { "epoch": 1.7587308058160076, "grad_norm": 1.7245413064956665, "learning_rate": 0.0007802435792906645, "loss": 3.5354, "step": 25885 }, { "epoch": 1.7590705258866692, "grad_norm": 2.0178287029266357, "learning_rate": 0.0007802011142818318, "loss": 3.3812, "step": 25890 }, { "epoch": 1.7594102459573313, "grad_norm": 1.6917328834533691, "learning_rate": 0.0007801586492729991, "loss": 3.5287, "step": 25895 }, { "epoch": 1.759749966027993, "grad_norm": 1.872955083847046, "learning_rate": 0.0007801161842641663, "loss": 3.1713, "step": 25900 }, { "epoch": 1.7600896860986546, "grad_norm": 2.5121910572052, "learning_rate": 0.0007800737192553336, "loss": 3.5365, "step": 25905 }, { 
"epoch": 1.7604294061693166, "grad_norm": 1.9650663137435913, "learning_rate": 0.0007800312542465008, "loss": 3.665, "step": 25910 }, { "epoch": 1.7607691262399783, "grad_norm": 1.7023468017578125, "learning_rate": 0.0007799887892376682, "loss": 3.5291, "step": 25915 }, { "epoch": 1.76110884631064, "grad_norm": 1.743595004081726, "learning_rate": 0.0007799463242288355, "loss": 3.4354, "step": 25920 }, { "epoch": 1.7614485663813018, "grad_norm": 2.190004348754883, "learning_rate": 0.0007799038592200027, "loss": 3.5121, "step": 25925 }, { "epoch": 1.7617882864519636, "grad_norm": 1.6248581409454346, "learning_rate": 0.00077986139421117, "loss": 3.5279, "step": 25930 }, { "epoch": 1.7621280065226252, "grad_norm": 2.5460824966430664, "learning_rate": 0.0007798189292023373, "loss": 3.6303, "step": 25935 }, { "epoch": 1.762467726593287, "grad_norm": 2.1860454082489014, "learning_rate": 0.0007797764641935045, "loss": 3.4659, "step": 25940 }, { "epoch": 1.762807446663949, "grad_norm": 1.6352239847183228, "learning_rate": 0.0007797339991846718, "loss": 3.8149, "step": 25945 }, { "epoch": 1.7631471667346106, "grad_norm": 2.4146857261657715, "learning_rate": 0.0007796915341758392, "loss": 3.4887, "step": 25950 }, { "epoch": 1.7634868868052724, "grad_norm": 1.9181498289108276, "learning_rate": 0.0007796490691670064, "loss": 3.7165, "step": 25955 }, { "epoch": 1.7638266068759343, "grad_norm": 1.8046187162399292, "learning_rate": 0.0007796066041581736, "loss": 3.6961, "step": 25960 }, { "epoch": 1.764166326946596, "grad_norm": 1.9721488952636719, "learning_rate": 0.000779564139149341, "loss": 3.4873, "step": 25965 }, { "epoch": 1.7645060470172578, "grad_norm": 1.829228162765503, "learning_rate": 0.0007795216741405082, "loss": 3.787, "step": 25970 }, { "epoch": 1.7648457670879196, "grad_norm": 1.423202395439148, "learning_rate": 0.0007794792091316754, "loss": 3.5927, "step": 25975 }, { "epoch": 1.7651854871585813, "grad_norm": 1.4817588329315186, "learning_rate": 
0.0007794367441228429, "loss": 3.4388, "step": 25980 }, { "epoch": 1.765525207229243, "grad_norm": 2.4095919132232666, "learning_rate": 0.0007793942791140101, "loss": 3.4155, "step": 25985 }, { "epoch": 1.765864927299905, "grad_norm": 1.940173625946045, "learning_rate": 0.0007793518141051773, "loss": 3.5378, "step": 25990 }, { "epoch": 1.7662046473705666, "grad_norm": 2.0696847438812256, "learning_rate": 0.0007793093490963447, "loss": 3.8348, "step": 25995 }, { "epoch": 1.7665443674412284, "grad_norm": 1.7478995323181152, "learning_rate": 0.0007792668840875119, "loss": 3.6273, "step": 26000 }, { "epoch": 1.7668840875118903, "grad_norm": 1.6707974672317505, "learning_rate": 0.0007792244190786791, "loss": 3.4827, "step": 26005 }, { "epoch": 1.767223807582552, "grad_norm": 2.1016242504119873, "learning_rate": 0.0007791819540698464, "loss": 3.6757, "step": 26010 }, { "epoch": 1.7675635276532138, "grad_norm": 1.6731044054031372, "learning_rate": 0.0007791394890610138, "loss": 3.5592, "step": 26015 }, { "epoch": 1.7679032477238756, "grad_norm": 2.114224910736084, "learning_rate": 0.000779097024052181, "loss": 3.4568, "step": 26020 }, { "epoch": 1.7682429677945373, "grad_norm": 1.8154431581497192, "learning_rate": 0.0007790545590433483, "loss": 3.2866, "step": 26025 }, { "epoch": 1.7685826878651991, "grad_norm": 2.029017448425293, "learning_rate": 0.0007790120940345156, "loss": 3.4401, "step": 26030 }, { "epoch": 1.768922407935861, "grad_norm": 1.8870606422424316, "learning_rate": 0.0007789696290256828, "loss": 3.6299, "step": 26035 }, { "epoch": 1.7692621280065226, "grad_norm": 1.9421077966690063, "learning_rate": 0.0007789271640168501, "loss": 3.6468, "step": 26040 }, { "epoch": 1.7696018480771842, "grad_norm": 2.1602416038513184, "learning_rate": 0.0007788846990080174, "loss": 3.5474, "step": 26045 }, { "epoch": 1.7699415681478463, "grad_norm": 2.4455490112304688, "learning_rate": 0.0007788422339991847, "loss": 3.6744, "step": 26050 }, { "epoch": 1.770281288218508, 
"grad_norm": 1.9118095636367798, "learning_rate": 0.000778799768990352, "loss": 3.3837, "step": 26055 }, { "epoch": 1.7706210082891696, "grad_norm": 2.2585806846618652, "learning_rate": 0.0007787573039815192, "loss": 3.4998, "step": 26060 }, { "epoch": 1.7709607283598316, "grad_norm": 1.9654114246368408, "learning_rate": 0.0007787148389726865, "loss": 3.5637, "step": 26065 }, { "epoch": 1.7713004484304933, "grad_norm": 2.0795469284057617, "learning_rate": 0.0007786723739638538, "loss": 3.3196, "step": 26070 }, { "epoch": 1.771640168501155, "grad_norm": 2.022200345993042, "learning_rate": 0.000778629908955021, "loss": 3.3278, "step": 26075 }, { "epoch": 1.771979888571817, "grad_norm": 2.1321754455566406, "learning_rate": 0.0007785874439461884, "loss": 3.726, "step": 26080 }, { "epoch": 1.7723196086424786, "grad_norm": 1.819328784942627, "learning_rate": 0.0007785449789373557, "loss": 3.4207, "step": 26085 }, { "epoch": 1.7726593287131402, "grad_norm": 1.8266879320144653, "learning_rate": 0.0007785025139285229, "loss": 3.5667, "step": 26090 }, { "epoch": 1.772999048783802, "grad_norm": 2.51706600189209, "learning_rate": 0.0007784600489196903, "loss": 3.318, "step": 26095 }, { "epoch": 1.773338768854464, "grad_norm": 2.1409049034118652, "learning_rate": 0.0007784175839108575, "loss": 3.422, "step": 26100 }, { "epoch": 1.7736784889251256, "grad_norm": 1.3352044820785522, "learning_rate": 0.0007783751189020247, "loss": 3.4706, "step": 26105 }, { "epoch": 1.7740182089957874, "grad_norm": 2.3013339042663574, "learning_rate": 0.000778332653893192, "loss": 3.2753, "step": 26110 }, { "epoch": 1.7743579290664493, "grad_norm": 2.1084585189819336, "learning_rate": 0.0007782901888843593, "loss": 3.5983, "step": 26115 }, { "epoch": 1.774697649137111, "grad_norm": 2.176020383834839, "learning_rate": 0.0007782477238755266, "loss": 3.69, "step": 26120 }, { "epoch": 1.7750373692077728, "grad_norm": 1.8545968532562256, "learning_rate": 0.0007782052588666939, "loss": 3.5559, "step": 
26125 }, { "epoch": 1.7753770892784346, "grad_norm": 1.926684856414795, "learning_rate": 0.0007781627938578612, "loss": 3.5023, "step": 26130 }, { "epoch": 1.7757168093490963, "grad_norm": 2.1261489391326904, "learning_rate": 0.0007781203288490284, "loss": 3.5448, "step": 26135 }, { "epoch": 1.7760565294197581, "grad_norm": 1.709985375404358, "learning_rate": 0.0007780778638401957, "loss": 3.4981, "step": 26140 }, { "epoch": 1.77639624949042, "grad_norm": 2.2492048740386963, "learning_rate": 0.000778035398831363, "loss": 3.4982, "step": 26145 }, { "epoch": 1.7767359695610816, "grad_norm": 2.1797876358032227, "learning_rate": 0.0007779929338225302, "loss": 3.3939, "step": 26150 }, { "epoch": 1.7770756896317434, "grad_norm": 1.9279884099960327, "learning_rate": 0.0007779504688136976, "loss": 3.3608, "step": 26155 }, { "epoch": 1.7774154097024053, "grad_norm": 1.7969236373901367, "learning_rate": 0.0007779080038048648, "loss": 3.5341, "step": 26160 }, { "epoch": 1.777755129773067, "grad_norm": 2.2409725189208984, "learning_rate": 0.0007778655387960321, "loss": 3.6467, "step": 26165 }, { "epoch": 1.7780948498437288, "grad_norm": 2.58180570602417, "learning_rate": 0.0007778230737871994, "loss": 3.6247, "step": 26170 }, { "epoch": 1.7784345699143906, "grad_norm": 2.200249195098877, "learning_rate": 0.0007777806087783666, "loss": 3.3335, "step": 26175 }, { "epoch": 1.7787742899850523, "grad_norm": 1.7941057682037354, "learning_rate": 0.0007777381437695339, "loss": 3.7952, "step": 26180 }, { "epoch": 1.7791140100557141, "grad_norm": 1.6263114213943481, "learning_rate": 0.0007776956787607012, "loss": 3.5684, "step": 26185 }, { "epoch": 1.779453730126376, "grad_norm": 2.1227617263793945, "learning_rate": 0.0007776532137518685, "loss": 3.4023, "step": 26190 }, { "epoch": 1.7797934501970376, "grad_norm": 1.95046067237854, "learning_rate": 0.0007776107487430358, "loss": 3.3097, "step": 26195 }, { "epoch": 1.7801331702676995, "grad_norm": 2.3608949184417725, "learning_rate": 
0.0007775682837342031, "loss": 3.3665, "step": 26200 }, { "epoch": 1.7804728903383613, "grad_norm": 1.905358076095581, "learning_rate": 0.0007775258187253703, "loss": 3.481, "step": 26205 }, { "epoch": 1.780812610409023, "grad_norm": 2.160761833190918, "learning_rate": 0.0007774833537165375, "loss": 3.4946, "step": 26210 }, { "epoch": 1.7811523304796846, "grad_norm": 2.1533117294311523, "learning_rate": 0.0007774408887077049, "loss": 3.6241, "step": 26215 }, { "epoch": 1.7814920505503467, "grad_norm": 1.8992141485214233, "learning_rate": 0.0007773984236988721, "loss": 3.582, "step": 26220 }, { "epoch": 1.7818317706210083, "grad_norm": 1.5159318447113037, "learning_rate": 0.0007773559586900394, "loss": 3.5748, "step": 26225 }, { "epoch": 1.78217149069167, "grad_norm": 1.809570074081421, "learning_rate": 0.0007773134936812068, "loss": 3.6901, "step": 26230 }, { "epoch": 1.782511210762332, "grad_norm": 1.6238775253295898, "learning_rate": 0.000777271028672374, "loss": 3.5862, "step": 26235 }, { "epoch": 1.7828509308329936, "grad_norm": 2.188758134841919, "learning_rate": 0.0007772285636635412, "loss": 3.5534, "step": 26240 }, { "epoch": 1.7831906509036552, "grad_norm": 2.0384597778320312, "learning_rate": 0.0007771860986547086, "loss": 3.6095, "step": 26245 }, { "epoch": 1.7835303709743173, "grad_norm": 2.01759672164917, "learning_rate": 0.0007771436336458758, "loss": 3.771, "step": 26250 }, { "epoch": 1.783870091044979, "grad_norm": 1.6717607975006104, "learning_rate": 0.000777101168637043, "loss": 3.4675, "step": 26255 }, { "epoch": 1.7842098111156406, "grad_norm": 2.2479536533355713, "learning_rate": 0.0007770587036282105, "loss": 3.5564, "step": 26260 }, { "epoch": 1.7845495311863024, "grad_norm": 1.9271337985992432, "learning_rate": 0.0007770162386193777, "loss": 3.4925, "step": 26265 }, { "epoch": 1.7848892512569643, "grad_norm": 1.5457578897476196, "learning_rate": 0.0007769737736105449, "loss": 3.5943, "step": 26270 }, { "epoch": 1.785228971327626, 
"grad_norm": 2.181164026260376, "learning_rate": 0.0007769313086017122, "loss": 3.519, "step": 26275 }, { "epoch": 1.7855686913982878, "grad_norm": 1.6721975803375244, "learning_rate": 0.0007768888435928795, "loss": 3.6089, "step": 26280 }, { "epoch": 1.7859084114689496, "grad_norm": 2.0259549617767334, "learning_rate": 0.0007768463785840467, "loss": 3.6314, "step": 26285 }, { "epoch": 1.7862481315396113, "grad_norm": 1.7239779233932495, "learning_rate": 0.000776803913575214, "loss": 3.602, "step": 26290 }, { "epoch": 1.7865878516102731, "grad_norm": 1.968583106994629, "learning_rate": 0.0007767614485663814, "loss": 3.6238, "step": 26295 }, { "epoch": 1.786927571680935, "grad_norm": 1.5892367362976074, "learning_rate": 0.0007767189835575486, "loss": 3.4343, "step": 26300 }, { "epoch": 1.7872672917515966, "grad_norm": 1.670674204826355, "learning_rate": 0.0007766765185487159, "loss": 3.438, "step": 26305 }, { "epoch": 1.7876070118222585, "grad_norm": 2.052812337875366, "learning_rate": 0.0007766340535398831, "loss": 3.5646, "step": 26310 }, { "epoch": 1.7879467318929203, "grad_norm": 1.9142816066741943, "learning_rate": 0.0007765915885310504, "loss": 3.6525, "step": 26315 }, { "epoch": 1.788286451963582, "grad_norm": 1.6414393186569214, "learning_rate": 0.0007765491235222177, "loss": 3.3864, "step": 26320 }, { "epoch": 1.7886261720342438, "grad_norm": 1.8164596557617188, "learning_rate": 0.0007765066585133849, "loss": 3.3939, "step": 26325 }, { "epoch": 1.7889658921049056, "grad_norm": 2.1442770957946777, "learning_rate": 0.0007764641935045523, "loss": 3.5852, "step": 26330 }, { "epoch": 1.7893056121755673, "grad_norm": 3.1066031455993652, "learning_rate": 0.0007764217284957196, "loss": 3.4292, "step": 26335 }, { "epoch": 1.7896453322462291, "grad_norm": 2.0107531547546387, "learning_rate": 0.0007763792634868868, "loss": 3.6433, "step": 26340 }, { "epoch": 1.789985052316891, "grad_norm": 1.755921721458435, "learning_rate": 0.000776336798478054, "loss": 3.5903, 
"step": 26345 }, { "epoch": 1.7903247723875526, "grad_norm": 2.1705541610717773, "learning_rate": 0.0007762943334692214, "loss": 3.9216, "step": 26350 }, { "epoch": 1.7906644924582145, "grad_norm": 1.8980458974838257, "learning_rate": 0.0007762518684603886, "loss": 3.727, "step": 26355 }, { "epoch": 1.7910042125288763, "grad_norm": 1.804860234260559, "learning_rate": 0.0007762094034515558, "loss": 3.5356, "step": 26360 }, { "epoch": 1.791343932599538, "grad_norm": 1.9174402952194214, "learning_rate": 0.0007761669384427233, "loss": 3.6157, "step": 26365 }, { "epoch": 1.7916836526701998, "grad_norm": 2.1306474208831787, "learning_rate": 0.0007761244734338905, "loss": 3.644, "step": 26370 }, { "epoch": 1.7920233727408617, "grad_norm": 1.8213073015213013, "learning_rate": 0.0007760820084250577, "loss": 3.5562, "step": 26375 }, { "epoch": 1.7923630928115233, "grad_norm": 1.85281240940094, "learning_rate": 0.0007760395434162251, "loss": 3.7924, "step": 26380 }, { "epoch": 1.792702812882185, "grad_norm": 1.8423385620117188, "learning_rate": 0.0007759970784073923, "loss": 3.6621, "step": 26385 }, { "epoch": 1.793042532952847, "grad_norm": 2.2908997535705566, "learning_rate": 0.0007759546133985595, "loss": 3.6135, "step": 26390 }, { "epoch": 1.7933822530235086, "grad_norm": 1.619513750076294, "learning_rate": 0.0007759121483897269, "loss": 3.7507, "step": 26395 }, { "epoch": 1.7937219730941703, "grad_norm": 2.0022573471069336, "learning_rate": 0.0007758696833808942, "loss": 3.4875, "step": 26400 }, { "epoch": 1.7940616931648323, "grad_norm": 1.7330870628356934, "learning_rate": 0.0007758272183720614, "loss": 3.5326, "step": 26405 }, { "epoch": 1.794401413235494, "grad_norm": 1.8657814264297485, "learning_rate": 0.0007757847533632287, "loss": 3.4744, "step": 26410 }, { "epoch": 1.7947411333061556, "grad_norm": 1.419620156288147, "learning_rate": 0.000775742288354396, "loss": 3.5162, "step": 26415 }, { "epoch": 1.7950808533768177, "grad_norm": 1.6969765424728394, 
"learning_rate": 0.0007756998233455633, "loss": 3.3494, "step": 26420 }, { "epoch": 1.7954205734474793, "grad_norm": 2.2015933990478516, "learning_rate": 0.0007756573583367305, "loss": 3.4086, "step": 26425 }, { "epoch": 1.795760293518141, "grad_norm": 2.3158888816833496, "learning_rate": 0.0007756148933278978, "loss": 3.502, "step": 26430 }, { "epoch": 1.7961000135888028, "grad_norm": 2.35949444770813, "learning_rate": 0.0007755724283190652, "loss": 3.3292, "step": 26435 }, { "epoch": 1.7964397336594646, "grad_norm": 2.6258463859558105, "learning_rate": 0.0007755299633102324, "loss": 3.7442, "step": 26440 }, { "epoch": 1.7967794537301263, "grad_norm": 1.5349769592285156, "learning_rate": 0.0007754874983013997, "loss": 3.3791, "step": 26445 }, { "epoch": 1.7971191738007881, "grad_norm": 1.8676457405090332, "learning_rate": 0.000775445033292567, "loss": 3.2784, "step": 26450 }, { "epoch": 1.79745889387145, "grad_norm": 2.4037303924560547, "learning_rate": 0.0007754025682837342, "loss": 3.5543, "step": 26455 }, { "epoch": 1.7977986139421116, "grad_norm": 2.2450597286224365, "learning_rate": 0.0007753601032749014, "loss": 3.6474, "step": 26460 }, { "epoch": 1.7981383340127735, "grad_norm": 1.6200833320617676, "learning_rate": 0.0007753176382660688, "loss": 3.4757, "step": 26465 }, { "epoch": 1.7984780540834353, "grad_norm": 2.0388193130493164, "learning_rate": 0.0007752751732572361, "loss": 3.5496, "step": 26470 }, { "epoch": 1.798817774154097, "grad_norm": 2.219073534011841, "learning_rate": 0.0007752327082484033, "loss": 3.5543, "step": 26475 }, { "epoch": 1.7991574942247588, "grad_norm": 1.8688132762908936, "learning_rate": 0.0007751902432395707, "loss": 3.6571, "step": 26480 }, { "epoch": 1.7994972142954206, "grad_norm": 13.360969543457031, "learning_rate": 0.0007751477782307379, "loss": 3.5004, "step": 26485 }, { "epoch": 1.7998369343660823, "grad_norm": 2.197066068649292, "learning_rate": 0.0007751053132219051, "loss": 3.5209, "step": 26490 }, { "epoch": 
1.8001766544367441, "grad_norm": 1.722739338874817, "learning_rate": 0.0007750628482130725, "loss": 3.5961, "step": 26495 }, { "epoch": 1.800516374507406, "grad_norm": 1.7826082706451416, "learning_rate": 0.0007750203832042397, "loss": 3.5505, "step": 26500 }, { "epoch": 1.8008560945780676, "grad_norm": 2.6572790145874023, "learning_rate": 0.000774977918195407, "loss": 3.4157, "step": 26505 }, { "epoch": 1.8011958146487295, "grad_norm": 1.9238001108169556, "learning_rate": 0.0007749354531865743, "loss": 3.8164, "step": 26510 }, { "epoch": 1.8015355347193913, "grad_norm": 2.1414449214935303, "learning_rate": 0.0007748929881777416, "loss": 3.6422, "step": 26515 }, { "epoch": 1.801875254790053, "grad_norm": 1.8275905847549438, "learning_rate": 0.0007748505231689088, "loss": 3.2778, "step": 26520 }, { "epoch": 1.8022149748607148, "grad_norm": 1.868326187133789, "learning_rate": 0.0007748080581600761, "loss": 3.7495, "step": 26525 }, { "epoch": 1.8025546949313767, "grad_norm": 1.7701306343078613, "learning_rate": 0.0007747655931512434, "loss": 3.4832, "step": 26530 }, { "epoch": 1.8028944150020383, "grad_norm": 2.1704888343811035, "learning_rate": 0.0007747231281424106, "loss": 3.7554, "step": 26535 }, { "epoch": 1.8032341350727001, "grad_norm": 2.001795768737793, "learning_rate": 0.000774680663133578, "loss": 3.6137, "step": 26540 }, { "epoch": 1.803573855143362, "grad_norm": 2.037165403366089, "learning_rate": 0.0007746381981247453, "loss": 3.5933, "step": 26545 }, { "epoch": 1.8039135752140236, "grad_norm": 2.3130648136138916, "learning_rate": 0.000774604226117679, "loss": 3.5271, "step": 26550 }, { "epoch": 1.8042532952846853, "grad_norm": 1.870412826538086, "learning_rate": 0.0007745617611088463, "loss": 3.7473, "step": 26555 }, { "epoch": 1.8045930153553473, "grad_norm": 2.2626895904541016, "learning_rate": 0.0007745192961000136, "loss": 3.8117, "step": 26560 }, { "epoch": 1.804932735426009, "grad_norm": 1.5938310623168945, "learning_rate": 0.0007744768310911809, 
"loss": 3.5161, "step": 26565 }, { "epoch": 1.8052724554966706, "grad_norm": 2.0119411945343018, "learning_rate": 0.0007744343660823482, "loss": 3.6142, "step": 26570 }, { "epoch": 1.8056121755673327, "grad_norm": 2.1774544715881348, "learning_rate": 0.0007743919010735154, "loss": 3.4081, "step": 26575 }, { "epoch": 1.8059518956379943, "grad_norm": 1.8998535871505737, "learning_rate": 0.0007743494360646826, "loss": 3.4628, "step": 26580 }, { "epoch": 1.806291615708656, "grad_norm": 1.8271706104278564, "learning_rate": 0.00077430697105585, "loss": 3.5006, "step": 26585 }, { "epoch": 1.806631335779318, "grad_norm": 1.8590805530548096, "learning_rate": 0.0007742645060470172, "loss": 3.5556, "step": 26590 }, { "epoch": 1.8069710558499796, "grad_norm": 1.9832192659378052, "learning_rate": 0.0007742220410381845, "loss": 3.6576, "step": 26595 }, { "epoch": 1.8073107759206413, "grad_norm": 2.07606840133667, "learning_rate": 0.0007741795760293519, "loss": 3.6655, "step": 26600 }, { "epoch": 1.8076504959913031, "grad_norm": 3.935300350189209, "learning_rate": 0.0007741371110205191, "loss": 3.5347, "step": 26605 }, { "epoch": 1.807990216061965, "grad_norm": 1.642800211906433, "learning_rate": 0.0007740946460116863, "loss": 3.6206, "step": 26610 }, { "epoch": 1.8083299361326266, "grad_norm": 1.5357545614242554, "learning_rate": 0.0007740521810028537, "loss": 3.5107, "step": 26615 }, { "epoch": 1.8086696562032885, "grad_norm": 1.663451910018921, "learning_rate": 0.0007740097159940209, "loss": 3.5164, "step": 26620 }, { "epoch": 1.8090093762739503, "grad_norm": 2.083974838256836, "learning_rate": 0.0007739672509851882, "loss": 3.4647, "step": 26625 }, { "epoch": 1.809349096344612, "grad_norm": 1.7136940956115723, "learning_rate": 0.0007739247859763556, "loss": 3.5957, "step": 26630 }, { "epoch": 1.8096888164152738, "grad_norm": 1.597852349281311, "learning_rate": 0.0007738823209675228, "loss": 3.8073, "step": 26635 }, { "epoch": 1.8100285364859356, "grad_norm": 
2.002103328704834, "learning_rate": 0.0007738398559586901, "loss": 3.519, "step": 26640 }, { "epoch": 1.8103682565565973, "grad_norm": 1.6205568313598633, "learning_rate": 0.0007737973909498573, "loss": 3.554, "step": 26645 }, { "epoch": 1.8107079766272591, "grad_norm": 1.8017230033874512, "learning_rate": 0.0007737549259410246, "loss": 3.6705, "step": 26650 }, { "epoch": 1.811047696697921, "grad_norm": 1.9550813436508179, "learning_rate": 0.0007737124609321919, "loss": 3.6249, "step": 26655 }, { "epoch": 1.8113874167685826, "grad_norm": 1.7798559665679932, "learning_rate": 0.0007736699959233591, "loss": 3.5029, "step": 26660 }, { "epoch": 1.8117271368392445, "grad_norm": 1.6845568418502808, "learning_rate": 0.0007736275309145265, "loss": 3.4855, "step": 26665 }, { "epoch": 1.8120668569099063, "grad_norm": 2.0391294956207275, "learning_rate": 0.0007735850659056938, "loss": 3.4365, "step": 26670 }, { "epoch": 1.812406576980568, "grad_norm": 2.2894668579101562, "learning_rate": 0.000773542600896861, "loss": 3.5215, "step": 26675 }, { "epoch": 1.8127462970512298, "grad_norm": 1.8296414613723755, "learning_rate": 0.0007735001358880283, "loss": 3.4136, "step": 26680 }, { "epoch": 1.8130860171218917, "grad_norm": 1.8252779245376587, "learning_rate": 0.0007734576708791956, "loss": 3.5723, "step": 26685 }, { "epoch": 1.8134257371925533, "grad_norm": 1.9877376556396484, "learning_rate": 0.0007734152058703628, "loss": 3.4358, "step": 26690 }, { "epoch": 1.8137654572632151, "grad_norm": 2.2084403038024902, "learning_rate": 0.00077337274086153, "loss": 3.8708, "step": 26695 }, { "epoch": 1.814105177333877, "grad_norm": 1.5048854351043701, "learning_rate": 0.0007733302758526975, "loss": 3.6436, "step": 26700 }, { "epoch": 1.8144448974045386, "grad_norm": 1.9105616807937622, "learning_rate": 0.0007732878108438647, "loss": 3.3476, "step": 26705 }, { "epoch": 1.8147846174752005, "grad_norm": 1.6426924467086792, "learning_rate": 0.0007732453458350319, "loss": 3.6528, "step": 26710 
}, { "epoch": 1.8151243375458623, "grad_norm": 2.1946346759796143, "learning_rate": 0.0007732028808261993, "loss": 3.5105, "step": 26715 }, { "epoch": 1.815464057616524, "grad_norm": 2.1869330406188965, "learning_rate": 0.0007731604158173665, "loss": 3.5671, "step": 26720 }, { "epoch": 1.8158037776871856, "grad_norm": 1.642791509628296, "learning_rate": 0.0007731179508085337, "loss": 3.51, "step": 26725 }, { "epoch": 1.8161434977578477, "grad_norm": 2.130632162094116, "learning_rate": 0.0007730754857997012, "loss": 3.6814, "step": 26730 }, { "epoch": 1.8164832178285093, "grad_norm": 2.047659397125244, "learning_rate": 0.0007730330207908684, "loss": 3.7334, "step": 26735 }, { "epoch": 1.816822937899171, "grad_norm": 1.7286359071731567, "learning_rate": 0.0007729905557820356, "loss": 3.2783, "step": 26740 }, { "epoch": 1.817162657969833, "grad_norm": 1.8128468990325928, "learning_rate": 0.000772948090773203, "loss": 3.6948, "step": 26745 }, { "epoch": 1.8175023780404946, "grad_norm": 2.935516119003296, "learning_rate": 0.0007729056257643702, "loss": 3.7413, "step": 26750 }, { "epoch": 1.8178420981111563, "grad_norm": 1.3637845516204834, "learning_rate": 0.0007728631607555374, "loss": 3.5885, "step": 26755 }, { "epoch": 1.8181818181818183, "grad_norm": 2.715554714202881, "learning_rate": 0.0007728206957467047, "loss": 3.4971, "step": 26760 }, { "epoch": 1.81852153825248, "grad_norm": 1.870681881904602, "learning_rate": 0.0007727782307378721, "loss": 3.348, "step": 26765 }, { "epoch": 1.8188612583231416, "grad_norm": 2.277263641357422, "learning_rate": 0.0007727357657290393, "loss": 3.561, "step": 26770 }, { "epoch": 1.8192009783938035, "grad_norm": 2.7550673484802246, "learning_rate": 0.0007726933007202066, "loss": 3.7039, "step": 26775 }, { "epoch": 1.8195406984644653, "grad_norm": 2.6252920627593994, "learning_rate": 0.0007726508357113739, "loss": 3.4417, "step": 26780 }, { "epoch": 1.819880418535127, "grad_norm": 1.8669710159301758, "learning_rate": 
0.0007726083707025411, "loss": 3.543, "step": 26785 }, { "epoch": 1.8202201386057888, "grad_norm": 2.4262843132019043, "learning_rate": 0.0007725659056937084, "loss": 3.4297, "step": 26790 }, { "epoch": 1.8205598586764506, "grad_norm": 2.0202643871307373, "learning_rate": 0.0007725234406848756, "loss": 3.4709, "step": 26795 }, { "epoch": 1.8208995787471123, "grad_norm": 1.9643136262893677, "learning_rate": 0.000772480975676043, "loss": 3.6387, "step": 26800 }, { "epoch": 1.8212392988177741, "grad_norm": 1.9489153623580933, "learning_rate": 0.0007724385106672103, "loss": 3.7276, "step": 26805 }, { "epoch": 1.821579018888436, "grad_norm": 2.5298564434051514, "learning_rate": 0.0007723960456583775, "loss": 3.5993, "step": 26810 }, { "epoch": 1.8219187389590976, "grad_norm": 1.9682644605636597, "learning_rate": 0.0007723535806495448, "loss": 3.3999, "step": 26815 }, { "epoch": 1.8222584590297595, "grad_norm": 1.7642608880996704, "learning_rate": 0.0007723111156407121, "loss": 3.3848, "step": 26820 }, { "epoch": 1.8225981791004213, "grad_norm": 2.2424845695495605, "learning_rate": 0.0007722686506318793, "loss": 3.4067, "step": 26825 }, { "epoch": 1.822937899171083, "grad_norm": 1.8435527086257935, "learning_rate": 0.0007722261856230465, "loss": 3.475, "step": 26830 }, { "epoch": 1.8232776192417448, "grad_norm": 1.8805272579193115, "learning_rate": 0.000772183720614214, "loss": 3.5128, "step": 26835 }, { "epoch": 1.8236173393124067, "grad_norm": 1.8232485055923462, "learning_rate": 0.0007721412556053812, "loss": 3.5865, "step": 26840 }, { "epoch": 1.8239570593830683, "grad_norm": 2.038296937942505, "learning_rate": 0.0007720987905965484, "loss": 3.5617, "step": 26845 }, { "epoch": 1.8242967794537301, "grad_norm": 1.7328726053237915, "learning_rate": 0.0007720563255877158, "loss": 3.6425, "step": 26850 }, { "epoch": 1.824636499524392, "grad_norm": 2.028968334197998, "learning_rate": 0.000772013860578883, "loss": 3.5859, "step": 26855 }, { "epoch": 1.8249762195950536, 
"grad_norm": 3.1723592281341553, "learning_rate": 0.0007719713955700502, "loss": 3.3841, "step": 26860 }, { "epoch": 1.8253159396657155, "grad_norm": 1.7322545051574707, "learning_rate": 0.0007719289305612176, "loss": 3.5809, "step": 26865 }, { "epoch": 1.8256556597363773, "grad_norm": 2.086751699447632, "learning_rate": 0.0007718864655523849, "loss": 3.6896, "step": 26870 }, { "epoch": 1.825995379807039, "grad_norm": 2.306055784225464, "learning_rate": 0.0007718440005435521, "loss": 3.9815, "step": 26875 }, { "epoch": 1.8263350998777008, "grad_norm": 1.8198786973953247, "learning_rate": 0.0007718015355347195, "loss": 3.5006, "step": 26880 }, { "epoch": 1.8266748199483627, "grad_norm": 1.8263912200927734, "learning_rate": 0.0007717590705258867, "loss": 3.4543, "step": 26885 }, { "epoch": 1.8270145400190243, "grad_norm": 2.7946507930755615, "learning_rate": 0.0007717166055170539, "loss": 3.5697, "step": 26890 }, { "epoch": 1.827354260089686, "grad_norm": 1.7723100185394287, "learning_rate": 0.0007716741405082212, "loss": 3.5387, "step": 26895 }, { "epoch": 1.827693980160348, "grad_norm": 2.3368453979492188, "learning_rate": 0.0007716316754993885, "loss": 3.3813, "step": 26900 }, { "epoch": 1.8280337002310096, "grad_norm": 2.7844338417053223, "learning_rate": 0.0007715892104905558, "loss": 3.5561, "step": 26905 }, { "epoch": 1.8283734203016713, "grad_norm": 1.8231568336486816, "learning_rate": 0.0007715467454817231, "loss": 3.4612, "step": 26910 }, { "epoch": 1.8287131403723333, "grad_norm": 2.145444631576538, "learning_rate": 0.0007715042804728904, "loss": 3.6913, "step": 26915 }, { "epoch": 1.829052860442995, "grad_norm": 2.0792880058288574, "learning_rate": 0.0007714618154640576, "loss": 3.4731, "step": 26920 }, { "epoch": 1.8293925805136566, "grad_norm": 2.8540120124816895, "learning_rate": 0.0007714193504552249, "loss": 3.4525, "step": 26925 }, { "epoch": 1.8297323005843187, "grad_norm": 1.8824135065078735, "learning_rate": 0.0007713768854463921, "loss": 3.5986, 
"step": 26930 }, { "epoch": 1.8300720206549803, "grad_norm": 1.4424251317977905, "learning_rate": 0.0007713344204375594, "loss": 3.5849, "step": 26935 }, { "epoch": 1.830411740725642, "grad_norm": 2.076561450958252, "learning_rate": 0.0007712919554287268, "loss": 3.5256, "step": 26940 }, { "epoch": 1.8307514607963038, "grad_norm": 1.7819149494171143, "learning_rate": 0.000771249490419894, "loss": 3.3982, "step": 26945 }, { "epoch": 1.8310911808669657, "grad_norm": 2.4246277809143066, "learning_rate": 0.0007712070254110613, "loss": 3.229, "step": 26950 }, { "epoch": 1.8314309009376273, "grad_norm": 2.788156747817993, "learning_rate": 0.0007711645604022286, "loss": 3.5673, "step": 26955 }, { "epoch": 1.8317706210082891, "grad_norm": 1.5822327136993408, "learning_rate": 0.0007711220953933958, "loss": 3.5349, "step": 26960 }, { "epoch": 1.832110341078951, "grad_norm": 1.952601432800293, "learning_rate": 0.0007710796303845632, "loss": 3.5184, "step": 26965 }, { "epoch": 1.8324500611496126, "grad_norm": 1.9271857738494873, "learning_rate": 0.0007710371653757304, "loss": 3.6024, "step": 26970 }, { "epoch": 1.8327897812202745, "grad_norm": 2.0258584022521973, "learning_rate": 0.0007709947003668977, "loss": 3.8119, "step": 26975 }, { "epoch": 1.8331295012909363, "grad_norm": 1.5896430015563965, "learning_rate": 0.0007709522353580651, "loss": 3.5529, "step": 26980 }, { "epoch": 1.833469221361598, "grad_norm": 1.99948251247406, "learning_rate": 0.0007709097703492323, "loss": 3.52, "step": 26985 }, { "epoch": 1.8338089414322598, "grad_norm": 2.042479991912842, "learning_rate": 0.0007708673053403995, "loss": 3.4517, "step": 26990 }, { "epoch": 1.8341486615029217, "grad_norm": 1.7131555080413818, "learning_rate": 0.0007708248403315668, "loss": 3.2922, "step": 26995 }, { "epoch": 1.8344883815735833, "grad_norm": 1.7944613695144653, "learning_rate": 0.0007707823753227341, "loss": 3.6372, "step": 27000 }, { "epoch": 1.8348281016442451, "grad_norm": 2.3093106746673584, 
"learning_rate": 0.0007707399103139013, "loss": 3.3783, "step": 27005 }, { "epoch": 1.835167821714907, "grad_norm": 2.325122356414795, "learning_rate": 0.0007706974453050687, "loss": 3.3643, "step": 27010 }, { "epoch": 1.8355075417855686, "grad_norm": 1.6738390922546387, "learning_rate": 0.000770654980296236, "loss": 3.5211, "step": 27015 }, { "epoch": 1.8358472618562305, "grad_norm": 2.0308914184570312, "learning_rate": 0.0007706125152874032, "loss": 3.6114, "step": 27020 }, { "epoch": 1.8361869819268923, "grad_norm": 2.30039644241333, "learning_rate": 0.0007705700502785705, "loss": 3.7072, "step": 27025 }, { "epoch": 1.836526701997554, "grad_norm": 2.4952001571655273, "learning_rate": 0.0007705275852697377, "loss": 3.3846, "step": 27030 }, { "epoch": 1.8368664220682158, "grad_norm": 1.646220088005066, "learning_rate": 0.000770485120260905, "loss": 3.5229, "step": 27035 }, { "epoch": 1.8372061421388777, "grad_norm": 1.955236792564392, "learning_rate": 0.0007704426552520723, "loss": 3.2398, "step": 27040 }, { "epoch": 1.8375458622095393, "grad_norm": 2.5731422901153564, "learning_rate": 0.0007704001902432396, "loss": 3.4361, "step": 27045 }, { "epoch": 1.8378855822802012, "grad_norm": 1.874875545501709, "learning_rate": 0.0007703577252344069, "loss": 3.6268, "step": 27050 }, { "epoch": 1.838225302350863, "grad_norm": 1.8987629413604736, "learning_rate": 0.0007703152602255742, "loss": 3.5802, "step": 27055 }, { "epoch": 1.8385650224215246, "grad_norm": 1.8412704467773438, "learning_rate": 0.0007702727952167414, "loss": 3.4967, "step": 27060 }, { "epoch": 1.8389047424921863, "grad_norm": 2.1039440631866455, "learning_rate": 0.0007702303302079087, "loss": 3.6864, "step": 27065 }, { "epoch": 1.8392444625628483, "grad_norm": 1.8961361646652222, "learning_rate": 0.000770187865199076, "loss": 3.5395, "step": 27070 }, { "epoch": 1.83958418263351, "grad_norm": 1.9197665452957153, "learning_rate": 0.0007701454001902432, "loss": 3.5566, "step": 27075 }, { "epoch": 
1.8399239027041716, "grad_norm": 2.1236953735351562, "learning_rate": 0.0007701029351814105, "loss": 3.4822, "step": 27080 }, { "epoch": 1.8402636227748337, "grad_norm": 3.1203863620758057, "learning_rate": 0.0007700604701725779, "loss": 3.6434, "step": 27085 }, { "epoch": 1.8406033428454953, "grad_norm": 2.0773706436157227, "learning_rate": 0.0007700180051637451, "loss": 3.546, "step": 27090 }, { "epoch": 1.840943062916157, "grad_norm": 2.134498357772827, "learning_rate": 0.0007699755401549123, "loss": 3.3482, "step": 27095 }, { "epoch": 1.841282782986819, "grad_norm": 2.45295786857605, "learning_rate": 0.0007699330751460797, "loss": 3.542, "step": 27100 }, { "epoch": 1.8416225030574807, "grad_norm": 1.7702380418777466, "learning_rate": 0.0007698906101372469, "loss": 3.5077, "step": 27105 }, { "epoch": 1.8419622231281423, "grad_norm": 1.7179415225982666, "learning_rate": 0.0007698481451284141, "loss": 3.5994, "step": 27110 }, { "epoch": 1.8423019431988041, "grad_norm": 1.819347858428955, "learning_rate": 0.0007698056801195816, "loss": 3.3436, "step": 27115 }, { "epoch": 1.842641663269466, "grad_norm": 1.8358031511306763, "learning_rate": 0.0007697632151107488, "loss": 3.6605, "step": 27120 }, { "epoch": 1.8429813833401276, "grad_norm": 2.101956605911255, "learning_rate": 0.000769720750101916, "loss": 3.6847, "step": 27125 }, { "epoch": 1.8433211034107895, "grad_norm": 1.8454796075820923, "learning_rate": 0.0007696782850930833, "loss": 3.5669, "step": 27130 }, { "epoch": 1.8436608234814513, "grad_norm": 1.6915318965911865, "learning_rate": 0.0007696358200842506, "loss": 3.5386, "step": 27135 }, { "epoch": 1.844000543552113, "grad_norm": 1.632065773010254, "learning_rate": 0.0007695933550754178, "loss": 3.5851, "step": 27140 }, { "epoch": 1.8443402636227748, "grad_norm": 1.6471941471099854, "learning_rate": 0.0007695508900665851, "loss": 3.4015, "step": 27145 }, { "epoch": 1.8446799836934367, "grad_norm": 2.483654022216797, "learning_rate": 0.0007695084250577525, 
"loss": 3.587, "step": 27150 }, { "epoch": 1.8450197037640983, "grad_norm": 1.9097473621368408, "learning_rate": 0.0007694659600489197, "loss": 3.5875, "step": 27155 }, { "epoch": 1.8453594238347601, "grad_norm": 2.068725109100342, "learning_rate": 0.000769423495040087, "loss": 3.6865, "step": 27160 }, { "epoch": 1.845699143905422, "grad_norm": 2.4569811820983887, "learning_rate": 0.0007693810300312543, "loss": 3.3317, "step": 27165 }, { "epoch": 1.8460388639760836, "grad_norm": 2.011319398880005, "learning_rate": 0.0007693385650224215, "loss": 3.4731, "step": 27170 }, { "epoch": 1.8463785840467455, "grad_norm": 1.9032338857650757, "learning_rate": 0.0007692961000135888, "loss": 3.7067, "step": 27175 }, { "epoch": 1.8467183041174073, "grad_norm": 2.560387372970581, "learning_rate": 0.000769253635004756, "loss": 3.8777, "step": 27180 }, { "epoch": 1.847058024188069, "grad_norm": 2.4362688064575195, "learning_rate": 0.0007692111699959234, "loss": 3.651, "step": 27185 }, { "epoch": 1.8473977442587308, "grad_norm": 1.9380851984024048, "learning_rate": 0.0007691687049870907, "loss": 3.7259, "step": 27190 }, { "epoch": 1.8477374643293927, "grad_norm": 1.6236460208892822, "learning_rate": 0.0007691262399782579, "loss": 3.7368, "step": 27195 }, { "epoch": 1.8480771844000543, "grad_norm": 1.6342700719833374, "learning_rate": 0.0007690837749694252, "loss": 3.6066, "step": 27200 }, { "epoch": 1.8484169044707162, "grad_norm": 1.5217605829238892, "learning_rate": 0.0007690413099605925, "loss": 3.8593, "step": 27205 }, { "epoch": 1.848756624541378, "grad_norm": 1.6937692165374756, "learning_rate": 0.0007689988449517597, "loss": 3.5932, "step": 27210 }, { "epoch": 1.8490963446120396, "grad_norm": 1.5521364212036133, "learning_rate": 0.000768956379942927, "loss": 3.3501, "step": 27215 }, { "epoch": 1.8494360646827015, "grad_norm": 1.7649668455123901, "learning_rate": 0.0007689139149340944, "loss": 3.6524, "step": 27220 }, { "epoch": 1.8497757847533634, "grad_norm": 
2.5230250358581543, "learning_rate": 0.0007688714499252616, "loss": 3.5732, "step": 27225 }, { "epoch": 1.850115504824025, "grad_norm": 2.2616724967956543, "learning_rate": 0.0007688289849164288, "loss": 3.3948, "step": 27230 }, { "epoch": 1.8504552248946866, "grad_norm": 1.7383990287780762, "learning_rate": 0.0007687865199075962, "loss": 3.605, "step": 27235 }, { "epoch": 1.8507949449653487, "grad_norm": 2.0534069538116455, "learning_rate": 0.0007687440548987634, "loss": 3.5399, "step": 27240 }, { "epoch": 1.8511346650360103, "grad_norm": 2.061574935913086, "learning_rate": 0.0007687015898899306, "loss": 3.5799, "step": 27245 }, { "epoch": 1.851474385106672, "grad_norm": 2.6777966022491455, "learning_rate": 0.000768659124881098, "loss": 3.4037, "step": 27250 }, { "epoch": 1.851814105177334, "grad_norm": 1.939151406288147, "learning_rate": 0.0007686166598722653, "loss": 3.5281, "step": 27255 }, { "epoch": 1.8521538252479957, "grad_norm": 1.7822304964065552, "learning_rate": 0.0007685741948634325, "loss": 3.6382, "step": 27260 }, { "epoch": 1.8524935453186573, "grad_norm": 2.0144007205963135, "learning_rate": 0.0007685317298545999, "loss": 3.6875, "step": 27265 }, { "epoch": 1.8528332653893194, "grad_norm": 2.4200782775878906, "learning_rate": 0.0007684892648457671, "loss": 3.5958, "step": 27270 }, { "epoch": 1.853172985459981, "grad_norm": 2.0667495727539062, "learning_rate": 0.0007684467998369343, "loss": 3.8817, "step": 27275 }, { "epoch": 1.8535127055306426, "grad_norm": 2.118577241897583, "learning_rate": 0.0007684043348281016, "loss": 3.6109, "step": 27280 }, { "epoch": 1.8538524256013045, "grad_norm": 2.0755598545074463, "learning_rate": 0.0007683618698192689, "loss": 3.5701, "step": 27285 }, { "epoch": 1.8541921456719663, "grad_norm": 1.4088127613067627, "learning_rate": 0.0007683194048104362, "loss": 3.5473, "step": 27290 }, { "epoch": 1.854531865742628, "grad_norm": 2.44043231010437, "learning_rate": 0.0007682769398016035, "loss": 3.6012, "step": 27295 }, 
{ "epoch": 1.8548715858132898, "grad_norm": 2.4410054683685303, "learning_rate": 0.0007682344747927708, "loss": 3.5881, "step": 27300 }, { "epoch": 1.8552113058839517, "grad_norm": 1.8173208236694336, "learning_rate": 0.0007681920097839381, "loss": 3.4849, "step": 27305 }, { "epoch": 1.8555510259546133, "grad_norm": 2.412916660308838, "learning_rate": 0.0007681495447751053, "loss": 3.4123, "step": 27310 }, { "epoch": 1.8558907460252752, "grad_norm": 2.5214955806732178, "learning_rate": 0.0007681070797662725, "loss": 3.7116, "step": 27315 }, { "epoch": 1.856230466095937, "grad_norm": 1.93662428855896, "learning_rate": 0.00076806461475744, "loss": 3.4535, "step": 27320 }, { "epoch": 1.8565701861665986, "grad_norm": 1.8452801704406738, "learning_rate": 0.0007680221497486072, "loss": 3.4462, "step": 27325 }, { "epoch": 1.8569099062372605, "grad_norm": 2.143303632736206, "learning_rate": 0.0007679796847397744, "loss": 3.4651, "step": 27330 }, { "epoch": 1.8572496263079223, "grad_norm": 2.0545456409454346, "learning_rate": 0.0007679372197309418, "loss": 3.418, "step": 27335 }, { "epoch": 1.857589346378584, "grad_norm": 1.7911347150802612, "learning_rate": 0.000767894754722109, "loss": 3.4094, "step": 27340 }, { "epoch": 1.8579290664492458, "grad_norm": 2.303499460220337, "learning_rate": 0.0007678522897132762, "loss": 3.3528, "step": 27345 }, { "epoch": 1.8582687865199077, "grad_norm": 1.9649271965026855, "learning_rate": 0.0007678098247044436, "loss": 3.5012, "step": 27350 }, { "epoch": 1.8586085065905693, "grad_norm": 1.9391851425170898, "learning_rate": 0.0007677673596956109, "loss": 3.5674, "step": 27355 }, { "epoch": 1.8589482266612312, "grad_norm": 1.7109405994415283, "learning_rate": 0.0007677248946867781, "loss": 3.4961, "step": 27360 }, { "epoch": 1.859287946731893, "grad_norm": 2.141817808151245, "learning_rate": 0.0007676824296779455, "loss": 3.3976, "step": 27365 }, { "epoch": 1.8596276668025546, "grad_norm": 1.7299555540084839, "learning_rate": 
0.0007676399646691127, "loss": 3.458, "step": 27370 }, { "epoch": 1.8599673868732165, "grad_norm": 2.3820114135742188, "learning_rate": 0.0007675974996602799, "loss": 3.5053, "step": 27375 }, { "epoch": 1.8603071069438784, "grad_norm": 1.8078795671463013, "learning_rate": 0.0007675550346514472, "loss": 3.427, "step": 27380 }, { "epoch": 1.86064682701454, "grad_norm": 2.281376600265503, "learning_rate": 0.0007675125696426145, "loss": 3.5204, "step": 27385 }, { "epoch": 1.8609865470852018, "grad_norm": 2.207038402557373, "learning_rate": 0.0007674701046337818, "loss": 3.3015, "step": 27390 }, { "epoch": 1.8613262671558637, "grad_norm": 1.583687424659729, "learning_rate": 0.0007674276396249491, "loss": 3.6488, "step": 27395 }, { "epoch": 1.8616659872265253, "grad_norm": 2.257953405380249, "learning_rate": 0.0007673851746161164, "loss": 3.7834, "step": 27400 }, { "epoch": 1.862005707297187, "grad_norm": 2.395348310470581, "learning_rate": 0.0007673427096072836, "loss": 3.4307, "step": 27405 }, { "epoch": 1.862345427367849, "grad_norm": 2.046198844909668, "learning_rate": 0.0007673002445984509, "loss": 3.7468, "step": 27410 }, { "epoch": 1.8626851474385107, "grad_norm": 2.258808135986328, "learning_rate": 0.0007672577795896182, "loss": 3.8462, "step": 27415 }, { "epoch": 1.8630248675091723, "grad_norm": 1.8246365785598755, "learning_rate": 0.0007672153145807854, "loss": 3.6214, "step": 27420 }, { "epoch": 1.8633645875798344, "grad_norm": 1.9711850881576538, "learning_rate": 0.0007671728495719528, "loss": 3.6093, "step": 27425 }, { "epoch": 1.863704307650496, "grad_norm": 1.9460467100143433, "learning_rate": 0.00076713038456312, "loss": 3.6749, "step": 27430 }, { "epoch": 1.8640440277211576, "grad_norm": 1.460493803024292, "learning_rate": 0.0007670879195542873, "loss": 3.5852, "step": 27435 }, { "epoch": 1.8643837477918197, "grad_norm": 2.2327425479888916, "learning_rate": 0.0007670454545454546, "loss": 3.7283, "step": 27440 }, { "epoch": 1.8647234678624813, 
"grad_norm": 1.8298872709274292, "learning_rate": 0.0007670029895366218, "loss": 3.5655, "step": 27445 }, { "epoch": 1.865063187933143, "grad_norm": 2.269139051437378, "learning_rate": 0.0007669605245277891, "loss": 3.3991, "step": 27450 }, { "epoch": 1.8654029080038048, "grad_norm": 2.0590004920959473, "learning_rate": 0.0007669180595189564, "loss": 3.4006, "step": 27455 }, { "epoch": 1.8657426280744667, "grad_norm": 1.5579423904418945, "learning_rate": 0.0007668755945101237, "loss": 3.6729, "step": 27460 }, { "epoch": 1.8660823481451283, "grad_norm": 2.574517250061035, "learning_rate": 0.000766833129501291, "loss": 3.43, "step": 27465 }, { "epoch": 1.8664220682157902, "grad_norm": 2.1159451007843018, "learning_rate": 0.0007667906644924583, "loss": 3.6331, "step": 27470 }, { "epoch": 1.866761788286452, "grad_norm": 1.8273475170135498, "learning_rate": 0.0007667481994836255, "loss": 3.3227, "step": 27475 }, { "epoch": 1.8671015083571136, "grad_norm": 2.0781960487365723, "learning_rate": 0.0007667057344747927, "loss": 3.7014, "step": 27480 }, { "epoch": 1.8674412284277755, "grad_norm": 1.7468147277832031, "learning_rate": 0.0007666632694659601, "loss": 3.4388, "step": 27485 }, { "epoch": 1.8677809484984373, "grad_norm": 2.188514471054077, "learning_rate": 0.0007666208044571273, "loss": 3.3895, "step": 27490 }, { "epoch": 1.868120668569099, "grad_norm": 2.5235211849212646, "learning_rate": 0.0007665783394482946, "loss": 3.7499, "step": 27495 }, { "epoch": 1.8684603886397608, "grad_norm": 2.3004448413848877, "learning_rate": 0.000766535874439462, "loss": 3.4882, "step": 27500 }, { "epoch": 1.8688001087104227, "grad_norm": 1.7603703737258911, "learning_rate": 0.0007664934094306292, "loss": 3.6075, "step": 27505 }, { "epoch": 1.8691398287810843, "grad_norm": 2.24393630027771, "learning_rate": 0.0007664509444217964, "loss": 3.3369, "step": 27510 }, { "epoch": 1.8694795488517462, "grad_norm": 1.810063362121582, "learning_rate": 0.0007664084794129638, "loss": 3.6017, 
"step": 27515 }, { "epoch": 1.869819268922408, "grad_norm": 1.9081840515136719, "learning_rate": 0.000766366014404131, "loss": 3.4358, "step": 27520 }, { "epoch": 1.8701589889930696, "grad_norm": 1.7024098634719849, "learning_rate": 0.0007663235493952982, "loss": 3.423, "step": 27525 }, { "epoch": 1.8704987090637315, "grad_norm": 1.54313063621521, "learning_rate": 0.0007662810843864656, "loss": 3.6201, "step": 27530 }, { "epoch": 1.8708384291343934, "grad_norm": 2.6050078868865967, "learning_rate": 0.0007662386193776329, "loss": 3.2662, "step": 27535 }, { "epoch": 1.871178149205055, "grad_norm": 1.9131656885147095, "learning_rate": 0.0007661961543688001, "loss": 3.8542, "step": 27540 }, { "epoch": 1.8715178692757168, "grad_norm": 1.7376536130905151, "learning_rate": 0.0007661536893599674, "loss": 3.5169, "step": 27545 }, { "epoch": 1.8718575893463787, "grad_norm": 1.7794142961502075, "learning_rate": 0.0007661112243511347, "loss": 3.785, "step": 27550 }, { "epoch": 1.8721973094170403, "grad_norm": 1.8449710607528687, "learning_rate": 0.0007660687593423019, "loss": 3.4599, "step": 27555 }, { "epoch": 1.8725370294877022, "grad_norm": 2.260777235031128, "learning_rate": 0.0007660262943334692, "loss": 3.1959, "step": 27560 }, { "epoch": 1.872876749558364, "grad_norm": 2.012187957763672, "learning_rate": 0.0007659838293246366, "loss": 3.5049, "step": 27565 }, { "epoch": 1.8732164696290257, "grad_norm": 1.751616358757019, "learning_rate": 0.0007659413643158038, "loss": 3.3858, "step": 27570 }, { "epoch": 1.8735561896996873, "grad_norm": 2.188561201095581, "learning_rate": 0.0007658988993069711, "loss": 3.7378, "step": 27575 }, { "epoch": 1.8738959097703494, "grad_norm": 2.5104124546051025, "learning_rate": 0.0007658564342981383, "loss": 3.7927, "step": 27580 }, { "epoch": 1.874235629841011, "grad_norm": 1.7739447355270386, "learning_rate": 0.0007658139692893056, "loss": 3.5758, "step": 27585 }, { "epoch": 1.8745753499116726, "grad_norm": 1.5961581468582153, 
"learning_rate": 0.0007657715042804729, "loss": 3.3302, "step": 27590 }, { "epoch": 1.8749150699823347, "grad_norm": 2.011143922805786, "learning_rate": 0.0007657290392716401, "loss": 3.4453, "step": 27595 }, { "epoch": 1.8752547900529963, "grad_norm": 2.0785861015319824, "learning_rate": 0.0007656865742628075, "loss": 3.498, "step": 27600 }, { "epoch": 1.875594510123658, "grad_norm": 2.2862820625305176, "learning_rate": 0.0007656441092539748, "loss": 3.5854, "step": 27605 }, { "epoch": 1.87593423019432, "grad_norm": 1.9261432886123657, "learning_rate": 0.000765601644245142, "loss": 3.5013, "step": 27610 }, { "epoch": 1.8762739502649817, "grad_norm": 1.922593593597412, "learning_rate": 0.0007655591792363092, "loss": 3.714, "step": 27615 }, { "epoch": 1.8766136703356433, "grad_norm": 1.807446002960205, "learning_rate": 0.0007655167142274766, "loss": 3.4369, "step": 27620 }, { "epoch": 1.8769533904063052, "grad_norm": 2.118143320083618, "learning_rate": 0.0007654742492186438, "loss": 3.7232, "step": 27625 }, { "epoch": 1.877293110476967, "grad_norm": 1.6430045366287231, "learning_rate": 0.000765431784209811, "loss": 3.5003, "step": 27630 }, { "epoch": 1.8776328305476286, "grad_norm": 2.0612826347351074, "learning_rate": 0.0007653893192009785, "loss": 3.2223, "step": 27635 }, { "epoch": 1.8779725506182905, "grad_norm": 1.5293877124786377, "learning_rate": 0.0007653468541921457, "loss": 3.5859, "step": 27640 }, { "epoch": 1.8783122706889523, "grad_norm": 1.8001408576965332, "learning_rate": 0.000765304389183313, "loss": 3.301, "step": 27645 }, { "epoch": 1.878651990759614, "grad_norm": 2.7974936962127686, "learning_rate": 0.0007652619241744803, "loss": 3.4088, "step": 27650 }, { "epoch": 1.8789917108302758, "grad_norm": 1.5703132152557373, "learning_rate": 0.0007652194591656475, "loss": 3.5035, "step": 27655 }, { "epoch": 1.8793314309009377, "grad_norm": 1.7543600797653198, "learning_rate": 0.0007651769941568148, "loss": 3.4817, "step": 27660 }, { "epoch": 
1.8796711509715993, "grad_norm": 1.9892876148223877, "learning_rate": 0.000765134529147982, "loss": 3.6339, "step": 27665 }, { "epoch": 1.8800108710422612, "grad_norm": 1.8799763917922974, "learning_rate": 0.0007650920641391494, "loss": 3.5187, "step": 27670 }, { "epoch": 1.880350591112923, "grad_norm": 2.167750597000122, "learning_rate": 0.0007650495991303167, "loss": 3.4313, "step": 27675 }, { "epoch": 1.8806903111835847, "grad_norm": 1.942077875137329, "learning_rate": 0.0007650071341214839, "loss": 3.678, "step": 27680 }, { "epoch": 1.8810300312542465, "grad_norm": 2.1430788040161133, "learning_rate": 0.0007649646691126512, "loss": 3.5224, "step": 27685 }, { "epoch": 1.8813697513249084, "grad_norm": 1.565243124961853, "learning_rate": 0.0007649222041038185, "loss": 3.505, "step": 27690 }, { "epoch": 1.88170947139557, "grad_norm": 1.9945684671401978, "learning_rate": 0.0007648797390949857, "loss": 3.653, "step": 27695 }, { "epoch": 1.8820491914662318, "grad_norm": 2.0098745822906494, "learning_rate": 0.000764837274086153, "loss": 3.4201, "step": 27700 }, { "epoch": 1.8823889115368937, "grad_norm": 2.2093729972839355, "learning_rate": 0.0007647948090773204, "loss": 3.431, "step": 27705 }, { "epoch": 1.8827286316075553, "grad_norm": 2.2469749450683594, "learning_rate": 0.0007647523440684876, "loss": 3.5521, "step": 27710 }, { "epoch": 1.8830683516782172, "grad_norm": 1.5204404592514038, "learning_rate": 0.0007647098790596548, "loss": 3.6174, "step": 27715 }, { "epoch": 1.883408071748879, "grad_norm": 1.6919485330581665, "learning_rate": 0.0007646674140508222, "loss": 3.4086, "step": 27720 }, { "epoch": 1.8837477918195407, "grad_norm": 1.565580129623413, "learning_rate": 0.0007646249490419894, "loss": 3.5481, "step": 27725 }, { "epoch": 1.8840875118902025, "grad_norm": 2.1750402450561523, "learning_rate": 0.0007645824840331566, "loss": 3.5306, "step": 27730 }, { "epoch": 1.8844272319608644, "grad_norm": 2.1113133430480957, "learning_rate": 0.000764540019024324, 
"loss": 3.6879, "step": 27735 }, { "epoch": 1.884766952031526, "grad_norm": 2.0204389095306396, "learning_rate": 0.0007644975540154913, "loss": 3.6367, "step": 27740 }, { "epoch": 1.8851066721021879, "grad_norm": 1.9810158014297485, "learning_rate": 0.0007644550890066585, "loss": 3.3796, "step": 27745 }, { "epoch": 1.8854463921728497, "grad_norm": 1.491025447845459, "learning_rate": 0.0007644126239978259, "loss": 3.4105, "step": 27750 }, { "epoch": 1.8857861122435113, "grad_norm": 1.7252562046051025, "learning_rate": 0.0007643701589889931, "loss": 3.4089, "step": 27755 }, { "epoch": 1.886125832314173, "grad_norm": 2.0381174087524414, "learning_rate": 0.0007643276939801603, "loss": 3.6061, "step": 27760 }, { "epoch": 1.886465552384835, "grad_norm": 2.044104814529419, "learning_rate": 0.0007642852289713276, "loss": 3.6262, "step": 27765 }, { "epoch": 1.8868052724554967, "grad_norm": 2.0813093185424805, "learning_rate": 0.0007642427639624949, "loss": 3.7022, "step": 27770 }, { "epoch": 1.8871449925261583, "grad_norm": 1.9358245134353638, "learning_rate": 0.0007642002989536622, "loss": 3.3249, "step": 27775 }, { "epoch": 1.8874847125968204, "grad_norm": 1.5278213024139404, "learning_rate": 0.0007641578339448295, "loss": 3.4307, "step": 27780 }, { "epoch": 1.887824432667482, "grad_norm": 2.398834705352783, "learning_rate": 0.0007641153689359968, "loss": 3.3908, "step": 27785 }, { "epoch": 1.8881641527381436, "grad_norm": 1.9427762031555176, "learning_rate": 0.000764072903927164, "loss": 3.7097, "step": 27790 }, { "epoch": 1.8885038728088055, "grad_norm": 1.6638544797897339, "learning_rate": 0.0007640304389183313, "loss": 3.492, "step": 27795 }, { "epoch": 1.8888435928794673, "grad_norm": 2.121737241744995, "learning_rate": 0.0007639879739094986, "loss": 3.4985, "step": 27800 }, { "epoch": 1.889183312950129, "grad_norm": 1.773142695426941, "learning_rate": 0.0007639455089006658, "loss": 3.5083, "step": 27805 }, { "epoch": 1.8895230330207908, "grad_norm": 
1.8002885580062866, "learning_rate": 0.0007639030438918332, "loss": 3.6889, "step": 27810 }, { "epoch": 1.8898627530914527, "grad_norm": 2.8757004737854004, "learning_rate": 0.0007638605788830004, "loss": 3.5253, "step": 27815 }, { "epoch": 1.8902024731621143, "grad_norm": 1.783242106437683, "learning_rate": 0.0007638181138741677, "loss": 3.4582, "step": 27820 }, { "epoch": 1.8905421932327762, "grad_norm": 1.5741477012634277, "learning_rate": 0.000763775648865335, "loss": 3.4958, "step": 27825 }, { "epoch": 1.890881913303438, "grad_norm": 2.193770408630371, "learning_rate": 0.0007637331838565022, "loss": 3.8505, "step": 27830 }, { "epoch": 1.8912216333740997, "grad_norm": 1.987174391746521, "learning_rate": 0.0007636907188476695, "loss": 3.6886, "step": 27835 }, { "epoch": 1.8915613534447615, "grad_norm": 2.114187479019165, "learning_rate": 0.0007636482538388368, "loss": 3.4498, "step": 27840 }, { "epoch": 1.8919010735154234, "grad_norm": 1.7927589416503906, "learning_rate": 0.0007636057888300041, "loss": 3.3838, "step": 27845 }, { "epoch": 1.892240793586085, "grad_norm": 2.2421343326568604, "learning_rate": 0.0007635633238211714, "loss": 3.4893, "step": 27850 }, { "epoch": 1.8925805136567468, "grad_norm": 1.843545913696289, "learning_rate": 0.0007635208588123387, "loss": 3.8854, "step": 27855 }, { "epoch": 1.8929202337274087, "grad_norm": 1.6196763515472412, "learning_rate": 0.0007634783938035059, "loss": 3.8637, "step": 27860 }, { "epoch": 1.8932599537980703, "grad_norm": 2.540172576904297, "learning_rate": 0.0007634359287946731, "loss": 3.2967, "step": 27865 }, { "epoch": 1.8935996738687322, "grad_norm": 1.9716315269470215, "learning_rate": 0.0007633934637858405, "loss": 3.768, "step": 27870 }, { "epoch": 1.893939393939394, "grad_norm": 2.4767165184020996, "learning_rate": 0.0007633509987770077, "loss": 3.7081, "step": 27875 }, { "epoch": 1.8942791140100557, "grad_norm": 1.8271777629852295, "learning_rate": 0.000763308533768175, "loss": 3.66, "step": 27880 }, { 
"epoch": 1.8946188340807175, "grad_norm": 2.1004796028137207, "learning_rate": 0.0007632660687593424, "loss": 3.3929, "step": 27885 }, { "epoch": 1.8949585541513794, "grad_norm": 1.5095902681350708, "learning_rate": 0.0007632236037505096, "loss": 3.4403, "step": 27890 }, { "epoch": 1.895298274222041, "grad_norm": 2.0910422801971436, "learning_rate": 0.0007631811387416768, "loss": 3.5492, "step": 27895 }, { "epoch": 1.8956379942927029, "grad_norm": 2.190464973449707, "learning_rate": 0.0007631386737328442, "loss": 3.6376, "step": 27900 }, { "epoch": 1.8959777143633647, "grad_norm": 2.4309353828430176, "learning_rate": 0.0007630962087240114, "loss": 3.4178, "step": 27905 }, { "epoch": 1.8963174344340263, "grad_norm": 1.974522352218628, "learning_rate": 0.0007630537437151786, "loss": 3.6308, "step": 27910 }, { "epoch": 1.8966571545046882, "grad_norm": 1.923298954963684, "learning_rate": 0.000763011278706346, "loss": 3.4739, "step": 27915 }, { "epoch": 1.89699687457535, "grad_norm": 2.0282726287841797, "learning_rate": 0.0007629688136975133, "loss": 3.575, "step": 27920 }, { "epoch": 1.8973365946460117, "grad_norm": 2.0918986797332764, "learning_rate": 0.0007629263486886805, "loss": 3.5864, "step": 27925 }, { "epoch": 1.8976763147166733, "grad_norm": 1.7686805725097656, "learning_rate": 0.0007628838836798478, "loss": 3.5472, "step": 27930 }, { "epoch": 1.8980160347873354, "grad_norm": 2.1642050743103027, "learning_rate": 0.0007628414186710151, "loss": 3.4662, "step": 27935 }, { "epoch": 1.898355754857997, "grad_norm": 2.2553696632385254, "learning_rate": 0.0007627989536621823, "loss": 3.5267, "step": 27940 }, { "epoch": 1.8986954749286586, "grad_norm": 2.1515703201293945, "learning_rate": 0.0007627564886533497, "loss": 3.4458, "step": 27945 }, { "epoch": 1.8990351949993207, "grad_norm": 1.6763867139816284, "learning_rate": 0.000762714023644517, "loss": 3.4888, "step": 27950 }, { "epoch": 1.8993749150699824, "grad_norm": 1.824834942817688, "learning_rate": 
0.0007626715586356842, "loss": 3.3344, "step": 27955 }, { "epoch": 1.899714635140644, "grad_norm": 1.7604990005493164, "learning_rate": 0.0007626290936268515, "loss": 3.4422, "step": 27960 }, { "epoch": 1.9000543552113058, "grad_norm": 2.2227532863616943, "learning_rate": 0.0007625866286180187, "loss": 3.6421, "step": 27965 }, { "epoch": 1.9003940752819677, "grad_norm": 1.5764347314834595, "learning_rate": 0.000762544163609186, "loss": 3.7017, "step": 27970 }, { "epoch": 1.9007337953526293, "grad_norm": 1.7522392272949219, "learning_rate": 0.0007625016986003533, "loss": 3.492, "step": 27975 }, { "epoch": 1.9010735154232912, "grad_norm": 1.5160739421844482, "learning_rate": 0.0007624592335915206, "loss": 3.6276, "step": 27980 }, { "epoch": 1.901413235493953, "grad_norm": 1.8019318580627441, "learning_rate": 0.000762416768582688, "loss": 3.864, "step": 27985 }, { "epoch": 1.9017529555646147, "grad_norm": 5.316531658172607, "learning_rate": 0.0007623743035738552, "loss": 3.546, "step": 27990 }, { "epoch": 1.9020926756352765, "grad_norm": 2.6595115661621094, "learning_rate": 0.0007623318385650224, "loss": 3.5029, "step": 27995 }, { "epoch": 1.9024323957059384, "grad_norm": 1.7280770540237427, "learning_rate": 0.0007622893735561898, "loss": 3.4552, "step": 28000 }, { "epoch": 1.9027721157766, "grad_norm": 1.7219644784927368, "learning_rate": 0.000762246908547357, "loss": 3.566, "step": 28005 }, { "epoch": 1.9031118358472618, "grad_norm": 1.950426697731018, "learning_rate": 0.0007622044435385242, "loss": 3.7002, "step": 28010 }, { "epoch": 1.9034515559179237, "grad_norm": 2.399981737136841, "learning_rate": 0.0007621619785296917, "loss": 3.335, "step": 28015 }, { "epoch": 1.9037912759885853, "grad_norm": 2.0625851154327393, "learning_rate": 0.0007621195135208589, "loss": 3.6155, "step": 28020 }, { "epoch": 1.9041309960592472, "grad_norm": 1.921293020248413, "learning_rate": 0.0007620770485120261, "loss": 3.5422, "step": 28025 }, { "epoch": 1.904470716129909, "grad_norm": 
1.5925769805908203, "learning_rate": 0.0007620345835031934, "loss": 3.5682, "step": 28030 }, { "epoch": 1.9048104362005707, "grad_norm": 2.1131229400634766, "learning_rate": 0.0007619921184943607, "loss": 3.5993, "step": 28035 }, { "epoch": 1.9051501562712325, "grad_norm": 2.3828957080841064, "learning_rate": 0.0007619496534855279, "loss": 3.647, "step": 28040 }, { "epoch": 1.9054898763418944, "grad_norm": 1.8133063316345215, "learning_rate": 0.0007619071884766952, "loss": 3.7354, "step": 28045 }, { "epoch": 1.905829596412556, "grad_norm": 1.8667174577713013, "learning_rate": 0.0007618647234678626, "loss": 3.3982, "step": 28050 }, { "epoch": 1.9061693164832179, "grad_norm": 2.450914144515991, "learning_rate": 0.0007618222584590298, "loss": 3.4938, "step": 28055 }, { "epoch": 1.9065090365538797, "grad_norm": 1.6466244459152222, "learning_rate": 0.0007617797934501971, "loss": 3.6114, "step": 28060 }, { "epoch": 1.9068487566245413, "grad_norm": 1.7966073751449585, "learning_rate": 0.0007617373284413643, "loss": 3.3312, "step": 28065 }, { "epoch": 1.9071884766952032, "grad_norm": 1.840393304824829, "learning_rate": 0.0007616948634325316, "loss": 3.5157, "step": 28070 }, { "epoch": 1.907528196765865, "grad_norm": 1.7540682554244995, "learning_rate": 0.0007616523984236989, "loss": 3.6533, "step": 28075 }, { "epoch": 1.9078679168365267, "grad_norm": 1.8950121402740479, "learning_rate": 0.0007616099334148661, "loss": 3.5446, "step": 28080 }, { "epoch": 1.9082076369071885, "grad_norm": 2.0194222927093506, "learning_rate": 0.0007615674684060335, "loss": 3.4471, "step": 28085 }, { "epoch": 1.9085473569778504, "grad_norm": 1.8284969329833984, "learning_rate": 0.0007615250033972008, "loss": 3.6683, "step": 28090 }, { "epoch": 1.908887077048512, "grad_norm": 2.0097928047180176, "learning_rate": 0.000761482538388368, "loss": 3.4086, "step": 28095 }, { "epoch": 1.9092267971191736, "grad_norm": 2.0478293895721436, "learning_rate": 0.0007614400733795353, "loss": 3.5126, "step": 
28100 }, { "epoch": 1.9095665171898357, "grad_norm": 1.8216105699539185, "learning_rate": 0.0007613976083707026, "loss": 3.4532, "step": 28105 }, { "epoch": 1.9099062372604974, "grad_norm": 2.168888568878174, "learning_rate": 0.0007613551433618698, "loss": 3.5209, "step": 28110 }, { "epoch": 1.910245957331159, "grad_norm": 1.8903448581695557, "learning_rate": 0.000761312678353037, "loss": 3.4937, "step": 28115 }, { "epoch": 1.910585677401821, "grad_norm": 2.1419837474823, "learning_rate": 0.0007612702133442045, "loss": 3.761, "step": 28120 }, { "epoch": 1.9109253974724827, "grad_norm": 1.8634822368621826, "learning_rate": 0.0007612277483353717, "loss": 3.8102, "step": 28125 }, { "epoch": 1.9112651175431443, "grad_norm": 2.0745551586151123, "learning_rate": 0.0007611852833265389, "loss": 3.5142, "step": 28130 }, { "epoch": 1.9116048376138062, "grad_norm": 2.3930931091308594, "learning_rate": 0.0007611428183177063, "loss": 3.3438, "step": 28135 }, { "epoch": 1.911944557684468, "grad_norm": 1.8835614919662476, "learning_rate": 0.0007611003533088735, "loss": 3.8911, "step": 28140 }, { "epoch": 1.9122842777551297, "grad_norm": 1.7349308729171753, "learning_rate": 0.0007610578883000407, "loss": 3.6671, "step": 28145 }, { "epoch": 1.9126239978257915, "grad_norm": 1.851715087890625, "learning_rate": 0.000761015423291208, "loss": 3.5281, "step": 28150 }, { "epoch": 1.9129637178964534, "grad_norm": 1.8150007724761963, "learning_rate": 0.0007609729582823754, "loss": 3.6079, "step": 28155 }, { "epoch": 1.913303437967115, "grad_norm": 2.2614662647247314, "learning_rate": 0.0007609304932735426, "loss": 3.7185, "step": 28160 }, { "epoch": 1.9136431580377768, "grad_norm": 1.9603536128997803, "learning_rate": 0.00076088802826471, "loss": 3.6953, "step": 28165 }, { "epoch": 1.9139828781084387, "grad_norm": 2.2422659397125244, "learning_rate": 0.0007608455632558772, "loss": 3.4617, "step": 28170 }, { "epoch": 1.9143225981791003, "grad_norm": 4.148995399475098, "learning_rate": 
0.0007608030982470444, "loss": 3.3721, "step": 28175 }, { "epoch": 1.9146623182497622, "grad_norm": 2.1106786727905273, "learning_rate": 0.0007607606332382117, "loss": 3.6483, "step": 28180 }, { "epoch": 1.915002038320424, "grad_norm": 2.0631234645843506, "learning_rate": 0.000760718168229379, "loss": 3.5578, "step": 28185 }, { "epoch": 1.9153417583910857, "grad_norm": 1.8859889507293701, "learning_rate": 0.0007606757032205463, "loss": 3.5188, "step": 28190 }, { "epoch": 1.9156814784617475, "grad_norm": 1.813650369644165, "learning_rate": 0.0007606332382117136, "loss": 3.3919, "step": 28195 }, { "epoch": 1.9160211985324094, "grad_norm": 1.7453880310058594, "learning_rate": 0.0007605907732028809, "loss": 3.8778, "step": 28200 }, { "epoch": 1.916360918603071, "grad_norm": 2.0670933723449707, "learning_rate": 0.0007605483081940481, "loss": 3.6119, "step": 28205 }, { "epoch": 1.9167006386737329, "grad_norm": 1.4897210597991943, "learning_rate": 0.0007605058431852154, "loss": 3.4747, "step": 28210 }, { "epoch": 1.9170403587443947, "grad_norm": 1.7607176303863525, "learning_rate": 0.0007604633781763826, "loss": 3.431, "step": 28215 }, { "epoch": 1.9173800788150563, "grad_norm": 2.0438055992126465, "learning_rate": 0.0007604209131675499, "loss": 3.5275, "step": 28220 }, { "epoch": 1.9177197988857182, "grad_norm": 1.992998480796814, "learning_rate": 0.0007603784481587173, "loss": 3.3867, "step": 28225 }, { "epoch": 1.91805951895638, "grad_norm": 2.9548678398132324, "learning_rate": 0.0007603359831498845, "loss": 3.2238, "step": 28230 }, { "epoch": 1.9183992390270417, "grad_norm": 1.8030256032943726, "learning_rate": 0.0007602935181410518, "loss": 3.52, "step": 28235 }, { "epoch": 1.9187389590977035, "grad_norm": 1.8219703435897827, "learning_rate": 0.0007602510531322191, "loss": 3.5889, "step": 28240 }, { "epoch": 1.9190786791683654, "grad_norm": 1.9258267879486084, "learning_rate": 0.0007602085881233863, "loss": 3.5011, "step": 28245 }, { "epoch": 1.919418399239027, 
"grad_norm": 2.3993167877197266, "learning_rate": 0.0007601661231145535, "loss": 3.5492, "step": 28250 }, { "epoch": 1.9197581193096889, "grad_norm": 1.9240727424621582, "learning_rate": 0.0007601236581057209, "loss": 3.6636, "step": 28255 }, { "epoch": 1.9200978393803507, "grad_norm": 1.939048409461975, "learning_rate": 0.0007600811930968882, "loss": 3.6505, "step": 28260 }, { "epoch": 1.9204375594510124, "grad_norm": 2.435324192047119, "learning_rate": 0.0007600387280880554, "loss": 3.8216, "step": 28265 }, { "epoch": 1.920777279521674, "grad_norm": 2.2188501358032227, "learning_rate": 0.0007599962630792228, "loss": 3.7203, "step": 28270 }, { "epoch": 1.921116999592336, "grad_norm": 1.8708419799804688, "learning_rate": 0.00075995379807039, "loss": 3.6504, "step": 28275 }, { "epoch": 1.9214567196629977, "grad_norm": 1.6742067337036133, "learning_rate": 0.0007599113330615572, "loss": 3.3926, "step": 28280 }, { "epoch": 1.9217964397336593, "grad_norm": 2.0157339572906494, "learning_rate": 0.0007598688680527246, "loss": 3.5934, "step": 28285 }, { "epoch": 1.9221361598043214, "grad_norm": 1.4929890632629395, "learning_rate": 0.0007598264030438918, "loss": 3.678, "step": 28290 }, { "epoch": 1.922475879874983, "grad_norm": 1.9534629583358765, "learning_rate": 0.0007597839380350591, "loss": 3.5563, "step": 28295 }, { "epoch": 1.9228155999456447, "grad_norm": 1.7867627143859863, "learning_rate": 0.0007597414730262265, "loss": 3.7733, "step": 28300 }, { "epoch": 1.9231553200163065, "grad_norm": 1.433765172958374, "learning_rate": 0.0007596990080173937, "loss": 3.6244, "step": 28305 }, { "epoch": 1.9234950400869684, "grad_norm": 2.0270917415618896, "learning_rate": 0.0007596565430085609, "loss": 3.6034, "step": 28310 }, { "epoch": 1.92383476015763, "grad_norm": 1.6974772214889526, "learning_rate": 0.0007596140779997282, "loss": 3.8053, "step": 28315 }, { "epoch": 1.9241744802282919, "grad_norm": 1.8203550577163696, "learning_rate": 0.0007595716129908955, "loss": 3.6106, 
"step": 28320 }, { "epoch": 1.9245142002989537, "grad_norm": 1.8057887554168701, "learning_rate": 0.0007595291479820628, "loss": 3.4974, "step": 28325 }, { "epoch": 1.9248539203696153, "grad_norm": 2.3155081272125244, "learning_rate": 0.0007594866829732301, "loss": 3.4593, "step": 28330 }, { "epoch": 1.9251936404402772, "grad_norm": 2.128777265548706, "learning_rate": 0.0007594442179643974, "loss": 3.5876, "step": 28335 }, { "epoch": 1.925533360510939, "grad_norm": 1.957950472831726, "learning_rate": 0.0007594017529555647, "loss": 3.6666, "step": 28340 }, { "epoch": 1.9258730805816007, "grad_norm": 2.0182032585144043, "learning_rate": 0.0007593592879467319, "loss": 3.4542, "step": 28345 }, { "epoch": 1.9262128006522625, "grad_norm": 2.0462093353271484, "learning_rate": 0.0007593168229378991, "loss": 3.5418, "step": 28350 }, { "epoch": 1.9265525207229244, "grad_norm": 1.992087721824646, "learning_rate": 0.0007592743579290665, "loss": 3.39, "step": 28355 }, { "epoch": 1.926892240793586, "grad_norm": 2.3152883052825928, "learning_rate": 0.0007592318929202337, "loss": 3.4872, "step": 28360 }, { "epoch": 1.9272319608642479, "grad_norm": 2.0088484287261963, "learning_rate": 0.000759189427911401, "loss": 3.5632, "step": 28365 }, { "epoch": 1.9275716809349097, "grad_norm": 1.52671217918396, "learning_rate": 0.0007591469629025684, "loss": 3.4617, "step": 28370 }, { "epoch": 1.9279114010055713, "grad_norm": 1.901323914527893, "learning_rate": 0.0007591044978937356, "loss": 3.4596, "step": 28375 }, { "epoch": 1.9282511210762332, "grad_norm": 2.1748759746551514, "learning_rate": 0.0007590620328849028, "loss": 3.5202, "step": 28380 }, { "epoch": 1.928590841146895, "grad_norm": 1.6845366954803467, "learning_rate": 0.0007590195678760702, "loss": 3.3467, "step": 28385 }, { "epoch": 1.9289305612175567, "grad_norm": 2.729318857192993, "learning_rate": 0.0007589771028672374, "loss": 3.6508, "step": 28390 }, { "epoch": 1.9292702812882185, "grad_norm": 1.7438068389892578, 
"learning_rate": 0.0007589346378584046, "loss": 3.5321, "step": 28395 }, { "epoch": 1.9296100013588804, "grad_norm": 2.3589277267456055, "learning_rate": 0.0007588921728495721, "loss": 3.5326, "step": 28400 }, { "epoch": 1.929949721429542, "grad_norm": 2.0148284435272217, "learning_rate": 0.0007588497078407393, "loss": 3.565, "step": 28405 }, { "epoch": 1.9302894415002039, "grad_norm": 1.5440062284469604, "learning_rate": 0.0007588072428319065, "loss": 3.4693, "step": 28410 }, { "epoch": 1.9306291615708657, "grad_norm": 2.2393898963928223, "learning_rate": 0.0007587647778230738, "loss": 3.2713, "step": 28415 }, { "epoch": 1.9309688816415274, "grad_norm": 1.784164547920227, "learning_rate": 0.0007587223128142411, "loss": 3.6843, "step": 28420 }, { "epoch": 1.9313086017121892, "grad_norm": 1.7981915473937988, "learning_rate": 0.0007586798478054083, "loss": 3.5535, "step": 28425 }, { "epoch": 1.931648321782851, "grad_norm": 2.1250975131988525, "learning_rate": 0.0007586373827965756, "loss": 3.5187, "step": 28430 }, { "epoch": 1.9319880418535127, "grad_norm": 2.2871177196502686, "learning_rate": 0.000758594917787743, "loss": 3.5363, "step": 28435 }, { "epoch": 1.9323277619241743, "grad_norm": 2.107041835784912, "learning_rate": 0.0007585524527789102, "loss": 3.5512, "step": 28440 }, { "epoch": 1.9326674819948364, "grad_norm": 1.7721790075302124, "learning_rate": 0.0007585099877700775, "loss": 3.5542, "step": 28445 }, { "epoch": 1.933007202065498, "grad_norm": 2.039663076400757, "learning_rate": 0.0007584675227612447, "loss": 3.6405, "step": 28450 }, { "epoch": 1.9333469221361597, "grad_norm": 1.8263064622879028, "learning_rate": 0.000758425057752412, "loss": 3.5953, "step": 28455 }, { "epoch": 1.9336866422068217, "grad_norm": 1.4186841249465942, "learning_rate": 0.0007583825927435793, "loss": 3.655, "step": 28460 }, { "epoch": 1.9340263622774834, "grad_norm": 1.953641653060913, "learning_rate": 0.0007583401277347465, "loss": 3.6585, "step": 28465 }, { "epoch": 
1.934366082348145, "grad_norm": 1.7207980155944824, "learning_rate": 0.0007582976627259139, "loss": 3.3625, "step": 28470 }, { "epoch": 1.9347058024188069, "grad_norm": 2.059875965118408, "learning_rate": 0.0007582551977170812, "loss": 3.828, "step": 28475 }, { "epoch": 1.9350455224894687, "grad_norm": 1.488520860671997, "learning_rate": 0.0007582127327082484, "loss": 3.5834, "step": 28480 }, { "epoch": 1.9353852425601303, "grad_norm": 1.9822267293930054, "learning_rate": 0.0007581702676994157, "loss": 3.5257, "step": 28485 }, { "epoch": 1.9357249626307922, "grad_norm": 1.6778134107589722, "learning_rate": 0.000758127802690583, "loss": 3.3286, "step": 28490 }, { "epoch": 1.936064682701454, "grad_norm": 2.251790761947632, "learning_rate": 0.0007580853376817502, "loss": 3.6061, "step": 28495 }, { "epoch": 1.9364044027721157, "grad_norm": 1.9982267618179321, "learning_rate": 0.0007580428726729174, "loss": 3.5793, "step": 28500 }, { "epoch": 1.9367441228427775, "grad_norm": 2.3828999996185303, "learning_rate": 0.0007580004076640849, "loss": 3.7561, "step": 28505 }, { "epoch": 1.9370838429134394, "grad_norm": 2.0530192852020264, "learning_rate": 0.0007579579426552521, "loss": 3.489, "step": 28510 }, { "epoch": 1.937423562984101, "grad_norm": 1.8216581344604492, "learning_rate": 0.0007579154776464193, "loss": 3.5106, "step": 28515 }, { "epoch": 1.9377632830547629, "grad_norm": 2.325629949569702, "learning_rate": 0.0007578730126375867, "loss": 3.6019, "step": 28520 }, { "epoch": 1.9381030031254247, "grad_norm": 1.7488113641738892, "learning_rate": 0.0007578305476287539, "loss": 3.5961, "step": 28525 }, { "epoch": 1.9384427231960863, "grad_norm": 1.660378336906433, "learning_rate": 0.0007577880826199211, "loss": 3.526, "step": 28530 }, { "epoch": 1.9387824432667482, "grad_norm": 1.8319913148880005, "learning_rate": 0.0007577456176110886, "loss": 3.5098, "step": 28535 }, { "epoch": 1.93912216333741, "grad_norm": 3.6473453044891357, "learning_rate": 0.0007577031526022558, 
"loss": 3.6454, "step": 28540 }, { "epoch": 1.9394618834080717, "grad_norm": 2.027536630630493, "learning_rate": 0.000757660687593423, "loss": 3.6079, "step": 28545 }, { "epoch": 1.9398016034787335, "grad_norm": 1.808202862739563, "learning_rate": 0.0007576182225845903, "loss": 3.6027, "step": 28550 }, { "epoch": 1.9401413235493954, "grad_norm": 1.7507200241088867, "learning_rate": 0.0007575757575757576, "loss": 3.4607, "step": 28555 }, { "epoch": 1.940481043620057, "grad_norm": 2.0342819690704346, "learning_rate": 0.0007575332925669248, "loss": 3.7244, "step": 28560 }, { "epoch": 1.9408207636907189, "grad_norm": 1.5192320346832275, "learning_rate": 0.0007574908275580921, "loss": 3.4382, "step": 28565 }, { "epoch": 1.9411604837613807, "grad_norm": 2.3818788528442383, "learning_rate": 0.0007574483625492595, "loss": 3.6088, "step": 28570 }, { "epoch": 1.9415002038320424, "grad_norm": 2.0674710273742676, "learning_rate": 0.0007574058975404267, "loss": 3.2331, "step": 28575 }, { "epoch": 1.9418399239027042, "grad_norm": 2.0398638248443604, "learning_rate": 0.000757363432531594, "loss": 3.6049, "step": 28580 }, { "epoch": 1.942179643973366, "grad_norm": 1.614019751548767, "learning_rate": 0.0007573209675227613, "loss": 3.8488, "step": 28585 }, { "epoch": 1.9425193640440277, "grad_norm": 1.967692255973816, "learning_rate": 0.0007572785025139285, "loss": 3.6133, "step": 28590 }, { "epoch": 1.9428590841146895, "grad_norm": 1.8151670694351196, "learning_rate": 0.0007572360375050958, "loss": 3.479, "step": 28595 }, { "epoch": 1.9431988041853514, "grad_norm": 2.476569175720215, "learning_rate": 0.000757193572496263, "loss": 3.2583, "step": 28600 }, { "epoch": 1.943538524256013, "grad_norm": 2.1355576515197754, "learning_rate": 0.0007571511074874304, "loss": 3.4955, "step": 28605 }, { "epoch": 1.9438782443266747, "grad_norm": 2.1126861572265625, "learning_rate": 0.0007571086424785977, "loss": 3.4478, "step": 28610 }, { "epoch": 1.9442179643973367, "grad_norm": 
1.7560362815856934, "learning_rate": 0.0007570661774697649, "loss": 3.4942, "step": 28615 }, { "epoch": 1.9445576844679984, "grad_norm": 2.6731739044189453, "learning_rate": 0.0007570237124609322, "loss": 3.1767, "step": 28620 }, { "epoch": 1.94489740453866, "grad_norm": 2.181579113006592, "learning_rate": 0.0007569812474520995, "loss": 3.5993, "step": 28625 }, { "epoch": 1.945237124609322, "grad_norm": 1.6809967756271362, "learning_rate": 0.0007569387824432667, "loss": 3.508, "step": 28630 }, { "epoch": 1.9455768446799837, "grad_norm": 1.873593807220459, "learning_rate": 0.000756896317434434, "loss": 3.4667, "step": 28635 }, { "epoch": 1.9459165647506453, "grad_norm": 2.032247304916382, "learning_rate": 0.0007568538524256014, "loss": 3.422, "step": 28640 }, { "epoch": 1.9462562848213072, "grad_norm": 1.8650765419006348, "learning_rate": 0.0007568113874167686, "loss": 3.5517, "step": 28645 }, { "epoch": 1.946596004891969, "grad_norm": 2.4450154304504395, "learning_rate": 0.0007567689224079358, "loss": 3.2225, "step": 28650 }, { "epoch": 1.9469357249626307, "grad_norm": 2.185624122619629, "learning_rate": 0.0007567264573991032, "loss": 3.6864, "step": 28655 }, { "epoch": 1.9472754450332925, "grad_norm": 2.145592212677002, "learning_rate": 0.0007566839923902704, "loss": 3.8017, "step": 28660 }, { "epoch": 1.9476151651039544, "grad_norm": 2.4986507892608643, "learning_rate": 0.0007566415273814377, "loss": 3.437, "step": 28665 }, { "epoch": 1.947954885174616, "grad_norm": 2.458648204803467, "learning_rate": 0.000756599062372605, "loss": 3.4676, "step": 28670 }, { "epoch": 1.9482946052452779, "grad_norm": 2.371431350708008, "learning_rate": 0.0007565565973637723, "loss": 3.2886, "step": 28675 }, { "epoch": 1.9486343253159397, "grad_norm": 1.8894994258880615, "learning_rate": 0.0007565141323549396, "loss": 3.7915, "step": 28680 }, { "epoch": 1.9489740453866014, "grad_norm": 2.073513984680176, "learning_rate": 0.0007564716673461069, "loss": 3.6333, "step": 28685 }, { 
"epoch": 1.9493137654572632, "grad_norm": 1.7558339834213257, "learning_rate": 0.0007564292023372741, "loss": 3.5885, "step": 28690 }, { "epoch": 1.949653485527925, "grad_norm": 1.646849513053894, "learning_rate": 0.0007563867373284414, "loss": 3.5708, "step": 28695 }, { "epoch": 1.9499932055985867, "grad_norm": 2.3540053367614746, "learning_rate": 0.0007563442723196086, "loss": 3.619, "step": 28700 }, { "epoch": 1.9503329256692485, "grad_norm": 1.8143569231033325, "learning_rate": 0.0007563018073107759, "loss": 3.4673, "step": 28705 }, { "epoch": 1.9506726457399104, "grad_norm": 2.4786787033081055, "learning_rate": 0.0007562593423019433, "loss": 3.7871, "step": 28710 }, { "epoch": 1.951012365810572, "grad_norm": 2.064392566680908, "learning_rate": 0.0007562168772931105, "loss": 3.6083, "step": 28715 }, { "epoch": 1.9513520858812339, "grad_norm": 1.7983787059783936, "learning_rate": 0.0007561744122842778, "loss": 3.5995, "step": 28720 }, { "epoch": 1.9516918059518957, "grad_norm": 2.0205748081207275, "learning_rate": 0.0007561319472754451, "loss": 3.5532, "step": 28725 }, { "epoch": 1.9520315260225574, "grad_norm": 2.3359053134918213, "learning_rate": 0.0007560894822666123, "loss": 3.6323, "step": 28730 }, { "epoch": 1.9523712460932192, "grad_norm": 1.8980040550231934, "learning_rate": 0.0007560470172577795, "loss": 3.7261, "step": 28735 }, { "epoch": 1.952710966163881, "grad_norm": 1.6983160972595215, "learning_rate": 0.0007560045522489469, "loss": 3.6597, "step": 28740 }, { "epoch": 1.9530506862345427, "grad_norm": 1.9276742935180664, "learning_rate": 0.0007559620872401142, "loss": 3.7714, "step": 28745 }, { "epoch": 1.9533904063052046, "grad_norm": 1.8672256469726562, "learning_rate": 0.0007559196222312814, "loss": 3.4906, "step": 28750 }, { "epoch": 1.9537301263758664, "grad_norm": 2.0383784770965576, "learning_rate": 0.0007558771572224488, "loss": 3.6867, "step": 28755 }, { "epoch": 1.954069846446528, "grad_norm": 1.8671423196792603, "learning_rate": 
0.000755834692213616, "loss": 3.6931, "step": 28760 }, { "epoch": 1.95440956651719, "grad_norm": 2.7774505615234375, "learning_rate": 0.0007557922272047832, "loss": 3.6443, "step": 28765 }, { "epoch": 1.9547492865878517, "grad_norm": 2.4257616996765137, "learning_rate": 0.0007557497621959506, "loss": 3.6409, "step": 28770 }, { "epoch": 1.9550890066585134, "grad_norm": 2.271291971206665, "learning_rate": 0.0007557072971871178, "loss": 3.4847, "step": 28775 }, { "epoch": 1.955428726729175, "grad_norm": 1.8536708354949951, "learning_rate": 0.0007556648321782851, "loss": 3.773, "step": 28780 }, { "epoch": 1.955768446799837, "grad_norm": 2.26810359954834, "learning_rate": 0.0007556223671694525, "loss": 3.4959, "step": 28785 }, { "epoch": 1.9561081668704987, "grad_norm": 1.6657240390777588, "learning_rate": 0.0007555799021606197, "loss": 3.71, "step": 28790 }, { "epoch": 1.9564478869411603, "grad_norm": 2.4657132625579834, "learning_rate": 0.0007555374371517869, "loss": 3.5232, "step": 28795 }, { "epoch": 1.9567876070118224, "grad_norm": 1.6508173942565918, "learning_rate": 0.0007554949721429542, "loss": 3.486, "step": 28800 }, { "epoch": 1.957127327082484, "grad_norm": 2.0402984619140625, "learning_rate": 0.0007554525071341215, "loss": 3.3088, "step": 28805 }, { "epoch": 1.9574670471531457, "grad_norm": 2.0037219524383545, "learning_rate": 0.0007554100421252887, "loss": 3.5819, "step": 28810 }, { "epoch": 1.9578067672238075, "grad_norm": 2.2459774017333984, "learning_rate": 0.0007553675771164561, "loss": 3.5443, "step": 28815 }, { "epoch": 1.9581464872944694, "grad_norm": 1.9215900897979736, "learning_rate": 0.0007553251121076234, "loss": 3.5271, "step": 28820 }, { "epoch": 1.958486207365131, "grad_norm": 1.845320224761963, "learning_rate": 0.0007552826470987906, "loss": 3.4658, "step": 28825 }, { "epoch": 1.9588259274357929, "grad_norm": 2.4314420223236084, "learning_rate": 0.0007552401820899579, "loss": 3.4323, "step": 28830 }, { "epoch": 1.9591656475064547, 
"grad_norm": 2.533937931060791, "learning_rate": 0.0007551977170811252, "loss": 3.4279, "step": 28835 }, { "epoch": 1.9595053675771164, "grad_norm": 1.7066808938980103, "learning_rate": 0.000755163745074059, "loss": 3.6882, "step": 28840 }, { "epoch": 1.9598450876477782, "grad_norm": 1.779746413230896, "learning_rate": 0.0007551212800652263, "loss": 3.6634, "step": 28845 }, { "epoch": 1.96018480771844, "grad_norm": 2.6585474014282227, "learning_rate": 0.0007550788150563935, "loss": 3.5985, "step": 28850 }, { "epoch": 1.9605245277891017, "grad_norm": 1.5480992794036865, "learning_rate": 0.0007550363500475608, "loss": 3.5927, "step": 28855 }, { "epoch": 1.9608642478597635, "grad_norm": 1.9794684648513794, "learning_rate": 0.0007549938850387281, "loss": 3.6802, "step": 28860 }, { "epoch": 1.9612039679304254, "grad_norm": 1.6468297243118286, "learning_rate": 0.0007549514200298953, "loss": 3.6055, "step": 28865 }, { "epoch": 1.961543688001087, "grad_norm": 2.0960986614227295, "learning_rate": 0.0007549089550210628, "loss": 3.5162, "step": 28870 }, { "epoch": 1.9618834080717489, "grad_norm": 2.0043814182281494, "learning_rate": 0.00075486649001223, "loss": 3.7888, "step": 28875 }, { "epoch": 1.9622231281424107, "grad_norm": 1.5843803882598877, "learning_rate": 0.0007548240250033972, "loss": 3.5395, "step": 28880 }, { "epoch": 1.9625628482130724, "grad_norm": 2.1027615070343018, "learning_rate": 0.0007547815599945645, "loss": 3.3633, "step": 28885 }, { "epoch": 1.9629025682837342, "grad_norm": 1.8689805269241333, "learning_rate": 0.0007547390949857318, "loss": 3.5294, "step": 28890 }, { "epoch": 1.963242288354396, "grad_norm": 2.266254425048828, "learning_rate": 0.000754696629976899, "loss": 3.4579, "step": 28895 }, { "epoch": 1.9635820084250577, "grad_norm": 2.0531063079833984, "learning_rate": 0.0007546541649680663, "loss": 3.4223, "step": 28900 }, { "epoch": 1.9639217284957196, "grad_norm": 2.358427047729492, "learning_rate": 0.0007546116999592337, "loss": 3.8085, 
"step": 28905 }, { "epoch": 1.9642614485663814, "grad_norm": 1.8371251821517944, "learning_rate": 0.0007545692349504009, "loss": 3.3496, "step": 28910 }, { "epoch": 1.964601168637043, "grad_norm": 1.7883079051971436, "learning_rate": 0.0007545267699415682, "loss": 3.4696, "step": 28915 }, { "epoch": 1.964940888707705, "grad_norm": 1.664993405342102, "learning_rate": 0.0007544843049327355, "loss": 3.7512, "step": 28920 }, { "epoch": 1.9652806087783667, "grad_norm": 1.8590102195739746, "learning_rate": 0.0007544418399239027, "loss": 3.5092, "step": 28925 }, { "epoch": 1.9656203288490284, "grad_norm": 2.0494449138641357, "learning_rate": 0.00075439937491507, "loss": 3.5734, "step": 28930 }, { "epoch": 1.9659600489196902, "grad_norm": 1.7976523637771606, "learning_rate": 0.0007543569099062372, "loss": 3.4325, "step": 28935 }, { "epoch": 1.966299768990352, "grad_norm": 1.8390685319900513, "learning_rate": 0.0007543144448974046, "loss": 3.5535, "step": 28940 }, { "epoch": 1.9666394890610137, "grad_norm": 1.7072911262512207, "learning_rate": 0.0007542719798885719, "loss": 3.6305, "step": 28945 }, { "epoch": 1.9669792091316753, "grad_norm": 2.5659055709838867, "learning_rate": 0.0007542295148797391, "loss": 3.7947, "step": 28950 }, { "epoch": 1.9673189292023374, "grad_norm": 2.1801674365997314, "learning_rate": 0.0007541870498709064, "loss": 3.4549, "step": 28955 }, { "epoch": 1.967658649272999, "grad_norm": 2.4315953254699707, "learning_rate": 0.0007541445848620737, "loss": 3.372, "step": 28960 }, { "epoch": 1.9679983693436607, "grad_norm": 2.206204414367676, "learning_rate": 0.0007541021198532409, "loss": 3.8092, "step": 28965 }, { "epoch": 1.9683380894143228, "grad_norm": 2.453930377960205, "learning_rate": 0.0007540596548444081, "loss": 3.6236, "step": 28970 }, { "epoch": 1.9686778094849844, "grad_norm": 2.0712695121765137, "learning_rate": 0.0007540171898355756, "loss": 3.2923, "step": 28975 }, { "epoch": 1.969017529555646, "grad_norm": 1.7497748136520386, 
"learning_rate": 0.0007539747248267428, "loss": 3.403, "step": 28980 }, { "epoch": 1.9693572496263079, "grad_norm": 2.8111677169799805, "learning_rate": 0.00075393225981791, "loss": 3.4772, "step": 28985 }, { "epoch": 1.9696969696969697, "grad_norm": 1.8438998460769653, "learning_rate": 0.0007538897948090774, "loss": 3.4551, "step": 28990 }, { "epoch": 1.9700366897676314, "grad_norm": 1.7272573709487915, "learning_rate": 0.0007538473298002446, "loss": 3.3233, "step": 28995 }, { "epoch": 1.9703764098382932, "grad_norm": 1.8875010013580322, "learning_rate": 0.0007538048647914118, "loss": 3.5648, "step": 29000 }, { "epoch": 1.970716129908955, "grad_norm": 1.8527495861053467, "learning_rate": 0.0007537623997825792, "loss": 3.4976, "step": 29005 }, { "epoch": 1.9710558499796167, "grad_norm": 1.8503010272979736, "learning_rate": 0.0007537199347737465, "loss": 3.5788, "step": 29010 }, { "epoch": 1.9713955700502785, "grad_norm": 2.073554515838623, "learning_rate": 0.0007536774697649137, "loss": 3.585, "step": 29015 }, { "epoch": 1.9717352901209404, "grad_norm": 1.9698046445846558, "learning_rate": 0.0007536350047560811, "loss": 3.6884, "step": 29020 }, { "epoch": 1.972075010191602, "grad_norm": 2.0110578536987305, "learning_rate": 0.0007535925397472483, "loss": 3.6759, "step": 29025 }, { "epoch": 1.9724147302622639, "grad_norm": 2.4639840126037598, "learning_rate": 0.0007535500747384155, "loss": 3.7109, "step": 29030 }, { "epoch": 1.9727544503329257, "grad_norm": 2.2300455570220947, "learning_rate": 0.0007535076097295828, "loss": 3.5868, "step": 29035 }, { "epoch": 1.9730941704035874, "grad_norm": 1.899619460105896, "learning_rate": 0.0007534651447207501, "loss": 3.5952, "step": 29040 }, { "epoch": 1.9734338904742492, "grad_norm": 2.1253206729888916, "learning_rate": 0.0007534226797119174, "loss": 3.485, "step": 29045 }, { "epoch": 1.973773610544911, "grad_norm": 2.1081883907318115, "learning_rate": 0.0007533802147030847, "loss": 3.5874, "step": 29050 }, { "epoch": 
1.9741133306155727, "grad_norm": 2.2414042949676514, "learning_rate": 0.000753337749694252, "loss": 3.825, "step": 29055 }, { "epoch": 1.9744530506862346, "grad_norm": 2.086024522781372, "learning_rate": 0.0007532952846854192, "loss": 3.528, "step": 29060 }, { "epoch": 1.9747927707568964, "grad_norm": 2.117297410964966, "learning_rate": 0.0007532528196765865, "loss": 3.8467, "step": 29065 }, { "epoch": 1.975132490827558, "grad_norm": 1.9390350580215454, "learning_rate": 0.0007532103546677538, "loss": 3.5151, "step": 29070 }, { "epoch": 1.97547221089822, "grad_norm": 2.083444595336914, "learning_rate": 0.000753167889658921, "loss": 3.5113, "step": 29075 }, { "epoch": 1.9758119309688817, "grad_norm": 2.0591847896575928, "learning_rate": 0.0007531254246500884, "loss": 3.4543, "step": 29080 }, { "epoch": 1.9761516510395434, "grad_norm": 2.303586483001709, "learning_rate": 0.0007530829596412556, "loss": 3.6503, "step": 29085 }, { "epoch": 1.9764913711102052, "grad_norm": 2.1299703121185303, "learning_rate": 0.0007530404946324229, "loss": 3.5595, "step": 29090 }, { "epoch": 1.976831091180867, "grad_norm": 1.7647058963775635, "learning_rate": 0.0007529980296235902, "loss": 3.6454, "step": 29095 }, { "epoch": 1.9771708112515287, "grad_norm": 1.7209886312484741, "learning_rate": 0.0007529555646147574, "loss": 3.3264, "step": 29100 }, { "epoch": 1.9775105313221906, "grad_norm": 1.4366106986999512, "learning_rate": 0.0007529130996059247, "loss": 3.753, "step": 29105 }, { "epoch": 1.9778502513928524, "grad_norm": 2.1331253051757812, "learning_rate": 0.000752870634597092, "loss": 3.3105, "step": 29110 }, { "epoch": 1.978189971463514, "grad_norm": 1.5480860471725464, "learning_rate": 0.0007528281695882593, "loss": 3.5469, "step": 29115 }, { "epoch": 1.9785296915341757, "grad_norm": 2.00034499168396, "learning_rate": 0.0007527857045794266, "loss": 3.6863, "step": 29120 }, { "epoch": 1.9788694116048378, "grad_norm": 2.313823699951172, "learning_rate": 0.0007527432395705939, 
"loss": 3.7543, "step": 29125 }, { "epoch": 1.9792091316754994, "grad_norm": 2.0292277336120605, "learning_rate": 0.0007527007745617611, "loss": 3.6525, "step": 29130 }, { "epoch": 1.979548851746161, "grad_norm": 2.079293966293335, "learning_rate": 0.0007526583095529283, "loss": 3.6867, "step": 29135 }, { "epoch": 1.979888571816823, "grad_norm": 2.3945350646972656, "learning_rate": 0.0007526158445440957, "loss": 3.3704, "step": 29140 }, { "epoch": 1.9802282918874847, "grad_norm": 1.5215321779251099, "learning_rate": 0.0007525733795352629, "loss": 3.5758, "step": 29145 }, { "epoch": 1.9805680119581464, "grad_norm": 1.9508931636810303, "learning_rate": 0.0007525309145264302, "loss": 3.5839, "step": 29150 }, { "epoch": 1.9809077320288082, "grad_norm": 2.1186399459838867, "learning_rate": 0.0007524884495175976, "loss": 3.4587, "step": 29155 }, { "epoch": 1.98124745209947, "grad_norm": 2.5662615299224854, "learning_rate": 0.0007524459845087648, "loss": 3.5458, "step": 29160 }, { "epoch": 1.9815871721701317, "grad_norm": 3.3621485233306885, "learning_rate": 0.000752403519499932, "loss": 3.3708, "step": 29165 }, { "epoch": 1.9819268922407935, "grad_norm": 2.346076488494873, "learning_rate": 0.0007523610544910994, "loss": 3.7321, "step": 29170 }, { "epoch": 1.9822666123114554, "grad_norm": 1.6483404636383057, "learning_rate": 0.0007523185894822666, "loss": 3.8035, "step": 29175 }, { "epoch": 1.982606332382117, "grad_norm": 1.8588306903839111, "learning_rate": 0.0007522761244734338, "loss": 3.4592, "step": 29180 }, { "epoch": 1.9829460524527789, "grad_norm": 1.7083041667938232, "learning_rate": 0.0007522336594646012, "loss": 3.678, "step": 29185 }, { "epoch": 1.9832857725234407, "grad_norm": 2.079293727874756, "learning_rate": 0.0007521911944557685, "loss": 3.7369, "step": 29190 }, { "epoch": 1.9836254925941024, "grad_norm": 2.321451187133789, "learning_rate": 0.0007521487294469357, "loss": 3.7104, "step": 29195 }, { "epoch": 1.9839652126647642, "grad_norm": 
2.028963565826416, "learning_rate": 0.000752106264438103, "loss": 3.728, "step": 29200 }, { "epoch": 1.984304932735426, "grad_norm": 1.9774326086044312, "learning_rate": 0.0007520637994292703, "loss": 3.5383, "step": 29205 }, { "epoch": 1.9846446528060877, "grad_norm": 2.082972764968872, "learning_rate": 0.0007520213344204376, "loss": 3.3174, "step": 29210 }, { "epoch": 1.9849843728767496, "grad_norm": 2.259152412414551, "learning_rate": 0.0007519788694116048, "loss": 3.6707, "step": 29215 }, { "epoch": 1.9853240929474114, "grad_norm": 2.483609437942505, "learning_rate": 0.0007519364044027722, "loss": 3.3779, "step": 29220 }, { "epoch": 1.985663813018073, "grad_norm": 2.006988286972046, "learning_rate": 0.0007518939393939395, "loss": 3.5975, "step": 29225 }, { "epoch": 1.986003533088735, "grad_norm": 2.028993844985962, "learning_rate": 0.0007518514743851067, "loss": 3.5583, "step": 29230 }, { "epoch": 1.9863432531593967, "grad_norm": 1.920965313911438, "learning_rate": 0.0007518090093762739, "loss": 3.4424, "step": 29235 }, { "epoch": 1.9866829732300584, "grad_norm": 2.74080228805542, "learning_rate": 0.0007517665443674413, "loss": 3.4523, "step": 29240 }, { "epoch": 1.9870226933007202, "grad_norm": 2.2325186729431152, "learning_rate": 0.0007517240793586085, "loss": 3.6599, "step": 29245 }, { "epoch": 1.987362413371382, "grad_norm": 1.9820371866226196, "learning_rate": 0.0007516816143497757, "loss": 3.5953, "step": 29250 }, { "epoch": 1.9877021334420437, "grad_norm": 2.734900712966919, "learning_rate": 0.0007516391493409432, "loss": 3.5302, "step": 29255 }, { "epoch": 1.9880418535127056, "grad_norm": 1.8925930261611938, "learning_rate": 0.0007515966843321104, "loss": 3.73, "step": 29260 }, { "epoch": 1.9883815735833674, "grad_norm": 2.1075966358184814, "learning_rate": 0.0007515542193232776, "loss": 3.372, "step": 29265 }, { "epoch": 1.988721293654029, "grad_norm": 1.9788633584976196, "learning_rate": 0.000751511754314445, "loss": 3.5478, "step": 29270 }, { 
"epoch": 1.989061013724691, "grad_norm": 1.9766944646835327, "learning_rate": 0.0007514692893056122, "loss": 3.3013, "step": 29275 }, { "epoch": 1.9894007337953528, "grad_norm": 1.5793731212615967, "learning_rate": 0.0007514268242967794, "loss": 3.5119, "step": 29280 }, { "epoch": 1.9897404538660144, "grad_norm": 2.747837543487549, "learning_rate": 0.0007513843592879468, "loss": 3.619, "step": 29285 }, { "epoch": 1.990080173936676, "grad_norm": 1.672873616218567, "learning_rate": 0.0007513418942791141, "loss": 3.4901, "step": 29290 }, { "epoch": 1.990419894007338, "grad_norm": 1.706365942955017, "learning_rate": 0.0007512994292702813, "loss": 3.5481, "step": 29295 }, { "epoch": 1.9907596140779997, "grad_norm": 1.9169540405273438, "learning_rate": 0.0007512569642614486, "loss": 3.7965, "step": 29300 }, { "epoch": 1.9910993341486614, "grad_norm": 1.736554503440857, "learning_rate": 0.0007512144992526159, "loss": 3.4583, "step": 29305 }, { "epoch": 1.9914390542193234, "grad_norm": 1.5431687831878662, "learning_rate": 0.0007511720342437831, "loss": 3.5551, "step": 29310 }, { "epoch": 1.991778774289985, "grad_norm": 1.7761691808700562, "learning_rate": 0.0007511295692349504, "loss": 3.4166, "step": 29315 }, { "epoch": 1.9921184943606467, "grad_norm": 1.557693362236023, "learning_rate": 0.0007510871042261178, "loss": 3.5333, "step": 29320 }, { "epoch": 1.9924582144313085, "grad_norm": 2.0925211906433105, "learning_rate": 0.000751044639217285, "loss": 3.6471, "step": 29325 }, { "epoch": 1.9927979345019704, "grad_norm": 1.6572637557983398, "learning_rate": 0.0007510021742084523, "loss": 3.3363, "step": 29330 }, { "epoch": 1.993137654572632, "grad_norm": 2.0694658756256104, "learning_rate": 0.0007509597091996195, "loss": 3.5285, "step": 29335 }, { "epoch": 1.9934773746432939, "grad_norm": 1.957450270652771, "learning_rate": 0.0007509172441907868, "loss": 3.7825, "step": 29340 }, { "epoch": 1.9938170947139557, "grad_norm": 2.2885327339172363, "learning_rate": 
0.0007508747791819541, "loss": 3.3117, "step": 29345 }, { "epoch": 1.9941568147846174, "grad_norm": 1.803332805633545, "learning_rate": 0.0007508323141731213, "loss": 3.8073, "step": 29350 }, { "epoch": 1.9944965348552792, "grad_norm": 1.8016422986984253, "learning_rate": 0.0007507898491642887, "loss": 3.4062, "step": 29355 }, { "epoch": 1.994836254925941, "grad_norm": 3.1084139347076416, "learning_rate": 0.000750747384155456, "loss": 3.7299, "step": 29360 }, { "epoch": 1.9951759749966027, "grad_norm": 2.024219274520874, "learning_rate": 0.0007507049191466232, "loss": 3.5673, "step": 29365 }, { "epoch": 1.9955156950672646, "grad_norm": 1.9923977851867676, "learning_rate": 0.0007506624541377904, "loss": 3.607, "step": 29370 }, { "epoch": 1.9958554151379264, "grad_norm": 2.1676149368286133, "learning_rate": 0.0007506199891289578, "loss": 3.5724, "step": 29375 }, { "epoch": 1.996195135208588, "grad_norm": 1.6653499603271484, "learning_rate": 0.000750577524120125, "loss": 3.6889, "step": 29380 }, { "epoch": 1.99653485527925, "grad_norm": 2.5445237159729004, "learning_rate": 0.0007505350591112922, "loss": 3.588, "step": 29385 }, { "epoch": 1.9968745753499118, "grad_norm": 1.9592434167861938, "learning_rate": 0.0007504925941024597, "loss": 3.458, "step": 29390 }, { "epoch": 1.9972142954205734, "grad_norm": 1.5625184774398804, "learning_rate": 0.0007504501290936269, "loss": 3.6298, "step": 29395 }, { "epoch": 1.9975540154912352, "grad_norm": 2.089862585067749, "learning_rate": 0.0007504076640847941, "loss": 3.7715, "step": 29400 }, { "epoch": 1.997893735561897, "grad_norm": 1.7860420942306519, "learning_rate": 0.0007503651990759615, "loss": 3.654, "step": 29405 }, { "epoch": 1.9982334556325587, "grad_norm": 1.9726300239562988, "learning_rate": 0.0007503227340671287, "loss": 3.5819, "step": 29410 }, { "epoch": 1.9985731757032206, "grad_norm": 2.0497801303863525, "learning_rate": 0.0007502802690582959, "loss": 3.4538, "step": 29415 }, { "epoch": 1.9989128957738824, 
"grad_norm": 2.3045904636383057, "learning_rate": 0.0007502378040494632, "loss": 3.7284, "step": 29420 }, { "epoch": 1.999252615844544, "grad_norm": 1.7268067598342896, "learning_rate": 0.0007501953390406306, "loss": 3.5394, "step": 29425 }, { "epoch": 1.999592335915206, "grad_norm": 1.8649452924728394, "learning_rate": 0.0007501528740317978, "loss": 3.5404, "step": 29430 }, { "epoch": 1.9999320559858678, "grad_norm": 1.583579659461975, "learning_rate": 0.0007501104090229651, "loss": 3.5967, "step": 29435 }, { "epoch": 2.0, "eval_bertscore": { "f1": 0.8449352047438538, "precision": 0.8532625713694036, "recall": 0.8376309546184512 }, "eval_bleu_4": 0.010369407790170787, "eval_exact_match": 0.0, "eval_loss": 3.4991793632507324, "eval_meteor": 0.09256322562154896, "eval_rouge": { "rouge1": 0.12752456629783704, "rouge2": 0.017377031301040277, "rougeL": 0.10814874258904114, "rougeLsum": 0.10820075426166936 }, "eval_runtime": 969.1615, "eval_samples_per_second": 10.647, "eval_steps_per_second": 1.331, "step": 29436 }, { "epoch": 2.0002717760565294, "grad_norm": 2.354802370071411, "learning_rate": 0.0007500679440141324, "loss": 3.5122, "step": 29440 }, { "epoch": 2.000611496127191, "grad_norm": 1.6462907791137695, "learning_rate": 0.0007500254790052996, "loss": 3.4816, "step": 29445 }, { "epoch": 2.000951216197853, "grad_norm": 1.5473395586013794, "learning_rate": 0.0007499830139964669, "loss": 3.4656, "step": 29450 }, { "epoch": 2.0012909362685147, "grad_norm": 1.7334367036819458, "learning_rate": 0.0007499405489876342, "loss": 3.4931, "step": 29455 }, { "epoch": 2.0016306563391764, "grad_norm": 2.5015969276428223, "learning_rate": 0.0007498980839788015, "loss": 3.4255, "step": 29460 }, { "epoch": 2.0019703764098384, "grad_norm": 2.563007354736328, "learning_rate": 0.0007498556189699688, "loss": 3.3199, "step": 29465 }, { "epoch": 2.0023100964805, "grad_norm": 2.4323694705963135, "learning_rate": 0.000749813153961136, "loss": 3.4118, "step": 29470 }, { "epoch": 
2.0026498165511617, "grad_norm": 2.188671350479126, "learning_rate": 0.0007497706889523033, "loss": 3.4047, "step": 29475 }, { "epoch": 2.0029895366218238, "grad_norm": 2.1609208583831787, "learning_rate": 0.0007497282239434706, "loss": 3.4226, "step": 29480 }, { "epoch": 2.0033292566924854, "grad_norm": 2.0111353397369385, "learning_rate": 0.0007496857589346378, "loss": 3.4175, "step": 29485 }, { "epoch": 2.003668976763147, "grad_norm": 1.9254322052001953, "learning_rate": 0.0007496432939258051, "loss": 3.6181, "step": 29490 }, { "epoch": 2.004008696833809, "grad_norm": 1.6120940446853638, "learning_rate": 0.0007496008289169725, "loss": 3.5989, "step": 29495 }, { "epoch": 2.0043484169044707, "grad_norm": 1.9713140726089478, "learning_rate": 0.0007495583639081397, "loss": 3.3294, "step": 29500 }, { "epoch": 2.0046881369751324, "grad_norm": 2.6405980587005615, "learning_rate": 0.000749515898899307, "loss": 3.6283, "step": 29505 }, { "epoch": 2.0050278570457944, "grad_norm": 2.0683741569519043, "learning_rate": 0.0007494734338904743, "loss": 3.6904, "step": 29510 }, { "epoch": 2.005367577116456, "grad_norm": 1.7828086614608765, "learning_rate": 0.0007494309688816415, "loss": 3.5345, "step": 29515 }, { "epoch": 2.0057072971871177, "grad_norm": 4.0340800285339355, "learning_rate": 0.0007493885038728087, "loss": 3.3618, "step": 29520 }, { "epoch": 2.00604701725778, "grad_norm": 2.2615630626678467, "learning_rate": 0.0007493460388639761, "loss": 3.5634, "step": 29525 }, { "epoch": 2.0063867373284414, "grad_norm": 1.651471734046936, "learning_rate": 0.0007493035738551434, "loss": 3.4552, "step": 29530 }, { "epoch": 2.006726457399103, "grad_norm": 2.094200372695923, "learning_rate": 0.0007492611088463106, "loss": 3.7104, "step": 29535 }, { "epoch": 2.007066177469765, "grad_norm": 1.7657393217086792, "learning_rate": 0.000749218643837478, "loss": 3.6542, "step": 29540 }, { "epoch": 2.0074058975404268, "grad_norm": 2.0444278717041016, "learning_rate": 0.0007491761788286452, 
"loss": 3.7896, "step": 29545 }, { "epoch": 2.0077456176110884, "grad_norm": 2.592200756072998, "learning_rate": 0.0007491337138198124, "loss": 3.0516, "step": 29550 }, { "epoch": 2.0080853376817505, "grad_norm": 2.028357744216919, "learning_rate": 0.0007490912488109798, "loss": 3.5472, "step": 29555 }, { "epoch": 2.008425057752412, "grad_norm": 2.2153687477111816, "learning_rate": 0.000749048783802147, "loss": 3.2113, "step": 29560 }, { "epoch": 2.0087647778230737, "grad_norm": 1.9959059953689575, "learning_rate": 0.0007490063187933144, "loss": 3.4543, "step": 29565 }, { "epoch": 2.0091044978937354, "grad_norm": 1.9414331912994385, "learning_rate": 0.0007489638537844816, "loss": 3.6291, "step": 29570 }, { "epoch": 2.0094442179643974, "grad_norm": 1.751476764678955, "learning_rate": 0.0007489213887756489, "loss": 3.5563, "step": 29575 }, { "epoch": 2.009783938035059, "grad_norm": 1.9295953512191772, "learning_rate": 0.0007488789237668162, "loss": 3.6631, "step": 29580 }, { "epoch": 2.0101236581057207, "grad_norm": 1.668055534362793, "learning_rate": 0.0007488364587579834, "loss": 3.4817, "step": 29585 }, { "epoch": 2.0104633781763828, "grad_norm": 2.097961902618408, "learning_rate": 0.0007487939937491507, "loss": 3.5193, "step": 29590 }, { "epoch": 2.0108030982470444, "grad_norm": 2.544877052307129, "learning_rate": 0.000748751528740318, "loss": 3.4933, "step": 29595 }, { "epoch": 2.011142818317706, "grad_norm": 1.9485348463058472, "learning_rate": 0.0007487090637314853, "loss": 3.3115, "step": 29600 }, { "epoch": 2.011482538388368, "grad_norm": 2.025362253189087, "learning_rate": 0.0007486665987226526, "loss": 3.3682, "step": 29605 }, { "epoch": 2.0118222584590297, "grad_norm": 1.7866382598876953, "learning_rate": 0.0007486241337138199, "loss": 3.4085, "step": 29610 }, { "epoch": 2.0121619785296914, "grad_norm": 2.0620009899139404, "learning_rate": 0.0007485816687049871, "loss": 3.6147, "step": 29615 }, { "epoch": 2.0125016986003534, "grad_norm": 
2.4003982543945312, "learning_rate": 0.0007485392036961543, "loss": 3.545, "step": 29620 }, { "epoch": 2.012841418671015, "grad_norm": 1.9838953018188477, "learning_rate": 0.0007484967386873217, "loss": 3.4185, "step": 29625 }, { "epoch": 2.0131811387416767, "grad_norm": 2.1932272911071777, "learning_rate": 0.0007484542736784889, "loss": 3.5698, "step": 29630 }, { "epoch": 2.0135208588123388, "grad_norm": 1.7268091440200806, "learning_rate": 0.0007484118086696562, "loss": 3.4193, "step": 29635 }, { "epoch": 2.0138605788830004, "grad_norm": 2.146329402923584, "learning_rate": 0.0007483693436608236, "loss": 3.2768, "step": 29640 }, { "epoch": 2.014200298953662, "grad_norm": 1.6625173091888428, "learning_rate": 0.0007483268786519908, "loss": 3.3873, "step": 29645 }, { "epoch": 2.014540019024324, "grad_norm": 1.844943881034851, "learning_rate": 0.000748284413643158, "loss": 3.4758, "step": 29650 }, { "epoch": 2.0148797390949857, "grad_norm": 2.403050422668457, "learning_rate": 0.0007482419486343254, "loss": 3.7577, "step": 29655 }, { "epoch": 2.0152194591656474, "grad_norm": 1.6082336902618408, "learning_rate": 0.0007481994836254926, "loss": 3.5859, "step": 29660 }, { "epoch": 2.0155591792363095, "grad_norm": 1.7785990238189697, "learning_rate": 0.0007481570186166598, "loss": 3.515, "step": 29665 }, { "epoch": 2.015898899306971, "grad_norm": 2.2215940952301025, "learning_rate": 0.0007481145536078273, "loss": 3.3976, "step": 29670 }, { "epoch": 2.0162386193776327, "grad_norm": 2.204834461212158, "learning_rate": 0.0007480720885989945, "loss": 3.1716, "step": 29675 }, { "epoch": 2.016578339448295, "grad_norm": 2.1441543102264404, "learning_rate": 0.0007480296235901617, "loss": 3.8233, "step": 29680 }, { "epoch": 2.0169180595189564, "grad_norm": 2.0585436820983887, "learning_rate": 0.000747987158581329, "loss": 3.4097, "step": 29685 }, { "epoch": 2.017257779589618, "grad_norm": 1.7025636434555054, "learning_rate": 0.0007479446935724963, "loss": 3.6538, "step": 29690 }, { 
"epoch": 2.01759749966028, "grad_norm": 1.5264040231704712, "learning_rate": 0.0007479022285636635, "loss": 3.6236, "step": 29695 }, { "epoch": 2.0179372197309418, "grad_norm": 2.350144386291504, "learning_rate": 0.0007478597635548308, "loss": 3.5273, "step": 29700 }, { "epoch": 2.0182769398016034, "grad_norm": 1.7705981731414795, "learning_rate": 0.0007478172985459982, "loss": 3.385, "step": 29705 }, { "epoch": 2.0186166598722655, "grad_norm": 1.961063265800476, "learning_rate": 0.0007477748335371654, "loss": 3.5591, "step": 29710 }, { "epoch": 2.018956379942927, "grad_norm": 2.2715280055999756, "learning_rate": 0.0007477323685283327, "loss": 3.5392, "step": 29715 }, { "epoch": 2.0192961000135887, "grad_norm": 1.7572439908981323, "learning_rate": 0.0007476899035194999, "loss": 3.2691, "step": 29720 }, { "epoch": 2.0196358200842504, "grad_norm": 1.9276973009109497, "learning_rate": 0.0007476474385106672, "loss": 3.4729, "step": 29725 }, { "epoch": 2.0199755401549124, "grad_norm": 2.2530107498168945, "learning_rate": 0.0007476049735018345, "loss": 3.2129, "step": 29730 }, { "epoch": 2.020315260225574, "grad_norm": 2.244389295578003, "learning_rate": 0.0007475625084930017, "loss": 3.3986, "step": 29735 }, { "epoch": 2.0206549802962357, "grad_norm": 1.8687806129455566, "learning_rate": 0.0007475200434841691, "loss": 3.4091, "step": 29740 }, { "epoch": 2.0209947003668978, "grad_norm": 2.7104263305664062, "learning_rate": 0.0007474775784753364, "loss": 3.3728, "step": 29745 }, { "epoch": 2.0213344204375594, "grad_norm": 1.9739253520965576, "learning_rate": 0.0007474351134665036, "loss": 3.4725, "step": 29750 }, { "epoch": 2.021674140508221, "grad_norm": 1.7036128044128418, "learning_rate": 0.0007473926484576708, "loss": 3.5157, "step": 29755 }, { "epoch": 2.022013860578883, "grad_norm": 1.7086161375045776, "learning_rate": 0.0007473501834488382, "loss": 3.7046, "step": 29760 }, { "epoch": 2.0223535806495447, "grad_norm": 1.8089094161987305, "learning_rate": 
0.0007473077184400054, "loss": 3.4686, "step": 29765 }, { "epoch": 2.0226933007202064, "grad_norm": 2.0740582942962646, "learning_rate": 0.0007472652534311726, "loss": 3.381, "step": 29770 }, { "epoch": 2.0230330207908684, "grad_norm": 1.906786322593689, "learning_rate": 0.0007472227884223401, "loss": 3.4646, "step": 29775 }, { "epoch": 2.02337274086153, "grad_norm": 2.430172920227051, "learning_rate": 0.0007471803234135073, "loss": 3.6084, "step": 29780 }, { "epoch": 2.0237124609321917, "grad_norm": 1.7714914083480835, "learning_rate": 0.0007471378584046745, "loss": 3.5173, "step": 29785 }, { "epoch": 2.024052181002854, "grad_norm": 1.7662431001663208, "learning_rate": 0.0007470953933958419, "loss": 3.6106, "step": 29790 }, { "epoch": 2.0243919010735154, "grad_norm": 2.569659948348999, "learning_rate": 0.0007470529283870091, "loss": 3.4312, "step": 29795 }, { "epoch": 2.024731621144177, "grad_norm": 1.469433069229126, "learning_rate": 0.0007470104633781763, "loss": 3.8217, "step": 29800 }, { "epoch": 2.025071341214839, "grad_norm": 2.3980469703674316, "learning_rate": 0.0007469679983693437, "loss": 3.6344, "step": 29805 }, { "epoch": 2.0254110612855007, "grad_norm": 1.8682554960250854, "learning_rate": 0.000746925533360511, "loss": 3.6461, "step": 29810 }, { "epoch": 2.0257507813561624, "grad_norm": 2.087423324584961, "learning_rate": 0.0007468830683516782, "loss": 3.5544, "step": 29815 }, { "epoch": 2.0260905014268245, "grad_norm": 2.0079150199890137, "learning_rate": 0.0007468406033428455, "loss": 3.3872, "step": 29820 }, { "epoch": 2.026430221497486, "grad_norm": 2.1661598682403564, "learning_rate": 0.0007467981383340128, "loss": 3.5118, "step": 29825 }, { "epoch": 2.0267699415681477, "grad_norm": 1.9868141412734985, "learning_rate": 0.00074675567332518, "loss": 3.3161, "step": 29830 }, { "epoch": 2.02710966163881, "grad_norm": 2.170015335083008, "learning_rate": 0.0007467132083163473, "loss": 3.308, "step": 29835 }, { "epoch": 2.0274493817094714, "grad_norm": 
2.4898862838745117, "learning_rate": 0.0007466707433075146, "loss": 3.729, "step": 29840 }, { "epoch": 2.027789101780133, "grad_norm": 1.835547924041748, "learning_rate": 0.0007466282782986819, "loss": 3.2913, "step": 29845 }, { "epoch": 2.028128821850795, "grad_norm": 1.7968323230743408, "learning_rate": 0.0007465858132898492, "loss": 3.483, "step": 29850 }, { "epoch": 2.0284685419214568, "grad_norm": 2.053640365600586, "learning_rate": 0.0007465433482810165, "loss": 3.6013, "step": 29855 }, { "epoch": 2.0288082619921184, "grad_norm": 1.6038769483566284, "learning_rate": 0.0007465008832721837, "loss": 3.4701, "step": 29860 }, { "epoch": 2.0291479820627805, "grad_norm": 2.5497143268585205, "learning_rate": 0.000746458418263351, "loss": 3.6308, "step": 29865 }, { "epoch": 2.029487702133442, "grad_norm": 2.6271626949310303, "learning_rate": 0.0007464159532545182, "loss": 3.5757, "step": 29870 }, { "epoch": 2.0298274222041037, "grad_norm": 1.719165563583374, "learning_rate": 0.0007463734882456855, "loss": 3.6789, "step": 29875 }, { "epoch": 2.030167142274766, "grad_norm": 2.4561519622802734, "learning_rate": 0.0007463310232368529, "loss": 3.5974, "step": 29880 }, { "epoch": 2.0305068623454274, "grad_norm": 2.131194829940796, "learning_rate": 0.0007462885582280201, "loss": 3.5224, "step": 29885 }, { "epoch": 2.030846582416089, "grad_norm": 1.8053797483444214, "learning_rate": 0.0007462460932191874, "loss": 3.5391, "step": 29890 }, { "epoch": 2.031186302486751, "grad_norm": 2.501491069793701, "learning_rate": 0.0007462036282103547, "loss": 3.2961, "step": 29895 }, { "epoch": 2.0315260225574128, "grad_norm": 2.0607926845550537, "learning_rate": 0.0007461611632015219, "loss": 3.1755, "step": 29900 }, { "epoch": 2.0318657426280744, "grad_norm": 1.9378175735473633, "learning_rate": 0.0007461186981926893, "loss": 3.7298, "step": 29905 }, { "epoch": 2.032205462698736, "grad_norm": 1.566620111465454, "learning_rate": 0.0007460762331838566, "loss": 3.6276, "step": 29910 }, { 
"epoch": 2.032545182769398, "grad_norm": 2.1024813652038574, "learning_rate": 0.0007460337681750238, "loss": 3.5312, "step": 29915 }, { "epoch": 2.0328849028400597, "grad_norm": 1.8818049430847168, "learning_rate": 0.0007459913031661911, "loss": 3.3979, "step": 29920 }, { "epoch": 2.0332246229107214, "grad_norm": 2.11055850982666, "learning_rate": 0.0007459488381573584, "loss": 3.5777, "step": 29925 }, { "epoch": 2.0335643429813834, "grad_norm": 1.7475764751434326, "learning_rate": 0.0007459063731485256, "loss": 3.5523, "step": 29930 }, { "epoch": 2.033904063052045, "grad_norm": 2.3801770210266113, "learning_rate": 0.0007458639081396929, "loss": 3.3013, "step": 29935 }, { "epoch": 2.0342437831227067, "grad_norm": 2.0008373260498047, "learning_rate": 0.0007458214431308602, "loss": 3.684, "step": 29940 }, { "epoch": 2.034583503193369, "grad_norm": 2.1501612663269043, "learning_rate": 0.0007457789781220275, "loss": 3.4031, "step": 29945 }, { "epoch": 2.0349232232640304, "grad_norm": 1.5680617094039917, "learning_rate": 0.0007457365131131948, "loss": 3.3977, "step": 29950 }, { "epoch": 2.035262943334692, "grad_norm": 2.2577691078186035, "learning_rate": 0.000745694048104362, "loss": 3.5255, "step": 29955 }, { "epoch": 2.035602663405354, "grad_norm": 1.784816026687622, "learning_rate": 0.0007456515830955293, "loss": 3.3524, "step": 29960 }, { "epoch": 2.0359423834760157, "grad_norm": 1.8621132373809814, "learning_rate": 0.0007456091180866966, "loss": 3.3727, "step": 29965 }, { "epoch": 2.0362821035466774, "grad_norm": 2.736624240875244, "learning_rate": 0.0007455666530778638, "loss": 3.5956, "step": 29970 }, { "epoch": 2.0366218236173395, "grad_norm": 1.7124147415161133, "learning_rate": 0.0007455241880690311, "loss": 3.5352, "step": 29975 }, { "epoch": 2.036961543688001, "grad_norm": 2.4186031818389893, "learning_rate": 0.0007454817230601985, "loss": 3.4291, "step": 29980 }, { "epoch": 2.0373012637586627, "grad_norm": 2.019585132598877, "learning_rate": 
0.0007454392580513657, "loss": 3.422, "step": 29985 }, { "epoch": 2.037640983829325, "grad_norm": 1.9460220336914062, "learning_rate": 0.000745396793042533, "loss": 3.6516, "step": 29990 }, { "epoch": 2.0379807038999864, "grad_norm": 1.6360708475112915, "learning_rate": 0.0007453543280337003, "loss": 3.7406, "step": 29995 }, { "epoch": 2.038320423970648, "grad_norm": 1.786370038986206, "learning_rate": 0.0007453118630248675, "loss": 3.6503, "step": 30000 }, { "epoch": 2.03866014404131, "grad_norm": 1.8348329067230225, "learning_rate": 0.0007452693980160347, "loss": 3.5836, "step": 30005 }, { "epoch": 2.0389998641119718, "grad_norm": 1.7562785148620605, "learning_rate": 0.0007452269330072021, "loss": 3.6883, "step": 30010 }, { "epoch": 2.0393395841826334, "grad_norm": 1.8996809720993042, "learning_rate": 0.0007451844679983694, "loss": 3.7403, "step": 30015 }, { "epoch": 2.0396793042532955, "grad_norm": 1.6671456098556519, "learning_rate": 0.0007451420029895366, "loss": 3.5412, "step": 30020 }, { "epoch": 2.040019024323957, "grad_norm": 1.9128341674804688, "learning_rate": 0.000745099537980704, "loss": 3.5556, "step": 30025 }, { "epoch": 2.0403587443946187, "grad_norm": 1.6985582113265991, "learning_rate": 0.0007450570729718712, "loss": 3.5131, "step": 30030 }, { "epoch": 2.040698464465281, "grad_norm": 2.2000842094421387, "learning_rate": 0.0007450146079630384, "loss": 3.5825, "step": 30035 }, { "epoch": 2.0410381845359424, "grad_norm": 2.4364421367645264, "learning_rate": 0.0007449721429542058, "loss": 3.4365, "step": 30040 }, { "epoch": 2.041377904606604, "grad_norm": 1.8586729764938354, "learning_rate": 0.000744929677945373, "loss": 3.4478, "step": 30045 }, { "epoch": 2.041717624677266, "grad_norm": 2.150681495666504, "learning_rate": 0.0007448872129365403, "loss": 3.4239, "step": 30050 }, { "epoch": 2.0420573447479278, "grad_norm": 1.8546961545944214, "learning_rate": 0.0007448447479277077, "loss": 3.2423, "step": 30055 }, { "epoch": 2.0423970648185894, 
"grad_norm": 2.361347198486328, "learning_rate": 0.0007448022829188749, "loss": 3.5732, "step": 30060 }, { "epoch": 2.042736784889251, "grad_norm": 1.9771698713302612, "learning_rate": 0.0007447598179100421, "loss": 3.375, "step": 30065 }, { "epoch": 2.043076504959913, "grad_norm": 1.600304365158081, "learning_rate": 0.0007447173529012094, "loss": 3.7342, "step": 30070 }, { "epoch": 2.0434162250305747, "grad_norm": 1.8773279190063477, "learning_rate": 0.0007446748878923767, "loss": 3.372, "step": 30075 }, { "epoch": 2.0437559451012364, "grad_norm": 1.8336410522460938, "learning_rate": 0.0007446324228835439, "loss": 3.4258, "step": 30080 }, { "epoch": 2.0440956651718984, "grad_norm": 1.8946192264556885, "learning_rate": 0.0007445899578747113, "loss": 3.564, "step": 30085 }, { "epoch": 2.04443538524256, "grad_norm": 1.4984006881713867, "learning_rate": 0.0007445474928658786, "loss": 3.5118, "step": 30090 }, { "epoch": 2.0447751053132217, "grad_norm": 1.621522068977356, "learning_rate": 0.0007445050278570458, "loss": 3.3614, "step": 30095 }, { "epoch": 2.045114825383884, "grad_norm": 2.2369384765625, "learning_rate": 0.0007444625628482131, "loss": 3.4763, "step": 30100 }, { "epoch": 2.0454545454545454, "grad_norm": 1.8803410530090332, "learning_rate": 0.0007444200978393803, "loss": 3.5092, "step": 30105 }, { "epoch": 2.045794265525207, "grad_norm": 2.2968952655792236, "learning_rate": 0.0007443776328305476, "loss": 3.4234, "step": 30110 }, { "epoch": 2.046133985595869, "grad_norm": 1.6747807264328003, "learning_rate": 0.0007443351678217149, "loss": 3.6795, "step": 30115 }, { "epoch": 2.0464737056665308, "grad_norm": 1.9534659385681152, "learning_rate": 0.0007442927028128822, "loss": 3.4568, "step": 30120 }, { "epoch": 2.0468134257371924, "grad_norm": 1.9038405418395996, "learning_rate": 0.0007442502378040495, "loss": 3.496, "step": 30125 }, { "epoch": 2.0471531458078545, "grad_norm": 1.8665223121643066, "learning_rate": 0.0007442077727952168, "loss": 3.4741, "step": 
30130 }, { "epoch": 2.047492865878516, "grad_norm": 1.7198312282562256, "learning_rate": 0.000744165307786384, "loss": 3.3162, "step": 30135 }, { "epoch": 2.0478325859491777, "grad_norm": 1.7000908851623535, "learning_rate": 0.0007441228427775513, "loss": 3.6794, "step": 30140 }, { "epoch": 2.04817230601984, "grad_norm": 2.3662357330322266, "learning_rate": 0.0007440803777687186, "loss": 3.6027, "step": 30145 }, { "epoch": 2.0485120260905014, "grad_norm": 2.09205961227417, "learning_rate": 0.0007440379127598858, "loss": 3.4385, "step": 30150 }, { "epoch": 2.048851746161163, "grad_norm": 1.8276464939117432, "learning_rate": 0.0007439954477510531, "loss": 3.3897, "step": 30155 }, { "epoch": 2.049191466231825, "grad_norm": 1.8453526496887207, "learning_rate": 0.0007439529827422205, "loss": 3.5359, "step": 30160 }, { "epoch": 2.0495311863024868, "grad_norm": 1.7875738143920898, "learning_rate": 0.0007439105177333877, "loss": 3.4237, "step": 30165 }, { "epoch": 2.0498709063731484, "grad_norm": 1.9680275917053223, "learning_rate": 0.0007438680527245549, "loss": 3.4375, "step": 30170 }, { "epoch": 2.0502106264438105, "grad_norm": 1.8498371839523315, "learning_rate": 0.0007438255877157223, "loss": 3.4944, "step": 30175 }, { "epoch": 2.050550346514472, "grad_norm": 1.659705638885498, "learning_rate": 0.0007437831227068895, "loss": 3.6156, "step": 30180 }, { "epoch": 2.0508900665851337, "grad_norm": 2.571960926055908, "learning_rate": 0.0007437406576980567, "loss": 3.3515, "step": 30185 }, { "epoch": 2.051229786655796, "grad_norm": 1.8847464323043823, "learning_rate": 0.0007436981926892242, "loss": 3.3827, "step": 30190 }, { "epoch": 2.0515695067264574, "grad_norm": 2.085775375366211, "learning_rate": 0.0007436557276803914, "loss": 3.2667, "step": 30195 }, { "epoch": 2.051909226797119, "grad_norm": 2.281207323074341, "learning_rate": 0.0007436132626715586, "loss": 3.752, "step": 30200 }, { "epoch": 2.052248946867781, "grad_norm": 1.4367952346801758, "learning_rate": 
0.000743570797662726, "loss": 3.4961, "step": 30205 }, { "epoch": 2.0525886669384428, "grad_norm": 2.650442600250244, "learning_rate": 0.0007435283326538932, "loss": 3.5171, "step": 30210 }, { "epoch": 2.0529283870091044, "grad_norm": 2.0357344150543213, "learning_rate": 0.0007434858676450604, "loss": 3.5518, "step": 30215 }, { "epoch": 2.0532681070797665, "grad_norm": 1.6430822610855103, "learning_rate": 0.0007434434026362277, "loss": 3.6812, "step": 30220 }, { "epoch": 2.053607827150428, "grad_norm": 1.5442622900009155, "learning_rate": 0.0007434009376273951, "loss": 3.1952, "step": 30225 }, { "epoch": 2.0539475472210897, "grad_norm": 2.2859456539154053, "learning_rate": 0.0007433584726185623, "loss": 3.3098, "step": 30230 }, { "epoch": 2.054287267291752, "grad_norm": 1.9817028045654297, "learning_rate": 0.0007433160076097296, "loss": 3.613, "step": 30235 }, { "epoch": 2.0546269873624134, "grad_norm": 1.820459246635437, "learning_rate": 0.0007432735426008969, "loss": 3.4296, "step": 30240 }, { "epoch": 2.054966707433075, "grad_norm": 1.8120876550674438, "learning_rate": 0.0007432310775920642, "loss": 3.588, "step": 30245 }, { "epoch": 2.0553064275037367, "grad_norm": 1.7695478200912476, "learning_rate": 0.0007431886125832314, "loss": 3.5134, "step": 30250 }, { "epoch": 2.055646147574399, "grad_norm": 1.9321861267089844, "learning_rate": 0.0007431461475743986, "loss": 3.7359, "step": 30255 }, { "epoch": 2.0559858676450604, "grad_norm": 1.8134777545928955, "learning_rate": 0.0007431036825655661, "loss": 3.8141, "step": 30260 }, { "epoch": 2.056325587715722, "grad_norm": 1.8394792079925537, "learning_rate": 0.0007430612175567333, "loss": 3.645, "step": 30265 }, { "epoch": 2.056665307786384, "grad_norm": 1.9357569217681885, "learning_rate": 0.0007430187525479005, "loss": 3.3042, "step": 30270 }, { "epoch": 2.0570050278570458, "grad_norm": 1.9597339630126953, "learning_rate": 0.0007429762875390679, "loss": 3.5907, "step": 30275 }, { "epoch": 2.0573447479277074, 
"grad_norm": 2.6538360118865967, "learning_rate": 0.0007429338225302351, "loss": 3.2477, "step": 30280 }, { "epoch": 2.0576844679983695, "grad_norm": 1.9859758615493774, "learning_rate": 0.0007428913575214023, "loss": 3.5743, "step": 30285 }, { "epoch": 2.058024188069031, "grad_norm": 1.859808325767517, "learning_rate": 0.0007428488925125697, "loss": 3.532, "step": 30290 }, { "epoch": 2.0583639081396927, "grad_norm": 2.425037384033203, "learning_rate": 0.000742806427503737, "loss": 3.4142, "step": 30295 }, { "epoch": 2.058703628210355, "grad_norm": 2.5952794551849365, "learning_rate": 0.0007427639624949042, "loss": 3.5817, "step": 30300 }, { "epoch": 2.0590433482810164, "grad_norm": 1.979926347732544, "learning_rate": 0.0007427214974860715, "loss": 3.5225, "step": 30305 }, { "epoch": 2.059383068351678, "grad_norm": 2.319232225418091, "learning_rate": 0.0007426790324772388, "loss": 3.3919, "step": 30310 }, { "epoch": 2.05972278842234, "grad_norm": 1.9280160665512085, "learning_rate": 0.000742636567468406, "loss": 3.53, "step": 30315 }, { "epoch": 2.0600625084930018, "grad_norm": 1.8750584125518799, "learning_rate": 0.0007425941024595733, "loss": 3.57, "step": 30320 }, { "epoch": 2.0604022285636634, "grad_norm": 1.9088917970657349, "learning_rate": 0.0007425516374507406, "loss": 3.5746, "step": 30325 }, { "epoch": 2.0607419486343255, "grad_norm": 2.012566089630127, "learning_rate": 0.0007425091724419079, "loss": 3.397, "step": 30330 }, { "epoch": 2.061081668704987, "grad_norm": 1.892799735069275, "learning_rate": 0.0007424667074330752, "loss": 3.3747, "step": 30335 }, { "epoch": 2.0614213887756487, "grad_norm": 1.92022705078125, "learning_rate": 0.0007424242424242425, "loss": 3.414, "step": 30340 }, { "epoch": 2.061761108846311, "grad_norm": 1.9819622039794922, "learning_rate": 0.0007423817774154097, "loss": 3.3135, "step": 30345 }, { "epoch": 2.0621008289169724, "grad_norm": 1.900918960571289, "learning_rate": 0.000742339312406577, "loss": 3.5796, "step": 30350 }, { 
"epoch": 2.062440548987634, "grad_norm": 1.8777730464935303, "learning_rate": 0.0007422968473977442, "loss": 3.5674, "step": 30355 }, { "epoch": 2.062780269058296, "grad_norm": 2.0889976024627686, "learning_rate": 0.0007422543823889115, "loss": 3.5893, "step": 30360 }, { "epoch": 2.0631199891289578, "grad_norm": 2.1242752075195312, "learning_rate": 0.0007422119173800789, "loss": 3.2416, "step": 30365 }, { "epoch": 2.0634597091996194, "grad_norm": 2.077690362930298, "learning_rate": 0.0007421694523712461, "loss": 3.3815, "step": 30370 }, { "epoch": 2.0637994292702815, "grad_norm": 2.1760494709014893, "learning_rate": 0.0007421269873624134, "loss": 3.4147, "step": 30375 }, { "epoch": 2.064139149340943, "grad_norm": 1.5859174728393555, "learning_rate": 0.0007420845223535807, "loss": 3.4151, "step": 30380 }, { "epoch": 2.0644788694116047, "grad_norm": 1.8031126260757446, "learning_rate": 0.0007420420573447479, "loss": 3.4946, "step": 30385 }, { "epoch": 2.064818589482267, "grad_norm": 2.2558465003967285, "learning_rate": 0.0007419995923359151, "loss": 3.6133, "step": 30390 }, { "epoch": 2.0651583095529285, "grad_norm": 2.040922164916992, "learning_rate": 0.0007419571273270826, "loss": 3.6472, "step": 30395 }, { "epoch": 2.06549802962359, "grad_norm": 2.275667428970337, "learning_rate": 0.0007419146623182498, "loss": 3.2833, "step": 30400 }, { "epoch": 2.0658377496942517, "grad_norm": 2.2380120754241943, "learning_rate": 0.000741872197309417, "loss": 3.4386, "step": 30405 }, { "epoch": 2.066177469764914, "grad_norm": 1.9590144157409668, "learning_rate": 0.0007418297323005844, "loss": 3.4933, "step": 30410 }, { "epoch": 2.0665171898355754, "grad_norm": 1.4563283920288086, "learning_rate": 0.0007417872672917516, "loss": 3.3766, "step": 30415 }, { "epoch": 2.066856909906237, "grad_norm": 3.0660250186920166, "learning_rate": 0.0007417448022829188, "loss": 3.3206, "step": 30420 }, { "epoch": 2.067196629976899, "grad_norm": 1.9124330282211304, "learning_rate": 
0.0007417023372740862, "loss": 3.3642, "step": 30425 }, { "epoch": 2.0675363500475608, "grad_norm": 1.6650110483169556, "learning_rate": 0.0007416598722652535, "loss": 3.5609, "step": 30430 }, { "epoch": 2.0678760701182224, "grad_norm": 1.4088740348815918, "learning_rate": 0.0007416174072564207, "loss": 3.551, "step": 30435 }, { "epoch": 2.0682157901888845, "grad_norm": 2.960362672805786, "learning_rate": 0.0007415749422475881, "loss": 3.6278, "step": 30440 }, { "epoch": 2.068555510259546, "grad_norm": 1.944541573524475, "learning_rate": 0.0007415324772387553, "loss": 3.5742, "step": 30445 }, { "epoch": 2.0688952303302077, "grad_norm": 1.5660881996154785, "learning_rate": 0.0007414900122299225, "loss": 3.7454, "step": 30450 }, { "epoch": 2.06923495040087, "grad_norm": 1.7535724639892578, "learning_rate": 0.0007414475472210898, "loss": 3.3353, "step": 30455 }, { "epoch": 2.0695746704715314, "grad_norm": 2.0362536907196045, "learning_rate": 0.0007414050822122571, "loss": 3.6577, "step": 30460 }, { "epoch": 2.069914390542193, "grad_norm": 1.7999886274337769, "learning_rate": 0.0007413626172034244, "loss": 3.6077, "step": 30465 }, { "epoch": 2.070254110612855, "grad_norm": 1.906367301940918, "learning_rate": 0.0007413201521945917, "loss": 3.5015, "step": 30470 }, { "epoch": 2.0705938306835168, "grad_norm": 2.200892210006714, "learning_rate": 0.000741277687185759, "loss": 3.3754, "step": 30475 }, { "epoch": 2.0709335507541784, "grad_norm": 2.732346296310425, "learning_rate": 0.0007412352221769262, "loss": 3.5968, "step": 30480 }, { "epoch": 2.0712732708248405, "grad_norm": 2.031486988067627, "learning_rate": 0.0007411927571680935, "loss": 3.3808, "step": 30485 }, { "epoch": 2.071612990895502, "grad_norm": 1.9306610822677612, "learning_rate": 0.0007411502921592608, "loss": 3.507, "step": 30490 }, { "epoch": 2.0719527109661637, "grad_norm": 1.9027440547943115, "learning_rate": 0.000741107827150428, "loss": 3.5653, "step": 30495 }, { "epoch": 2.072292431036826, 
"grad_norm": 2.2939186096191406, "learning_rate": 0.0007410653621415954, "loss": 3.3206, "step": 30500 }, { "epoch": 2.0726321511074874, "grad_norm": 1.9530009031295776, "learning_rate": 0.0007410228971327626, "loss": 3.5746, "step": 30505 }, { "epoch": 2.072971871178149, "grad_norm": 1.8617311716079712, "learning_rate": 0.0007409804321239299, "loss": 3.3688, "step": 30510 }, { "epoch": 2.073311591248811, "grad_norm": 2.2576634883880615, "learning_rate": 0.0007409379671150972, "loss": 3.3132, "step": 30515 }, { "epoch": 2.073651311319473, "grad_norm": 1.9809447526931763, "learning_rate": 0.0007408955021062644, "loss": 3.5281, "step": 30520 }, { "epoch": 2.0739910313901344, "grad_norm": 1.7951769828796387, "learning_rate": 0.0007408530370974317, "loss": 3.7599, "step": 30525 }, { "epoch": 2.0743307514607965, "grad_norm": 1.7738325595855713, "learning_rate": 0.000740810572088599, "loss": 3.5633, "step": 30530 }, { "epoch": 2.074670471531458, "grad_norm": 1.9253051280975342, "learning_rate": 0.0007407681070797663, "loss": 3.2633, "step": 30535 }, { "epoch": 2.0750101916021197, "grad_norm": 1.9637740850448608, "learning_rate": 0.0007407256420709336, "loss": 3.2953, "step": 30540 }, { "epoch": 2.075349911672782, "grad_norm": 1.6579042673110962, "learning_rate": 0.0007406831770621009, "loss": 3.4668, "step": 30545 }, { "epoch": 2.0756896317434435, "grad_norm": 1.85837721824646, "learning_rate": 0.0007406407120532681, "loss": 3.4118, "step": 30550 }, { "epoch": 2.076029351814105, "grad_norm": 2.091175079345703, "learning_rate": 0.0007405982470444353, "loss": 3.5693, "step": 30555 }, { "epoch": 2.076369071884767, "grad_norm": 1.9666852951049805, "learning_rate": 0.0007405557820356027, "loss": 3.487, "step": 30560 }, { "epoch": 2.076708791955429, "grad_norm": 2.047792911529541, "learning_rate": 0.0007405133170267699, "loss": 3.5405, "step": 30565 }, { "epoch": 2.0770485120260904, "grad_norm": 1.5702121257781982, "learning_rate": 0.0007404708520179372, "loss": 3.2791, 
"step": 30570 }, { "epoch": 2.0773882320967525, "grad_norm": 1.9571086168289185, "learning_rate": 0.0007404283870091046, "loss": 3.5139, "step": 30575 }, { "epoch": 2.077727952167414, "grad_norm": 1.9231312274932861, "learning_rate": 0.0007403859220002718, "loss": 3.6347, "step": 30580 }, { "epoch": 2.0780676722380758, "grad_norm": 2.019996404647827, "learning_rate": 0.0007403434569914391, "loss": 3.2905, "step": 30585 }, { "epoch": 2.0784073923087374, "grad_norm": 2.0129177570343018, "learning_rate": 0.0007403009919826064, "loss": 3.3579, "step": 30590 }, { "epoch": 2.0787471123793995, "grad_norm": 1.911144495010376, "learning_rate": 0.0007402585269737736, "loss": 3.4629, "step": 30595 }, { "epoch": 2.079086832450061, "grad_norm": 2.2965967655181885, "learning_rate": 0.0007402160619649409, "loss": 3.585, "step": 30600 }, { "epoch": 2.0794265525207227, "grad_norm": 1.9356422424316406, "learning_rate": 0.0007401735969561082, "loss": 3.6017, "step": 30605 }, { "epoch": 2.079766272591385, "grad_norm": 1.6498095989227295, "learning_rate": 0.0007401311319472755, "loss": 3.6318, "step": 30610 }, { "epoch": 2.0801059926620464, "grad_norm": 1.9683713912963867, "learning_rate": 0.0007400886669384428, "loss": 3.5535, "step": 30615 }, { "epoch": 2.080445712732708, "grad_norm": 2.029987335205078, "learning_rate": 0.00074004620192961, "loss": 3.2978, "step": 30620 }, { "epoch": 2.08078543280337, "grad_norm": 1.614084005355835, "learning_rate": 0.0007400037369207773, "loss": 3.598, "step": 30625 }, { "epoch": 2.0811251528740318, "grad_norm": 2.2121152877807617, "learning_rate": 0.0007399612719119446, "loss": 3.5165, "step": 30630 }, { "epoch": 2.0814648729446934, "grad_norm": 2.5340464115142822, "learning_rate": 0.0007399188069031118, "loss": 3.5053, "step": 30635 }, { "epoch": 2.0818045930153555, "grad_norm": 2.1792500019073486, "learning_rate": 0.0007398763418942792, "loss": 3.5271, "step": 30640 }, { "epoch": 2.082144313086017, "grad_norm": 2.525747776031494, "learning_rate": 
0.0007398338768854465, "loss": 3.8251, "step": 30645 }, { "epoch": 2.0824840331566787, "grad_norm": 1.984029769897461, "learning_rate": 0.0007397914118766137, "loss": 3.652, "step": 30650 }, { "epoch": 2.082823753227341, "grad_norm": 1.5086020231246948, "learning_rate": 0.0007397489468677809, "loss": 3.6997, "step": 30655 }, { "epoch": 2.0831634732980024, "grad_norm": 2.0879464149475098, "learning_rate": 0.0007397064818589483, "loss": 3.5883, "step": 30660 }, { "epoch": 2.083503193368664, "grad_norm": 1.5598366260528564, "learning_rate": 0.0007396640168501155, "loss": 3.4413, "step": 30665 }, { "epoch": 2.083842913439326, "grad_norm": 2.4348998069763184, "learning_rate": 0.0007396215518412827, "loss": 3.4902, "step": 30670 }, { "epoch": 2.084182633509988, "grad_norm": 1.8863259553909302, "learning_rate": 0.0007395790868324502, "loss": 3.4606, "step": 30675 }, { "epoch": 2.0845223535806494, "grad_norm": 1.793752670288086, "learning_rate": 0.0007395366218236174, "loss": 3.425, "step": 30680 }, { "epoch": 2.0848620736513115, "grad_norm": 2.792020797729492, "learning_rate": 0.0007394941568147846, "loss": 3.5954, "step": 30685 }, { "epoch": 2.085201793721973, "grad_norm": 2.002582550048828, "learning_rate": 0.000739451691805952, "loss": 3.4782, "step": 30690 }, { "epoch": 2.0855415137926347, "grad_norm": 1.8907955884933472, "learning_rate": 0.0007394092267971192, "loss": 3.5186, "step": 30695 }, { "epoch": 2.085881233863297, "grad_norm": 1.968109369277954, "learning_rate": 0.0007393667617882864, "loss": 3.2831, "step": 30700 }, { "epoch": 2.0862209539339585, "grad_norm": 1.797014594078064, "learning_rate": 0.0007393242967794537, "loss": 3.4403, "step": 30705 }, { "epoch": 2.08656067400462, "grad_norm": 2.6783316135406494, "learning_rate": 0.0007392818317706211, "loss": 3.466, "step": 30710 }, { "epoch": 2.086900394075282, "grad_norm": 1.9357614517211914, "learning_rate": 0.0007392393667617883, "loss": 3.4753, "step": 30715 }, { "epoch": 2.087240114145944, "grad_norm": 
1.781619906425476, "learning_rate": 0.0007391969017529556, "loss": 3.5234, "step": 30720 }, { "epoch": 2.0875798342166054, "grad_norm": 1.7760636806488037, "learning_rate": 0.0007391544367441229, "loss": 3.7786, "step": 30725 }, { "epoch": 2.0879195542872675, "grad_norm": 1.9045113325119019, "learning_rate": 0.0007391119717352901, "loss": 3.5787, "step": 30730 }, { "epoch": 2.088259274357929, "grad_norm": 2.2940425872802734, "learning_rate": 0.0007390695067264574, "loss": 3.4709, "step": 30735 }, { "epoch": 2.0885989944285908, "grad_norm": 1.577188491821289, "learning_rate": 0.0007390270417176246, "loss": 3.356, "step": 30740 }, { "epoch": 2.0889387144992524, "grad_norm": 1.6276273727416992, "learning_rate": 0.000738984576708792, "loss": 3.532, "step": 30745 }, { "epoch": 2.0892784345699145, "grad_norm": 1.6320316791534424, "learning_rate": 0.0007389421116999593, "loss": 3.5803, "step": 30750 }, { "epoch": 2.089618154640576, "grad_norm": 1.8638200759887695, "learning_rate": 0.0007388996466911265, "loss": 3.8012, "step": 30755 }, { "epoch": 2.0899578747112377, "grad_norm": 2.0668601989746094, "learning_rate": 0.0007388571816822938, "loss": 3.4433, "step": 30760 }, { "epoch": 2.0902975947819, "grad_norm": 1.5485219955444336, "learning_rate": 0.0007388147166734611, "loss": 3.9709, "step": 30765 }, { "epoch": 2.0906373148525614, "grad_norm": 1.5649738311767578, "learning_rate": 0.0007387722516646283, "loss": 3.5043, "step": 30770 }, { "epoch": 2.090977034923223, "grad_norm": 1.4882382154464722, "learning_rate": 0.0007387297866557956, "loss": 3.4263, "step": 30775 }, { "epoch": 2.091316754993885, "grad_norm": 1.6568490266799927, "learning_rate": 0.000738687321646963, "loss": 3.5196, "step": 30780 }, { "epoch": 2.0916564750645468, "grad_norm": 1.8525081872940063, "learning_rate": 0.0007386448566381302, "loss": 3.6311, "step": 30785 }, { "epoch": 2.0919961951352084, "grad_norm": 1.5253804922103882, "learning_rate": 0.0007386023916292974, "loss": 3.5746, "step": 30790 }, { 
"epoch": 2.0923359152058705, "grad_norm": 2.368086576461792, "learning_rate": 0.0007385599266204648, "loss": 3.6871, "step": 30795 }, { "epoch": 2.092675635276532, "grad_norm": 1.5461509227752686, "learning_rate": 0.000738517461611632, "loss": 3.4564, "step": 30800 }, { "epoch": 2.0930153553471937, "grad_norm": 2.914423704147339, "learning_rate": 0.0007384749966027992, "loss": 3.4482, "step": 30805 }, { "epoch": 2.093355075417856, "grad_norm": 2.1868083477020264, "learning_rate": 0.0007384325315939666, "loss": 3.5485, "step": 30810 }, { "epoch": 2.0936947954885174, "grad_norm": 1.6107031106948853, "learning_rate": 0.0007383900665851339, "loss": 3.4955, "step": 30815 }, { "epoch": 2.094034515559179, "grad_norm": 1.8981162309646606, "learning_rate": 0.0007383476015763011, "loss": 3.3874, "step": 30820 }, { "epoch": 2.094374235629841, "grad_norm": 1.976198673248291, "learning_rate": 0.0007383051365674685, "loss": 3.5802, "step": 30825 }, { "epoch": 2.094713955700503, "grad_norm": 2.381622314453125, "learning_rate": 0.0007382626715586357, "loss": 3.5412, "step": 30830 }, { "epoch": 2.0950536757711644, "grad_norm": 1.8705140352249146, "learning_rate": 0.0007382202065498029, "loss": 3.4854, "step": 30835 }, { "epoch": 2.0953933958418265, "grad_norm": 1.8076430559158325, "learning_rate": 0.0007381777415409702, "loss": 3.4397, "step": 30840 }, { "epoch": 2.095733115912488, "grad_norm": 2.0691611766815186, "learning_rate": 0.0007381352765321375, "loss": 3.5806, "step": 30845 }, { "epoch": 2.0960728359831498, "grad_norm": 1.562029242515564, "learning_rate": 0.0007380928115233048, "loss": 3.7368, "step": 30850 }, { "epoch": 2.096412556053812, "grad_norm": 1.7920349836349487, "learning_rate": 0.0007380503465144721, "loss": 3.4676, "step": 30855 }, { "epoch": 2.0967522761244735, "grad_norm": 1.957507610321045, "learning_rate": 0.0007380078815056394, "loss": 3.6317, "step": 30860 }, { "epoch": 2.097091996195135, "grad_norm": 2.2116284370422363, "learning_rate": 
0.0007379654164968066, "loss": 3.2271, "step": 30865 }, { "epoch": 2.097431716265797, "grad_norm": 1.847569227218628, "learning_rate": 0.0007379229514879739, "loss": 3.6302, "step": 30870 }, { "epoch": 2.097771436336459, "grad_norm": 1.6995593309402466, "learning_rate": 0.0007378804864791412, "loss": 3.4146, "step": 30875 }, { "epoch": 2.0981111564071204, "grad_norm": 2.1272799968719482, "learning_rate": 0.0007378380214703084, "loss": 3.4012, "step": 30880 }, { "epoch": 2.0984508764777825, "grad_norm": 1.9827845096588135, "learning_rate": 0.0007377955564614758, "loss": 3.7248, "step": 30885 }, { "epoch": 2.098790596548444, "grad_norm": 1.6852699518203735, "learning_rate": 0.000737753091452643, "loss": 3.6678, "step": 30890 }, { "epoch": 2.0991303166191058, "grad_norm": 2.1013638973236084, "learning_rate": 0.0007377106264438103, "loss": 3.4166, "step": 30895 }, { "epoch": 2.099470036689768, "grad_norm": 2.1252877712249756, "learning_rate": 0.0007376681614349776, "loss": 3.5915, "step": 30900 }, { "epoch": 2.0998097567604295, "grad_norm": 1.687497854232788, "learning_rate": 0.0007376256964261448, "loss": 3.7311, "step": 30905 }, { "epoch": 2.100149476831091, "grad_norm": 1.7964993715286255, "learning_rate": 0.0007375832314173121, "loss": 3.5179, "step": 30910 }, { "epoch": 2.100489196901753, "grad_norm": 1.9630186557769775, "learning_rate": 0.0007375407664084794, "loss": 3.6109, "step": 30915 }, { "epoch": 2.100828916972415, "grad_norm": 2.0496325492858887, "learning_rate": 0.0007374983013996467, "loss": 3.2647, "step": 30920 }, { "epoch": 2.1011686370430764, "grad_norm": 2.231839895248413, "learning_rate": 0.0007374558363908141, "loss": 3.7014, "step": 30925 }, { "epoch": 2.101508357113738, "grad_norm": 1.7164933681488037, "learning_rate": 0.0007374133713819813, "loss": 3.4003, "step": 30930 }, { "epoch": 2.1018480771844, "grad_norm": 2.5328891277313232, "learning_rate": 0.0007373709063731485, "loss": 3.4998, "step": 30935 }, { "epoch": 2.1021877972550618, 
"grad_norm": 1.5325473546981812, "learning_rate": 0.0007373284413643158, "loss": 3.597, "step": 30940 }, { "epoch": 2.1025275173257234, "grad_norm": 2.0655548572540283, "learning_rate": 0.0007372859763554831, "loss": 3.244, "step": 30945 }, { "epoch": 2.1028672373963855, "grad_norm": 2.1431472301483154, "learning_rate": 0.0007372435113466503, "loss": 3.4169, "step": 30950 }, { "epoch": 2.103206957467047, "grad_norm": 2.8612165451049805, "learning_rate": 0.0007372010463378177, "loss": 3.5632, "step": 30955 }, { "epoch": 2.1035466775377087, "grad_norm": 1.9974379539489746, "learning_rate": 0.000737158581328985, "loss": 3.808, "step": 30960 }, { "epoch": 2.103886397608371, "grad_norm": 2.0292022228240967, "learning_rate": 0.0007371161163201522, "loss": 3.4567, "step": 30965 }, { "epoch": 2.1042261176790324, "grad_norm": 2.230933904647827, "learning_rate": 0.0007370736513113195, "loss": 3.4483, "step": 30970 }, { "epoch": 2.104565837749694, "grad_norm": 2.0662596225738525, "learning_rate": 0.0007370311863024868, "loss": 3.4692, "step": 30975 }, { "epoch": 2.104905557820356, "grad_norm": 1.7616220712661743, "learning_rate": 0.000736988721293654, "loss": 3.3938, "step": 30980 }, { "epoch": 2.105245277891018, "grad_norm": 1.629616618156433, "learning_rate": 0.0007369462562848214, "loss": 3.4855, "step": 30985 }, { "epoch": 2.1055849979616794, "grad_norm": 1.9831851720809937, "learning_rate": 0.0007369037912759886, "loss": 3.4378, "step": 30990 }, { "epoch": 2.1059247180323415, "grad_norm": 1.9959938526153564, "learning_rate": 0.0007368613262671559, "loss": 3.6474, "step": 30995 }, { "epoch": 2.106264438103003, "grad_norm": 1.9048781394958496, "learning_rate": 0.0007368188612583232, "loss": 3.5945, "step": 31000 }, { "epoch": 2.1066041581736648, "grad_norm": 1.6925939321517944, "learning_rate": 0.0007367763962494904, "loss": 3.5053, "step": 31005 }, { "epoch": 2.106943878244327, "grad_norm": 1.7292495965957642, "learning_rate": 0.0007367339312406577, "loss": 3.4476, 
"step": 31010 }, { "epoch": 2.1072835983149885, "grad_norm": 1.61594557762146, "learning_rate": 0.000736691466231825, "loss": 3.7247, "step": 31015 }, { "epoch": 2.10762331838565, "grad_norm": 1.650230884552002, "learning_rate": 0.0007366490012229923, "loss": 3.3616, "step": 31020 }, { "epoch": 2.107963038456312, "grad_norm": 1.9673179388046265, "learning_rate": 0.0007366065362141596, "loss": 3.4524, "step": 31025 }, { "epoch": 2.108302758526974, "grad_norm": 1.7110573053359985, "learning_rate": 0.0007365640712053269, "loss": 3.5475, "step": 31030 }, { "epoch": 2.1086424785976354, "grad_norm": 2.4728434085845947, "learning_rate": 0.0007365216061964941, "loss": 3.3273, "step": 31035 }, { "epoch": 2.1089821986682975, "grad_norm": 1.9059362411499023, "learning_rate": 0.0007364791411876613, "loss": 3.4966, "step": 31040 }, { "epoch": 2.109321918738959, "grad_norm": 1.641140341758728, "learning_rate": 0.0007364366761788287, "loss": 3.637, "step": 31045 }, { "epoch": 2.1096616388096208, "grad_norm": 2.735410451889038, "learning_rate": 0.0007363942111699959, "loss": 3.537, "step": 31050 }, { "epoch": 2.110001358880283, "grad_norm": 1.8277658224105835, "learning_rate": 0.0007363517461611632, "loss": 3.8302, "step": 31055 }, { "epoch": 2.1103410789509445, "grad_norm": 2.26867938041687, "learning_rate": 0.0007363092811523306, "loss": 3.5605, "step": 31060 }, { "epoch": 2.110680799021606, "grad_norm": 2.1069021224975586, "learning_rate": 0.0007362668161434978, "loss": 3.6003, "step": 31065 }, { "epoch": 2.111020519092268, "grad_norm": 2.2098166942596436, "learning_rate": 0.000736224351134665, "loss": 3.6172, "step": 31070 }, { "epoch": 2.11136023916293, "grad_norm": 1.616003155708313, "learning_rate": 0.0007361818861258324, "loss": 3.7885, "step": 31075 }, { "epoch": 2.1116999592335914, "grad_norm": 2.561115026473999, "learning_rate": 0.0007361394211169996, "loss": 3.4405, "step": 31080 }, { "epoch": 2.112039679304253, "grad_norm": 2.442678928375244, "learning_rate": 
0.0007360969561081668, "loss": 3.5312, "step": 31085 }, { "epoch": 2.112379399374915, "grad_norm": 1.9218376874923706, "learning_rate": 0.0007360544910993343, "loss": 3.415, "step": 31090 }, { "epoch": 2.1127191194455768, "grad_norm": 1.464421272277832, "learning_rate": 0.0007360120260905015, "loss": 3.5269, "step": 31095 }, { "epoch": 2.1130588395162384, "grad_norm": 1.4347100257873535, "learning_rate": 0.0007359695610816687, "loss": 3.2386, "step": 31100 }, { "epoch": 2.1133985595869005, "grad_norm": 2.168793201446533, "learning_rate": 0.000735927096072836, "loss": 3.5165, "step": 31105 }, { "epoch": 2.113738279657562, "grad_norm": 2.127394199371338, "learning_rate": 0.0007358846310640033, "loss": 3.7249, "step": 31110 }, { "epoch": 2.1140779997282237, "grad_norm": 1.6829040050506592, "learning_rate": 0.0007358421660551705, "loss": 3.3765, "step": 31115 }, { "epoch": 2.114417719798886, "grad_norm": 1.7156579494476318, "learning_rate": 0.0007357997010463378, "loss": 3.4706, "step": 31120 }, { "epoch": 2.1147574398695475, "grad_norm": 2.1130359172821045, "learning_rate": 0.0007357572360375052, "loss": 3.6747, "step": 31125 }, { "epoch": 2.115097159940209, "grad_norm": 1.8499000072479248, "learning_rate": 0.0007357147710286724, "loss": 3.4264, "step": 31130 }, { "epoch": 2.115436880010871, "grad_norm": 1.8350439071655273, "learning_rate": 0.0007356723060198397, "loss": 3.1721, "step": 31135 }, { "epoch": 2.115776600081533, "grad_norm": 1.9848757982254028, "learning_rate": 0.0007356298410110069, "loss": 3.3047, "step": 31140 }, { "epoch": 2.1161163201521944, "grad_norm": 1.724480152130127, "learning_rate": 0.0007355873760021742, "loss": 3.7807, "step": 31145 }, { "epoch": 2.1164560402228565, "grad_norm": 2.779534101486206, "learning_rate": 0.0007355449109933415, "loss": 3.8684, "step": 31150 }, { "epoch": 2.116795760293518, "grad_norm": 1.8459608554840088, "learning_rate": 0.0007355024459845087, "loss": 3.1649, "step": 31155 }, { "epoch": 2.1171354803641798, 
"grad_norm": 2.0859971046447754, "learning_rate": 0.0007354599809756761, "loss": 3.4082, "step": 31160 }, { "epoch": 2.117475200434842, "grad_norm": 1.7585326433181763, "learning_rate": 0.0007354175159668434, "loss": 3.6562, "step": 31165 }, { "epoch": 2.1178149205055035, "grad_norm": 2.4225313663482666, "learning_rate": 0.0007353750509580106, "loss": 3.4621, "step": 31170 }, { "epoch": 2.118154640576165, "grad_norm": 1.9027719497680664, "learning_rate": 0.0007353325859491778, "loss": 3.5835, "step": 31175 }, { "epoch": 2.118494360646827, "grad_norm": 1.7590821981430054, "learning_rate": 0.0007352901209403452, "loss": 3.5312, "step": 31180 }, { "epoch": 2.118834080717489, "grad_norm": 1.6355688571929932, "learning_rate": 0.0007352476559315124, "loss": 3.3672, "step": 31185 }, { "epoch": 2.1191738007881504, "grad_norm": 2.036062002182007, "learning_rate": 0.0007352051909226796, "loss": 3.3666, "step": 31190 }, { "epoch": 2.1195135208588125, "grad_norm": 1.8573660850524902, "learning_rate": 0.0007351627259138471, "loss": 3.4908, "step": 31195 }, { "epoch": 2.119853240929474, "grad_norm": 1.906957983970642, "learning_rate": 0.0007351202609050143, "loss": 3.6085, "step": 31200 }, { "epoch": 2.1201929610001358, "grad_norm": 2.054673194885254, "learning_rate": 0.0007350777958961815, "loss": 3.7439, "step": 31205 }, { "epoch": 2.120532681070798, "grad_norm": 2.1973936557769775, "learning_rate": 0.0007350353308873489, "loss": 3.4521, "step": 31210 }, { "epoch": 2.1208724011414595, "grad_norm": 2.359530448913574, "learning_rate": 0.0007349928658785161, "loss": 3.6178, "step": 31215 }, { "epoch": 2.121212121212121, "grad_norm": 2.2101263999938965, "learning_rate": 0.0007349504008696833, "loss": 3.565, "step": 31220 }, { "epoch": 2.121551841282783, "grad_norm": 2.3145711421966553, "learning_rate": 0.0007349079358608507, "loss": 3.4749, "step": 31225 }, { "epoch": 2.121891561353445, "grad_norm": 1.7139333486557007, "learning_rate": 0.000734865470852018, "loss": 3.4434, "step": 
31230 }, { "epoch": 2.1222312814241064, "grad_norm": 2.2380340099334717, "learning_rate": 0.0007348230058431852, "loss": 3.5361, "step": 31235 }, { "epoch": 2.1225710014947685, "grad_norm": 1.9229837656021118, "learning_rate": 0.0007347805408343525, "loss": 3.5313, "step": 31240 }, { "epoch": 2.12291072156543, "grad_norm": 1.880764126777649, "learning_rate": 0.0007347465688272863, "loss": 3.6523, "step": 31245 }, { "epoch": 2.123250441636092, "grad_norm": 1.991605520248413, "learning_rate": 0.0007347041038184537, "loss": 3.6652, "step": 31250 }, { "epoch": 2.123590161706754, "grad_norm": 2.482461929321289, "learning_rate": 0.0007346616388096209, "loss": 3.6645, "step": 31255 }, { "epoch": 2.1239298817774155, "grad_norm": 1.709938406944275, "learning_rate": 0.0007346191738007882, "loss": 3.3822, "step": 31260 }, { "epoch": 2.124269601848077, "grad_norm": 2.435434103012085, "learning_rate": 0.0007345767087919555, "loss": 3.2576, "step": 31265 }, { "epoch": 2.1246093219187387, "grad_norm": 2.074284315109253, "learning_rate": 0.0007345342437831227, "loss": 3.2787, "step": 31270 }, { "epoch": 2.124949041989401, "grad_norm": 2.032231092453003, "learning_rate": 0.0007344917787742899, "loss": 3.3319, "step": 31275 }, { "epoch": 2.1252887620600625, "grad_norm": 2.0273971557617188, "learning_rate": 0.0007344493137654573, "loss": 3.4692, "step": 31280 }, { "epoch": 2.125628482130724, "grad_norm": 1.770456075668335, "learning_rate": 0.0007344068487566246, "loss": 3.6565, "step": 31285 }, { "epoch": 2.125968202201386, "grad_norm": 2.2656283378601074, "learning_rate": 0.0007343643837477918, "loss": 3.597, "step": 31290 }, { "epoch": 2.126307922272048, "grad_norm": 1.7909387350082397, "learning_rate": 0.0007343219187389592, "loss": 3.1136, "step": 31295 }, { "epoch": 2.1266476423427094, "grad_norm": 2.8657803535461426, "learning_rate": 0.0007342794537301264, "loss": 3.3978, "step": 31300 }, { "epoch": 2.1269873624133715, "grad_norm": 1.9576728343963623, "learning_rate": 
0.0007342369887212936, "loss": 3.3538, "step": 31305 }, { "epoch": 2.127327082484033, "grad_norm": 1.666089653968811, "learning_rate": 0.000734194523712461, "loss": 3.4491, "step": 31310 }, { "epoch": 2.1276668025546948, "grad_norm": 2.0161640644073486, "learning_rate": 0.0007341520587036282, "loss": 3.5054, "step": 31315 }, { "epoch": 2.128006522625357, "grad_norm": 2.5254039764404297, "learning_rate": 0.0007341095936947955, "loss": 3.406, "step": 31320 }, { "epoch": 2.1283462426960185, "grad_norm": 2.142876148223877, "learning_rate": 0.0007340671286859629, "loss": 3.4869, "step": 31325 }, { "epoch": 2.12868596276668, "grad_norm": 2.4478580951690674, "learning_rate": 0.0007340246636771301, "loss": 3.4394, "step": 31330 }, { "epoch": 2.129025682837342, "grad_norm": 1.8370002508163452, "learning_rate": 0.0007339821986682973, "loss": 3.638, "step": 31335 }, { "epoch": 2.129365402908004, "grad_norm": 1.9702823162078857, "learning_rate": 0.0007339397336594646, "loss": 3.4761, "step": 31340 }, { "epoch": 2.1297051229786654, "grad_norm": 2.8294458389282227, "learning_rate": 0.0007338972686506319, "loss": 3.53, "step": 31345 }, { "epoch": 2.1300448430493275, "grad_norm": 1.8418323993682861, "learning_rate": 0.0007338548036417991, "loss": 3.569, "step": 31350 }, { "epoch": 2.130384563119989, "grad_norm": 2.149756669998169, "learning_rate": 0.0007338123386329665, "loss": 3.4777, "step": 31355 }, { "epoch": 2.1307242831906508, "grad_norm": 1.928232192993164, "learning_rate": 0.0007337698736241338, "loss": 3.2876, "step": 31360 }, { "epoch": 2.131064003261313, "grad_norm": 1.6928715705871582, "learning_rate": 0.000733727408615301, "loss": 3.3875, "step": 31365 }, { "epoch": 2.1314037233319745, "grad_norm": 1.5746076107025146, "learning_rate": 0.0007336849436064683, "loss": 3.3845, "step": 31370 }, { "epoch": 2.131743443402636, "grad_norm": 2.155488967895508, "learning_rate": 0.0007336424785976355, "loss": 3.4645, "step": 31375 }, { "epoch": 2.132083163473298, "grad_norm": 
2.3816072940826416, "learning_rate": 0.0007336000135888028, "loss": 3.5124, "step": 31380 }, { "epoch": 2.13242288354396, "grad_norm": 1.6727570295333862, "learning_rate": 0.0007335575485799701, "loss": 3.6576, "step": 31385 }, { "epoch": 2.1327626036146214, "grad_norm": 3.2880399227142334, "learning_rate": 0.0007335150835711374, "loss": 3.6923, "step": 31390 }, { "epoch": 2.1331023236852835, "grad_norm": 2.5158677101135254, "learning_rate": 0.0007334726185623047, "loss": 3.4224, "step": 31395 }, { "epoch": 2.133442043755945, "grad_norm": 1.999468445777893, "learning_rate": 0.000733430153553472, "loss": 3.5858, "step": 31400 }, { "epoch": 2.133781763826607, "grad_norm": 2.122394323348999, "learning_rate": 0.0007333876885446392, "loss": 3.3953, "step": 31405 }, { "epoch": 2.134121483897269, "grad_norm": 1.647600531578064, "learning_rate": 0.0007333452235358064, "loss": 3.661, "step": 31410 }, { "epoch": 2.1344612039679305, "grad_norm": 2.375694990158081, "learning_rate": 0.0007333027585269738, "loss": 3.4559, "step": 31415 }, { "epoch": 2.134800924038592, "grad_norm": 2.1645610332489014, "learning_rate": 0.000733260293518141, "loss": 3.566, "step": 31420 }, { "epoch": 2.1351406441092537, "grad_norm": 1.9842959642410278, "learning_rate": 0.0007332178285093083, "loss": 3.5344, "step": 31425 }, { "epoch": 2.135480364179916, "grad_norm": 2.529308795928955, "learning_rate": 0.0007331753635004757, "loss": 3.6396, "step": 31430 }, { "epoch": 2.1358200842505775, "grad_norm": 2.6589126586914062, "learning_rate": 0.0007331328984916429, "loss": 3.3968, "step": 31435 }, { "epoch": 2.136159804321239, "grad_norm": 1.9891870021820068, "learning_rate": 0.0007330904334828101, "loss": 3.4972, "step": 31440 }, { "epoch": 2.136499524391901, "grad_norm": 1.7945271730422974, "learning_rate": 0.0007330479684739775, "loss": 3.2965, "step": 31445 }, { "epoch": 2.136839244462563, "grad_norm": 1.8419179916381836, "learning_rate": 0.0007330055034651447, "loss": 3.4556, "step": 31450 }, { 
"epoch": 2.1371789645332244, "grad_norm": 2.1266753673553467, "learning_rate": 0.0007329630384563119, "loss": 3.538, "step": 31455 }, { "epoch": 2.1375186846038865, "grad_norm": 1.6117894649505615, "learning_rate": 0.0007329205734474794, "loss": 3.4784, "step": 31460 }, { "epoch": 2.137858404674548, "grad_norm": 1.7176740169525146, "learning_rate": 0.0007328781084386466, "loss": 3.3616, "step": 31465 }, { "epoch": 2.1381981247452098, "grad_norm": 2.184112787246704, "learning_rate": 0.0007328356434298139, "loss": 3.2176, "step": 31470 }, { "epoch": 2.138537844815872, "grad_norm": 2.0836243629455566, "learning_rate": 0.0007327931784209811, "loss": 3.2497, "step": 31475 }, { "epoch": 2.1388775648865335, "grad_norm": 2.0103068351745605, "learning_rate": 0.0007327507134121484, "loss": 3.2868, "step": 31480 }, { "epoch": 2.139217284957195, "grad_norm": 2.0147132873535156, "learning_rate": 0.0007327082484033157, "loss": 3.5022, "step": 31485 }, { "epoch": 2.139557005027857, "grad_norm": 2.4973762035369873, "learning_rate": 0.0007326657833944829, "loss": 3.6273, "step": 31490 }, { "epoch": 2.139896725098519, "grad_norm": 2.98848032951355, "learning_rate": 0.0007326233183856503, "loss": 3.5326, "step": 31495 }, { "epoch": 2.1402364451691804, "grad_norm": 1.91409432888031, "learning_rate": 0.0007325808533768176, "loss": 3.2623, "step": 31500 }, { "epoch": 2.1405761652398425, "grad_norm": 1.8764970302581787, "learning_rate": 0.0007325383883679848, "loss": 3.4992, "step": 31505 }, { "epoch": 2.140915885310504, "grad_norm": 2.307950496673584, "learning_rate": 0.000732495923359152, "loss": 3.4855, "step": 31510 }, { "epoch": 2.1412556053811658, "grad_norm": 1.7935361862182617, "learning_rate": 0.0007324534583503194, "loss": 3.5416, "step": 31515 }, { "epoch": 2.141595325451828, "grad_norm": 1.9427428245544434, "learning_rate": 0.0007324109933414866, "loss": 3.4359, "step": 31520 }, { "epoch": 2.1419350455224895, "grad_norm": 2.1295759677886963, "learning_rate": 
0.0007323685283326538, "loss": 3.3051, "step": 31525 }, { "epoch": 2.142274765593151, "grad_norm": 2.2781870365142822, "learning_rate": 0.0007323260633238213, "loss": 3.6135, "step": 31530 }, { "epoch": 2.142614485663813, "grad_norm": 1.6946823596954346, "learning_rate": 0.0007322835983149885, "loss": 3.4767, "step": 31535 }, { "epoch": 2.142954205734475, "grad_norm": 1.822527289390564, "learning_rate": 0.0007322411333061557, "loss": 3.5403, "step": 31540 }, { "epoch": 2.1432939258051364, "grad_norm": 2.239241123199463, "learning_rate": 0.0007321986682973231, "loss": 3.3729, "step": 31545 }, { "epoch": 2.1436336458757985, "grad_norm": 1.4404770135879517, "learning_rate": 0.0007321562032884903, "loss": 3.6359, "step": 31550 }, { "epoch": 2.14397336594646, "grad_norm": 2.942965507507324, "learning_rate": 0.0007321137382796575, "loss": 3.63, "step": 31555 }, { "epoch": 2.144313086017122, "grad_norm": 2.138770341873169, "learning_rate": 0.0007320712732708249, "loss": 3.7102, "step": 31560 }, { "epoch": 2.144652806087784, "grad_norm": 2.5840656757354736, "learning_rate": 0.0007320288082619922, "loss": 3.661, "step": 31565 }, { "epoch": 2.1449925261584455, "grad_norm": 2.025609016418457, "learning_rate": 0.0007319863432531594, "loss": 3.3652, "step": 31570 }, { "epoch": 2.145332246229107, "grad_norm": 2.15425443649292, "learning_rate": 0.0007319438782443267, "loss": 3.4985, "step": 31575 }, { "epoch": 2.145671966299769, "grad_norm": 2.5769636631011963, "learning_rate": 0.000731901413235494, "loss": 3.4678, "step": 31580 }, { "epoch": 2.146011686370431, "grad_norm": 2.0567729473114014, "learning_rate": 0.0007318589482266612, "loss": 3.4438, "step": 31585 }, { "epoch": 2.1463514064410925, "grad_norm": 2.4687445163726807, "learning_rate": 0.0007318164832178285, "loss": 3.6298, "step": 31590 }, { "epoch": 2.1466911265117545, "grad_norm": 2.6909048557281494, "learning_rate": 0.0007317740182089958, "loss": 3.4388, "step": 31595 }, { "epoch": 2.147030846582416, "grad_norm": 
1.723893404006958, "learning_rate": 0.0007317315532001631, "loss": 3.4115, "step": 31600 }, { "epoch": 2.147370566653078, "grad_norm": 2.504516124725342, "learning_rate": 0.0007316890881913304, "loss": 3.3353, "step": 31605 }, { "epoch": 2.14771028672374, "grad_norm": 2.050429105758667, "learning_rate": 0.0007316466231824977, "loss": 3.3564, "step": 31610 }, { "epoch": 2.1480500067944015, "grad_norm": 2.0300543308258057, "learning_rate": 0.0007316041581736649, "loss": 3.4798, "step": 31615 }, { "epoch": 2.148389726865063, "grad_norm": 2.441667318344116, "learning_rate": 0.0007315616931648322, "loss": 3.4328, "step": 31620 }, { "epoch": 2.1487294469357248, "grad_norm": 2.8989293575286865, "learning_rate": 0.0007315192281559994, "loss": 3.7156, "step": 31625 }, { "epoch": 2.149069167006387, "grad_norm": 2.4652016162872314, "learning_rate": 0.0007314767631471667, "loss": 3.432, "step": 31630 }, { "epoch": 2.1494088870770485, "grad_norm": 2.609016180038452, "learning_rate": 0.0007314342981383341, "loss": 3.3642, "step": 31635 }, { "epoch": 2.14974860714771, "grad_norm": 2.4144113063812256, "learning_rate": 0.0007313918331295013, "loss": 3.6612, "step": 31640 }, { "epoch": 2.150088327218372, "grad_norm": 2.352363109588623, "learning_rate": 0.0007313493681206686, "loss": 3.4663, "step": 31645 }, { "epoch": 2.150428047289034, "grad_norm": 2.1893181800842285, "learning_rate": 0.0007313069031118359, "loss": 3.7985, "step": 31650 }, { "epoch": 2.1507677673596954, "grad_norm": 1.7321609258651733, "learning_rate": 0.0007312644381030031, "loss": 3.5969, "step": 31655 }, { "epoch": 2.1511074874303575, "grad_norm": 1.5662204027175903, "learning_rate": 0.0007312219730941703, "loss": 3.7065, "step": 31660 }, { "epoch": 2.151447207501019, "grad_norm": 1.8015422821044922, "learning_rate": 0.0007311795080853377, "loss": 3.6156, "step": 31665 }, { "epoch": 2.1517869275716808, "grad_norm": 2.045257806777954, "learning_rate": 0.000731137043076505, "loss": 3.5662, "step": 31670 }, { 
"epoch": 2.152126647642343, "grad_norm": 2.06219744682312, "learning_rate": 0.0007310945780676722, "loss": 3.647, "step": 31675 }, { "epoch": 2.1524663677130045, "grad_norm": 2.862262487411499, "learning_rate": 0.0007310521130588396, "loss": 3.4245, "step": 31680 }, { "epoch": 2.152806087783666, "grad_norm": 2.311401844024658, "learning_rate": 0.0007310096480500068, "loss": 3.5724, "step": 31685 }, { "epoch": 2.153145807854328, "grad_norm": 2.176304340362549, "learning_rate": 0.000730967183041174, "loss": 3.6294, "step": 31690 }, { "epoch": 2.15348552792499, "grad_norm": 1.8339205980300903, "learning_rate": 0.0007309247180323414, "loss": 3.231, "step": 31695 }, { "epoch": 2.1538252479956514, "grad_norm": 1.9788737297058105, "learning_rate": 0.0007308822530235086, "loss": 3.6426, "step": 31700 }, { "epoch": 2.1541649680663135, "grad_norm": 2.27178955078125, "learning_rate": 0.0007308397880146759, "loss": 3.5922, "step": 31705 }, { "epoch": 2.154504688136975, "grad_norm": 2.095815658569336, "learning_rate": 0.0007307973230058433, "loss": 3.5352, "step": 31710 }, { "epoch": 2.154844408207637, "grad_norm": 1.5558130741119385, "learning_rate": 0.0007307548579970105, "loss": 3.3452, "step": 31715 }, { "epoch": 2.155184128278299, "grad_norm": 2.753406047821045, "learning_rate": 0.0007307123929881777, "loss": 3.455, "step": 31720 }, { "epoch": 2.1555238483489605, "grad_norm": 1.6829010248184204, "learning_rate": 0.000730669927979345, "loss": 3.3948, "step": 31725 }, { "epoch": 2.155863568419622, "grad_norm": 1.7581210136413574, "learning_rate": 0.0007306274629705123, "loss": 3.669, "step": 31730 }, { "epoch": 2.156203288490284, "grad_norm": 2.3595194816589355, "learning_rate": 0.0007305849979616795, "loss": 3.2448, "step": 31735 }, { "epoch": 2.156543008560946, "grad_norm": 2.0106000900268555, "learning_rate": 0.0007305425329528469, "loss": 3.7535, "step": 31740 }, { "epoch": 2.1568827286316075, "grad_norm": 1.7747687101364136, "learning_rate": 0.0007305000679440142, 
"loss": 3.5849, "step": 31745 }, { "epoch": 2.1572224487022695, "grad_norm": 1.902112364768982, "learning_rate": 0.0007304576029351814, "loss": 3.2679, "step": 31750 }, { "epoch": 2.157562168772931, "grad_norm": 1.9956185817718506, "learning_rate": 0.0007304151379263487, "loss": 3.5079, "step": 31755 }, { "epoch": 2.157901888843593, "grad_norm": 1.9128007888793945, "learning_rate": 0.0007303726729175159, "loss": 3.5279, "step": 31760 }, { "epoch": 2.1582416089142544, "grad_norm": 2.384352922439575, "learning_rate": 0.0007303302079086832, "loss": 3.4261, "step": 31765 }, { "epoch": 2.1585813289849165, "grad_norm": 1.8186942338943481, "learning_rate": 0.0007302877428998506, "loss": 3.7072, "step": 31770 }, { "epoch": 2.158921049055578, "grad_norm": 2.28842830657959, "learning_rate": 0.0007302452778910178, "loss": 3.2189, "step": 31775 }, { "epoch": 2.1592607691262398, "grad_norm": 2.0460891723632812, "learning_rate": 0.0007302028128821851, "loss": 3.4527, "step": 31780 }, { "epoch": 2.159600489196902, "grad_norm": 1.8540159463882446, "learning_rate": 0.0007301603478733524, "loss": 3.4931, "step": 31785 }, { "epoch": 2.1599402092675635, "grad_norm": 2.047757387161255, "learning_rate": 0.0007301178828645196, "loss": 3.6908, "step": 31790 }, { "epoch": 2.160279929338225, "grad_norm": 2.070885419845581, "learning_rate": 0.0007300754178556869, "loss": 3.6882, "step": 31795 }, { "epoch": 2.160619649408887, "grad_norm": 1.8842277526855469, "learning_rate": 0.0007300329528468542, "loss": 3.3214, "step": 31800 }, { "epoch": 2.160959369479549, "grad_norm": 2.603020668029785, "learning_rate": 0.0007299904878380215, "loss": 3.5926, "step": 31805 }, { "epoch": 2.1612990895502104, "grad_norm": 1.9411576986312866, "learning_rate": 0.0007299480228291889, "loss": 3.8304, "step": 31810 }, { "epoch": 2.1616388096208725, "grad_norm": 1.5878115892410278, "learning_rate": 0.0007299055578203561, "loss": 3.5691, "step": 31815 }, { "epoch": 2.161978529691534, "grad_norm": 1.7822965383529663, 
"learning_rate": 0.0007298630928115233, "loss": 3.4724, "step": 31820 }, { "epoch": 2.1623182497621958, "grad_norm": 2.0712685585021973, "learning_rate": 0.0007298206278026906, "loss": 3.6763, "step": 31825 }, { "epoch": 2.162657969832858, "grad_norm": 2.2046055793762207, "learning_rate": 0.0007297781627938579, "loss": 3.4917, "step": 31830 }, { "epoch": 2.1629976899035195, "grad_norm": 2.067904472351074, "learning_rate": 0.0007297356977850251, "loss": 3.3646, "step": 31835 }, { "epoch": 2.163337409974181, "grad_norm": 1.9550610780715942, "learning_rate": 0.0007296932327761925, "loss": 3.6387, "step": 31840 }, { "epoch": 2.163677130044843, "grad_norm": 2.069378614425659, "learning_rate": 0.0007296507677673598, "loss": 3.6622, "step": 31845 }, { "epoch": 2.164016850115505, "grad_norm": 2.0191752910614014, "learning_rate": 0.000729608302758527, "loss": 3.4122, "step": 31850 }, { "epoch": 2.1643565701861665, "grad_norm": 1.538977026939392, "learning_rate": 0.0007295658377496943, "loss": 3.5148, "step": 31855 }, { "epoch": 2.1646962902568285, "grad_norm": 1.941361665725708, "learning_rate": 0.0007295233727408615, "loss": 3.5544, "step": 31860 }, { "epoch": 2.16503601032749, "grad_norm": 1.8250254392623901, "learning_rate": 0.0007294809077320288, "loss": 3.6166, "step": 31865 }, { "epoch": 2.165375730398152, "grad_norm": 1.9417420625686646, "learning_rate": 0.0007294384427231961, "loss": 3.5866, "step": 31870 }, { "epoch": 2.165715450468814, "grad_norm": 1.8763558864593506, "learning_rate": 0.0007293959777143634, "loss": 3.471, "step": 31875 }, { "epoch": 2.1660551705394755, "grad_norm": 1.9526180028915405, "learning_rate": 0.0007293535127055307, "loss": 3.3695, "step": 31880 }, { "epoch": 2.166394890610137, "grad_norm": 1.637765884399414, "learning_rate": 0.000729311047696698, "loss": 3.5673, "step": 31885 }, { "epoch": 2.166734610680799, "grad_norm": 1.688631296157837, "learning_rate": 0.0007292685826878652, "loss": 3.5588, "step": 31890 }, { "epoch": 
2.167074330751461, "grad_norm": 1.835400938987732, "learning_rate": 0.0007292261176790325, "loss": 3.4254, "step": 31895 }, { "epoch": 2.1674140508221225, "grad_norm": 1.9182204008102417, "learning_rate": 0.0007291836526701998, "loss": 3.4572, "step": 31900 }, { "epoch": 2.1677537708927845, "grad_norm": 1.6991329193115234, "learning_rate": 0.000729141187661367, "loss": 3.5816, "step": 31905 }, { "epoch": 2.168093490963446, "grad_norm": 2.19191837310791, "learning_rate": 0.0007290987226525343, "loss": 3.688, "step": 31910 }, { "epoch": 2.168433211034108, "grad_norm": 2.581120729446411, "learning_rate": 0.0007290562576437017, "loss": 3.6539, "step": 31915 }, { "epoch": 2.16877293110477, "grad_norm": 2.171811580657959, "learning_rate": 0.0007290137926348689, "loss": 3.4634, "step": 31920 }, { "epoch": 2.1691126511754315, "grad_norm": 1.8627681732177734, "learning_rate": 0.0007289713276260361, "loss": 3.5101, "step": 31925 }, { "epoch": 2.169452371246093, "grad_norm": 1.6818797588348389, "learning_rate": 0.0007289288626172035, "loss": 3.3417, "step": 31930 }, { "epoch": 2.169792091316755, "grad_norm": 1.8603241443634033, "learning_rate": 0.0007288863976083707, "loss": 3.5891, "step": 31935 }, { "epoch": 2.170131811387417, "grad_norm": 1.8868449926376343, "learning_rate": 0.0007288439325995379, "loss": 3.5352, "step": 31940 }, { "epoch": 2.1704715314580785, "grad_norm": 2.0740091800689697, "learning_rate": 0.0007288014675907054, "loss": 3.5833, "step": 31945 }, { "epoch": 2.1708112515287405, "grad_norm": 2.035118579864502, "learning_rate": 0.0007287590025818726, "loss": 3.4103, "step": 31950 }, { "epoch": 2.171150971599402, "grad_norm": 1.9209880828857422, "learning_rate": 0.0007287165375730398, "loss": 3.5338, "step": 31955 }, { "epoch": 2.171490691670064, "grad_norm": 1.874112844467163, "learning_rate": 0.0007286740725642071, "loss": 3.4796, "step": 31960 }, { "epoch": 2.1718304117407254, "grad_norm": 2.01766300201416, "learning_rate": 0.0007286316075553744, "loss": 
3.5656, "step": 31965 }, { "epoch": 2.1721701318113875, "grad_norm": 1.757036566734314, "learning_rate": 0.0007285891425465416, "loss": 3.5097, "step": 31970 }, { "epoch": 2.172509851882049, "grad_norm": 2.452785015106201, "learning_rate": 0.0007285466775377089, "loss": 3.5286, "step": 31975 }, { "epoch": 2.172849571952711, "grad_norm": 2.336061716079712, "learning_rate": 0.0007285042125288763, "loss": 3.3823, "step": 31980 }, { "epoch": 2.173189292023373, "grad_norm": 2.3505263328552246, "learning_rate": 0.0007284617475200435, "loss": 3.4873, "step": 31985 }, { "epoch": 2.1735290120940345, "grad_norm": 2.1236910820007324, "learning_rate": 0.0007284192825112108, "loss": 3.3557, "step": 31990 }, { "epoch": 2.173868732164696, "grad_norm": 2.1356701850891113, "learning_rate": 0.0007283768175023781, "loss": 3.4534, "step": 31995 }, { "epoch": 2.174208452235358, "grad_norm": 1.9564887285232544, "learning_rate": 0.0007283343524935453, "loss": 3.3502, "step": 32000 }, { "epoch": 2.17454817230602, "grad_norm": 2.6027183532714844, "learning_rate": 0.0007282918874847126, "loss": 3.8106, "step": 32005 }, { "epoch": 2.1748878923766815, "grad_norm": 2.5471432209014893, "learning_rate": 0.0007282494224758798, "loss": 3.5307, "step": 32010 }, { "epoch": 2.1752276124473435, "grad_norm": 2.2635180950164795, "learning_rate": 0.0007282069574670472, "loss": 3.5588, "step": 32015 }, { "epoch": 2.175567332518005, "grad_norm": 2.2269396781921387, "learning_rate": 0.0007281644924582145, "loss": 3.3702, "step": 32020 }, { "epoch": 2.175907052588667, "grad_norm": 2.05295467376709, "learning_rate": 0.0007281220274493817, "loss": 3.5838, "step": 32025 }, { "epoch": 2.176246772659329, "grad_norm": 2.0671451091766357, "learning_rate": 0.000728079562440549, "loss": 3.5019, "step": 32030 }, { "epoch": 2.1765864927299905, "grad_norm": 2.26637864112854, "learning_rate": 0.0007280370974317163, "loss": 3.5556, "step": 32035 }, { "epoch": 2.176926212800652, "grad_norm": 1.663122296333313, 
"learning_rate": 0.0007279946324228835, "loss": 3.2746, "step": 32040 }, { "epoch": 2.177265932871314, "grad_norm": 1.7353596687316895, "learning_rate": 0.0007279521674140507, "loss": 3.5283, "step": 32045 }, { "epoch": 2.177605652941976, "grad_norm": 2.289757251739502, "learning_rate": 0.0007279097024052182, "loss": 3.5056, "step": 32050 }, { "epoch": 2.1779453730126375, "grad_norm": 2.326582193374634, "learning_rate": 0.0007278672373963854, "loss": 3.454, "step": 32055 }, { "epoch": 2.1782850930832995, "grad_norm": 1.8206464052200317, "learning_rate": 0.0007278247723875526, "loss": 3.5208, "step": 32060 }, { "epoch": 2.178624813153961, "grad_norm": 2.268554210662842, "learning_rate": 0.00072778230737872, "loss": 3.6362, "step": 32065 }, { "epoch": 2.178964533224623, "grad_norm": 1.6284620761871338, "learning_rate": 0.0007277398423698872, "loss": 3.6938, "step": 32070 }, { "epoch": 2.179304253295285, "grad_norm": 2.135491132736206, "learning_rate": 0.0007276973773610544, "loss": 3.54, "step": 32075 }, { "epoch": 2.1796439733659465, "grad_norm": 1.8236631155014038, "learning_rate": 0.0007276549123522218, "loss": 3.5576, "step": 32080 }, { "epoch": 2.179983693436608, "grad_norm": 1.7811888456344604, "learning_rate": 0.0007276124473433891, "loss": 3.7269, "step": 32085 }, { "epoch": 2.18032341350727, "grad_norm": 1.9281091690063477, "learning_rate": 0.0007275699823345563, "loss": 3.1974, "step": 32090 }, { "epoch": 2.180663133577932, "grad_norm": 2.4111056327819824, "learning_rate": 0.0007275275173257237, "loss": 3.5166, "step": 32095 }, { "epoch": 2.1810028536485935, "grad_norm": 1.7076207399368286, "learning_rate": 0.0007274850523168909, "loss": 3.6327, "step": 32100 }, { "epoch": 2.181342573719255, "grad_norm": 2.3989086151123047, "learning_rate": 0.0007274425873080581, "loss": 3.4285, "step": 32105 }, { "epoch": 2.181682293789917, "grad_norm": 1.8388969898223877, "learning_rate": 0.0007274001222992254, "loss": 3.4912, "step": 32110 }, { "epoch": 
2.182022013860579, "grad_norm": 2.7795872688293457, "learning_rate": 0.0007273576572903927, "loss": 3.4144, "step": 32115 }, { "epoch": 2.1823617339312404, "grad_norm": 2.3642499446868896, "learning_rate": 0.00072731519228156, "loss": 3.5902, "step": 32120 }, { "epoch": 2.1827014540019025, "grad_norm": 1.892283320426941, "learning_rate": 0.0007272727272727273, "loss": 3.7077, "step": 32125 }, { "epoch": 2.183041174072564, "grad_norm": 1.8522586822509766, "learning_rate": 0.0007272302622638946, "loss": 3.3716, "step": 32130 }, { "epoch": 2.183380894143226, "grad_norm": 2.2604684829711914, "learning_rate": 0.0007271877972550618, "loss": 3.545, "step": 32135 }, { "epoch": 2.183720614213888, "grad_norm": 1.9327763319015503, "learning_rate": 0.0007271453322462291, "loss": 3.5151, "step": 32140 }, { "epoch": 2.1840603342845495, "grad_norm": 2.027543306350708, "learning_rate": 0.0007271028672373963, "loss": 3.3286, "step": 32145 }, { "epoch": 2.184400054355211, "grad_norm": 2.073009490966797, "learning_rate": 0.0007270604022285637, "loss": 3.4693, "step": 32150 }, { "epoch": 2.184739774425873, "grad_norm": 2.1107561588287354, "learning_rate": 0.000727017937219731, "loss": 3.3043, "step": 32155 }, { "epoch": 2.185079494496535, "grad_norm": 1.8398222923278809, "learning_rate": 0.0007269754722108982, "loss": 3.4482, "step": 32160 }, { "epoch": 2.1854192145671965, "grad_norm": 1.8374078273773193, "learning_rate": 0.0007269330072020656, "loss": 3.4457, "step": 32165 }, { "epoch": 2.1857589346378585, "grad_norm": 2.2368040084838867, "learning_rate": 0.0007268905421932328, "loss": 3.5535, "step": 32170 }, { "epoch": 2.18609865470852, "grad_norm": 2.276715040206909, "learning_rate": 0.0007268480771844, "loss": 3.2913, "step": 32175 }, { "epoch": 2.186438374779182, "grad_norm": 2.120241165161133, "learning_rate": 0.0007268056121755674, "loss": 3.5068, "step": 32180 }, { "epoch": 2.186778094849844, "grad_norm": 1.8677741289138794, "learning_rate": 0.0007267631471667346, "loss": 
3.5886, "step": 32185 }, { "epoch": 2.1871178149205055, "grad_norm": 1.6850948333740234, "learning_rate": 0.0007267206821579019, "loss": 3.6338, "step": 32190 }, { "epoch": 2.187457534991167, "grad_norm": 1.8147932291030884, "learning_rate": 0.0007266782171490693, "loss": 3.6481, "step": 32195 }, { "epoch": 2.187797255061829, "grad_norm": 2.2929933071136475, "learning_rate": 0.0007266357521402365, "loss": 3.307, "step": 32200 }, { "epoch": 2.188136975132491, "grad_norm": 2.3218119144439697, "learning_rate": 0.0007265932871314037, "loss": 3.6063, "step": 32205 }, { "epoch": 2.1884766952031525, "grad_norm": 2.1714582443237305, "learning_rate": 0.000726550822122571, "loss": 3.673, "step": 32210 }, { "epoch": 2.1888164152738145, "grad_norm": 1.8797446489334106, "learning_rate": 0.0007265083571137383, "loss": 3.4757, "step": 32215 }, { "epoch": 2.189156135344476, "grad_norm": 2.0303444862365723, "learning_rate": 0.0007264658921049055, "loss": 3.6025, "step": 32220 }, { "epoch": 2.189495855415138, "grad_norm": 1.8821901082992554, "learning_rate": 0.0007264234270960729, "loss": 3.4528, "step": 32225 }, { "epoch": 2.1898355754858, "grad_norm": 2.3891236782073975, "learning_rate": 0.0007263809620872402, "loss": 3.5208, "step": 32230 }, { "epoch": 2.1901752955564615, "grad_norm": 2.108907461166382, "learning_rate": 0.0007263384970784074, "loss": 3.7178, "step": 32235 }, { "epoch": 2.190515015627123, "grad_norm": 1.9990744590759277, "learning_rate": 0.0007262960320695747, "loss": 3.5603, "step": 32240 }, { "epoch": 2.190854735697785, "grad_norm": 1.8886175155639648, "learning_rate": 0.000726253567060742, "loss": 3.2384, "step": 32245 }, { "epoch": 2.191194455768447, "grad_norm": 1.680765151977539, "learning_rate": 0.0007262111020519092, "loss": 3.5759, "step": 32250 }, { "epoch": 2.1915341758391085, "grad_norm": 1.8532227277755737, "learning_rate": 0.0007261686370430765, "loss": 3.5572, "step": 32255 }, { "epoch": 2.1918738959097706, "grad_norm": 1.852370023727417, 
"learning_rate": 0.0007261261720342438, "loss": 3.4125, "step": 32260 }, { "epoch": 2.192213615980432, "grad_norm": 1.9911030530929565, "learning_rate": 0.0007260837070254111, "loss": 3.7318, "step": 32265 }, { "epoch": 2.192553336051094, "grad_norm": 1.7841830253601074, "learning_rate": 0.0007260412420165784, "loss": 3.5647, "step": 32270 }, { "epoch": 2.192893056121756, "grad_norm": 1.779420018196106, "learning_rate": 0.0007259987770077456, "loss": 3.5438, "step": 32275 }, { "epoch": 2.1932327761924175, "grad_norm": 2.400216579437256, "learning_rate": 0.0007259563119989129, "loss": 3.5334, "step": 32280 }, { "epoch": 2.193572496263079, "grad_norm": 2.365549087524414, "learning_rate": 0.0007259138469900802, "loss": 3.1484, "step": 32285 }, { "epoch": 2.1939122163337412, "grad_norm": 2.2592508792877197, "learning_rate": 0.0007258713819812474, "loss": 3.536, "step": 32290 }, { "epoch": 2.194251936404403, "grad_norm": 2.397402763366699, "learning_rate": 0.0007258289169724148, "loss": 3.7001, "step": 32295 }, { "epoch": 2.1945916564750645, "grad_norm": 2.0341813564300537, "learning_rate": 0.0007257864519635821, "loss": 3.4483, "step": 32300 }, { "epoch": 2.194931376545726, "grad_norm": 2.2529914379119873, "learning_rate": 0.0007257439869547493, "loss": 3.5626, "step": 32305 }, { "epoch": 2.195271096616388, "grad_norm": 3.173032522201538, "learning_rate": 0.0007257015219459165, "loss": 3.4165, "step": 32310 }, { "epoch": 2.19561081668705, "grad_norm": 1.9407901763916016, "learning_rate": 0.0007256590569370839, "loss": 3.2986, "step": 32315 }, { "epoch": 2.1959505367577115, "grad_norm": 2.153759479522705, "learning_rate": 0.0007256165919282511, "loss": 3.2215, "step": 32320 }, { "epoch": 2.1962902568283735, "grad_norm": 1.8168061971664429, "learning_rate": 0.0007255741269194183, "loss": 3.615, "step": 32325 }, { "epoch": 2.196629976899035, "grad_norm": 1.7690485715866089, "learning_rate": 0.0007255316619105858, "loss": 3.3991, "step": 32330 }, { "epoch": 
2.196969696969697, "grad_norm": 2.0844624042510986, "learning_rate": 0.000725489196901753, "loss": 3.5585, "step": 32335 }, { "epoch": 2.197309417040359, "grad_norm": 2.2398970127105713, "learning_rate": 0.0007254467318929202, "loss": 3.6672, "step": 32340 }, { "epoch": 2.1976491371110205, "grad_norm": 1.9341375827789307, "learning_rate": 0.0007254042668840876, "loss": 3.6792, "step": 32345 }, { "epoch": 2.197988857181682, "grad_norm": 1.6542978286743164, "learning_rate": 0.0007253618018752548, "loss": 3.6245, "step": 32350 }, { "epoch": 2.198328577252344, "grad_norm": 2.306727409362793, "learning_rate": 0.000725319336866422, "loss": 3.5986, "step": 32355 }, { "epoch": 2.198668297323006, "grad_norm": 1.7424843311309814, "learning_rate": 0.0007252768718575894, "loss": 3.5388, "step": 32360 }, { "epoch": 2.1990080173936675, "grad_norm": 1.8625476360321045, "learning_rate": 0.0007252344068487567, "loss": 3.2362, "step": 32365 }, { "epoch": 2.1993477374643295, "grad_norm": 1.7709788084030151, "learning_rate": 0.0007251919418399239, "loss": 3.3662, "step": 32370 }, { "epoch": 2.199687457534991, "grad_norm": 1.8333666324615479, "learning_rate": 0.0007251494768310912, "loss": 3.4096, "step": 32375 }, { "epoch": 2.200027177605653, "grad_norm": 1.7369096279144287, "learning_rate": 0.0007251070118222585, "loss": 3.5219, "step": 32380 }, { "epoch": 2.200366897676315, "grad_norm": 2.0454893112182617, "learning_rate": 0.0007250645468134257, "loss": 3.571, "step": 32385 }, { "epoch": 2.2007066177469765, "grad_norm": 2.0105056762695312, "learning_rate": 0.000725022081804593, "loss": 3.7134, "step": 32390 }, { "epoch": 2.201046337817638, "grad_norm": 2.3710646629333496, "learning_rate": 0.0007249796167957604, "loss": 3.3828, "step": 32395 }, { "epoch": 2.2013860578883, "grad_norm": 1.6614782810211182, "learning_rate": 0.0007249371517869276, "loss": 3.5855, "step": 32400 }, { "epoch": 2.201725777958962, "grad_norm": 1.7957817316055298, "learning_rate": 0.0007248946867780949, 
"loss": 3.6439, "step": 32405 }, { "epoch": 2.2020654980296235, "grad_norm": 1.9889403581619263, "learning_rate": 0.0007248522217692621, "loss": 3.4381, "step": 32410 }, { "epoch": 2.2024052181002856, "grad_norm": 1.8304178714752197, "learning_rate": 0.0007248097567604294, "loss": 3.6575, "step": 32415 }, { "epoch": 2.202744938170947, "grad_norm": 2.1752240657806396, "learning_rate": 0.0007247672917515967, "loss": 3.4623, "step": 32420 }, { "epoch": 2.203084658241609, "grad_norm": 1.9519784450531006, "learning_rate": 0.0007247248267427639, "loss": 3.6475, "step": 32425 }, { "epoch": 2.203424378312271, "grad_norm": 2.1077187061309814, "learning_rate": 0.0007246823617339313, "loss": 3.1918, "step": 32430 }, { "epoch": 2.2037640983829325, "grad_norm": 1.7120636701583862, "learning_rate": 0.0007246398967250986, "loss": 3.4649, "step": 32435 }, { "epoch": 2.204103818453594, "grad_norm": 1.8140513896942139, "learning_rate": 0.0007245974317162658, "loss": 3.7549, "step": 32440 }, { "epoch": 2.204443538524256, "grad_norm": 1.7296785116195679, "learning_rate": 0.000724554966707433, "loss": 3.6339, "step": 32445 }, { "epoch": 2.204783258594918, "grad_norm": 2.4156882762908936, "learning_rate": 0.0007245125016986004, "loss": 3.4042, "step": 32450 }, { "epoch": 2.2051229786655795, "grad_norm": 1.6754677295684814, "learning_rate": 0.0007244700366897676, "loss": 3.3883, "step": 32455 }, { "epoch": 2.205462698736241, "grad_norm": 2.0601770877838135, "learning_rate": 0.0007244275716809348, "loss": 3.4256, "step": 32460 }, { "epoch": 2.205802418806903, "grad_norm": 1.9510769844055176, "learning_rate": 0.0007243851066721023, "loss": 3.7092, "step": 32465 }, { "epoch": 2.206142138877565, "grad_norm": 1.973329782485962, "learning_rate": 0.0007243426416632695, "loss": 3.5002, "step": 32470 }, { "epoch": 2.2064818589482265, "grad_norm": 1.893977165222168, "learning_rate": 0.0007243001766544367, "loss": 3.7033, "step": 32475 }, { "epoch": 2.2068215790188885, "grad_norm": 
3.099139451980591, "learning_rate": 0.0007242577116456041, "loss": 3.1673, "step": 32480 }, { "epoch": 2.20716129908955, "grad_norm": 1.7078722715377808, "learning_rate": 0.0007242152466367713, "loss": 3.7363, "step": 32485 }, { "epoch": 2.207501019160212, "grad_norm": 2.231443405151367, "learning_rate": 0.0007241727816279386, "loss": 3.453, "step": 32490 }, { "epoch": 2.207840739230874, "grad_norm": 1.8679388761520386, "learning_rate": 0.0007241303166191058, "loss": 3.6973, "step": 32495 }, { "epoch": 2.2081804593015355, "grad_norm": 1.7652217149734497, "learning_rate": 0.0007240878516102732, "loss": 3.338, "step": 32500 }, { "epoch": 2.208520179372197, "grad_norm": 2.3125178813934326, "learning_rate": 0.0007240453866014405, "loss": 3.5221, "step": 32505 }, { "epoch": 2.208859899442859, "grad_norm": 2.0741090774536133, "learning_rate": 0.0007240029215926077, "loss": 3.6288, "step": 32510 }, { "epoch": 2.209199619513521, "grad_norm": 2.1583211421966553, "learning_rate": 0.000723960456583775, "loss": 3.6377, "step": 32515 }, { "epoch": 2.2095393395841825, "grad_norm": 2.057396173477173, "learning_rate": 0.0007239179915749423, "loss": 3.5019, "step": 32520 }, { "epoch": 2.2098790596548445, "grad_norm": 2.179729461669922, "learning_rate": 0.0007238755265661095, "loss": 3.6759, "step": 32525 }, { "epoch": 2.210218779725506, "grad_norm": 1.8470757007598877, "learning_rate": 0.0007238330615572768, "loss": 3.5947, "step": 32530 }, { "epoch": 2.210558499796168, "grad_norm": 1.762826919555664, "learning_rate": 0.0007237905965484442, "loss": 3.5188, "step": 32535 }, { "epoch": 2.21089821986683, "grad_norm": 1.7362220287322998, "learning_rate": 0.0007237481315396114, "loss": 3.6187, "step": 32540 }, { "epoch": 2.2112379399374915, "grad_norm": 2.146134376525879, "learning_rate": 0.0007237056665307786, "loss": 3.4857, "step": 32545 }, { "epoch": 2.211577660008153, "grad_norm": 1.562083125114441, "learning_rate": 0.000723663201521946, "loss": 3.5812, "step": 32550 }, { "epoch": 
2.211917380078815, "grad_norm": 2.0332672595977783, "learning_rate": 0.0007236207365131132, "loss": 3.5149, "step": 32555 }, { "epoch": 2.212257100149477, "grad_norm": 2.0839648246765137, "learning_rate": 0.0007235782715042804, "loss": 3.4452, "step": 32560 }, { "epoch": 2.2125968202201385, "grad_norm": 1.5179280042648315, "learning_rate": 0.0007235358064954478, "loss": 3.5239, "step": 32565 }, { "epoch": 2.2129365402908006, "grad_norm": 2.0949437618255615, "learning_rate": 0.0007234933414866151, "loss": 3.3417, "step": 32570 }, { "epoch": 2.213276260361462, "grad_norm": 1.7432771921157837, "learning_rate": 0.0007234508764777823, "loss": 3.3126, "step": 32575 }, { "epoch": 2.213615980432124, "grad_norm": 1.9803763628005981, "learning_rate": 0.0007234084114689497, "loss": 3.5038, "step": 32580 }, { "epoch": 2.213955700502786, "grad_norm": 1.9290709495544434, "learning_rate": 0.0007233659464601169, "loss": 3.8812, "step": 32585 }, { "epoch": 2.2142954205734475, "grad_norm": 1.8633595705032349, "learning_rate": 0.0007233234814512841, "loss": 3.3226, "step": 32590 }, { "epoch": 2.214635140644109, "grad_norm": 2.5690808296203613, "learning_rate": 0.0007232810164424514, "loss": 3.4362, "step": 32595 }, { "epoch": 2.2149748607147712, "grad_norm": 2.131807565689087, "learning_rate": 0.0007232385514336187, "loss": 3.5887, "step": 32600 }, { "epoch": 2.215314580785433, "grad_norm": 1.9096654653549194, "learning_rate": 0.000723196086424786, "loss": 3.4664, "step": 32605 }, { "epoch": 2.2156543008560945, "grad_norm": 2.206644058227539, "learning_rate": 0.0007231536214159533, "loss": 3.3359, "step": 32610 }, { "epoch": 2.2159940209267566, "grad_norm": 2.2535877227783203, "learning_rate": 0.0007231111564071206, "loss": 3.7369, "step": 32615 }, { "epoch": 2.216333740997418, "grad_norm": 2.4788658618927, "learning_rate": 0.0007230686913982878, "loss": 3.5704, "step": 32620 }, { "epoch": 2.21667346106808, "grad_norm": 2.148340940475464, "learning_rate": 0.0007230262263894551, 
"loss": 3.6164, "step": 32625 }, { "epoch": 2.217013181138742, "grad_norm": 1.8737443685531616, "learning_rate": 0.0007229837613806224, "loss": 3.4865, "step": 32630 }, { "epoch": 2.2173529012094035, "grad_norm": 2.05550217628479, "learning_rate": 0.0007229412963717896, "loss": 3.4586, "step": 32635 }, { "epoch": 2.217692621280065, "grad_norm": 1.8310651779174805, "learning_rate": 0.000722898831362957, "loss": 3.5027, "step": 32640 }, { "epoch": 2.218032341350727, "grad_norm": 2.176222801208496, "learning_rate": 0.0007228563663541242, "loss": 3.4762, "step": 32645 }, { "epoch": 2.218372061421389, "grad_norm": 1.6067299842834473, "learning_rate": 0.0007228139013452915, "loss": 3.3793, "step": 32650 }, { "epoch": 2.2187117814920505, "grad_norm": 3.0138895511627197, "learning_rate": 0.0007227714363364588, "loss": 3.5308, "step": 32655 }, { "epoch": 2.219051501562712, "grad_norm": 2.5294852256774902, "learning_rate": 0.000722728971327626, "loss": 3.5017, "step": 32660 }, { "epoch": 2.219391221633374, "grad_norm": 1.9554604291915894, "learning_rate": 0.0007226865063187933, "loss": 3.2629, "step": 32665 }, { "epoch": 2.219730941704036, "grad_norm": 2.10353684425354, "learning_rate": 0.0007226440413099606, "loss": 3.6342, "step": 32670 }, { "epoch": 2.2200706617746975, "grad_norm": 2.4063055515289307, "learning_rate": 0.0007226015763011279, "loss": 3.5516, "step": 32675 }, { "epoch": 2.2204103818453595, "grad_norm": 2.0334839820861816, "learning_rate": 0.0007225591112922952, "loss": 3.3269, "step": 32680 }, { "epoch": 2.220750101916021, "grad_norm": 1.9759482145309448, "learning_rate": 0.0007225166462834625, "loss": 3.4811, "step": 32685 }, { "epoch": 2.221089821986683, "grad_norm": 2.4098782539367676, "learning_rate": 0.0007224741812746297, "loss": 3.2673, "step": 32690 }, { "epoch": 2.221429542057345, "grad_norm": 2.4442367553710938, "learning_rate": 0.0007224317162657969, "loss": 3.4339, "step": 32695 }, { "epoch": 2.2217692621280065, "grad_norm": 1.6251873970031738, 
"learning_rate": 0.0007223892512569643, "loss": 3.561, "step": 32700 }, { "epoch": 2.222108982198668, "grad_norm": 1.6005674600601196, "learning_rate": 0.0007223467862481315, "loss": 3.3433, "step": 32705 }, { "epoch": 2.2224487022693302, "grad_norm": 1.5991554260253906, "learning_rate": 0.0007223043212392988, "loss": 3.5585, "step": 32710 }, { "epoch": 2.222788422339992, "grad_norm": 1.9235302209854126, "learning_rate": 0.0007222618562304662, "loss": 3.5352, "step": 32715 }, { "epoch": 2.2231281424106535, "grad_norm": 2.5276050567626953, "learning_rate": 0.0007222193912216334, "loss": 3.4569, "step": 32720 }, { "epoch": 2.2234678624813156, "grad_norm": 1.8397812843322754, "learning_rate": 0.0007221769262128006, "loss": 3.4222, "step": 32725 }, { "epoch": 2.223807582551977, "grad_norm": 2.0844779014587402, "learning_rate": 0.000722134461203968, "loss": 3.615, "step": 32730 }, { "epoch": 2.224147302622639, "grad_norm": 2.125979423522949, "learning_rate": 0.0007220919961951352, "loss": 3.551, "step": 32735 }, { "epoch": 2.224487022693301, "grad_norm": 1.838605284690857, "learning_rate": 0.0007220495311863024, "loss": 3.5561, "step": 32740 }, { "epoch": 2.2248267427639625, "grad_norm": 2.0276966094970703, "learning_rate": 0.0007220070661774698, "loss": 3.6089, "step": 32745 }, { "epoch": 2.225166462834624, "grad_norm": 2.5079281330108643, "learning_rate": 0.0007219646011686371, "loss": 3.2813, "step": 32750 }, { "epoch": 2.2255061829052862, "grad_norm": 2.2134366035461426, "learning_rate": 0.0007219221361598043, "loss": 3.5634, "step": 32755 }, { "epoch": 2.225845902975948, "grad_norm": 1.8511149883270264, "learning_rate": 0.0007218796711509716, "loss": 3.6018, "step": 32760 }, { "epoch": 2.2261856230466095, "grad_norm": 2.355083465576172, "learning_rate": 0.0007218372061421389, "loss": 3.4489, "step": 32765 }, { "epoch": 2.2265253431172716, "grad_norm": 2.0718438625335693, "learning_rate": 0.0007217947411333061, "loss": 3.6022, "step": 32770 }, { "epoch": 
2.226865063187933, "grad_norm": 3.0236477851867676, "learning_rate": 0.0007217522761244734, "loss": 3.5278, "step": 32775 }, { "epoch": 2.227204783258595, "grad_norm": 1.9817389249801636, "learning_rate": 0.0007217098111156408, "loss": 3.6485, "step": 32780 }, { "epoch": 2.2275445033292565, "grad_norm": 2.0619943141937256, "learning_rate": 0.000721667346106808, "loss": 3.3154, "step": 32785 }, { "epoch": 2.2278842233999185, "grad_norm": 1.6528451442718506, "learning_rate": 0.0007216248810979753, "loss": 3.4605, "step": 32790 }, { "epoch": 2.22822394347058, "grad_norm": 1.8897037506103516, "learning_rate": 0.0007215824160891425, "loss": 3.6327, "step": 32795 }, { "epoch": 2.228563663541242, "grad_norm": 2.0128278732299805, "learning_rate": 0.0007215399510803098, "loss": 3.7228, "step": 32800 }, { "epoch": 2.228903383611904, "grad_norm": 2.2442123889923096, "learning_rate": 0.0007214974860714771, "loss": 3.4974, "step": 32805 }, { "epoch": 2.2292431036825655, "grad_norm": 1.8995417356491089, "learning_rate": 0.0007214550210626443, "loss": 3.5209, "step": 32810 }, { "epoch": 2.229582823753227, "grad_norm": 1.8925120830535889, "learning_rate": 0.0007214125560538117, "loss": 3.6731, "step": 32815 }, { "epoch": 2.229922543823889, "grad_norm": 2.5257227420806885, "learning_rate": 0.000721370091044979, "loss": 3.6447, "step": 32820 }, { "epoch": 2.230262263894551, "grad_norm": 1.6963608264923096, "learning_rate": 0.0007213276260361462, "loss": 3.6182, "step": 32825 }, { "epoch": 2.2306019839652125, "grad_norm": 2.001617908477783, "learning_rate": 0.0007212851610273136, "loss": 3.6901, "step": 32830 }, { "epoch": 2.2309417040358746, "grad_norm": 2.266151189804077, "learning_rate": 0.0007212426960184808, "loss": 3.4817, "step": 32835 }, { "epoch": 2.231281424106536, "grad_norm": 2.293717622756958, "learning_rate": 0.000721200231009648, "loss": 3.4521, "step": 32840 }, { "epoch": 2.231621144177198, "grad_norm": 2.4461774826049805, "learning_rate": 0.0007211577660008153, 
"loss": 3.4639, "step": 32845 }, { "epoch": 2.23196086424786, "grad_norm": 1.9225420951843262, "learning_rate": 0.0007211153009919827, "loss": 3.5537, "step": 32850 }, { "epoch": 2.2323005843185215, "grad_norm": 1.7730556726455688, "learning_rate": 0.0007210728359831499, "loss": 3.4292, "step": 32855 }, { "epoch": 2.232640304389183, "grad_norm": 2.247865676879883, "learning_rate": 0.0007210303709743172, "loss": 3.368, "step": 32860 }, { "epoch": 2.2329800244598452, "grad_norm": 2.2248849868774414, "learning_rate": 0.0007209879059654845, "loss": 3.2751, "step": 32865 }, { "epoch": 2.233319744530507, "grad_norm": 1.6469711065292358, "learning_rate": 0.0007209454409566517, "loss": 3.6203, "step": 32870 }, { "epoch": 2.2336594646011685, "grad_norm": 2.282653331756592, "learning_rate": 0.000720902975947819, "loss": 3.6307, "step": 32875 }, { "epoch": 2.2339991846718306, "grad_norm": 1.9182759523391724, "learning_rate": 0.0007208605109389862, "loss": 3.5477, "step": 32880 }, { "epoch": 2.234338904742492, "grad_norm": 1.9026601314544678, "learning_rate": 0.0007208180459301536, "loss": 3.5519, "step": 32885 }, { "epoch": 2.234678624813154, "grad_norm": 1.8825656175613403, "learning_rate": 0.0007207755809213209, "loss": 3.5815, "step": 32890 }, { "epoch": 2.235018344883816, "grad_norm": 2.381009817123413, "learning_rate": 0.0007207331159124881, "loss": 3.489, "step": 32895 }, { "epoch": 2.2353580649544775, "grad_norm": 1.9584739208221436, "learning_rate": 0.0007206906509036554, "loss": 3.4844, "step": 32900 }, { "epoch": 2.235697785025139, "grad_norm": 2.107208490371704, "learning_rate": 0.0007206481858948227, "loss": 3.5741, "step": 32905 }, { "epoch": 2.2360375050958012, "grad_norm": 1.916852355003357, "learning_rate": 0.0007206057208859899, "loss": 3.7895, "step": 32910 }, { "epoch": 2.236377225166463, "grad_norm": 2.4962191581726074, "learning_rate": 0.0007205632558771572, "loss": 3.7463, "step": 32915 }, { "epoch": 2.2367169452371245, "grad_norm": 2.0117104053497314, 
"learning_rate": 0.0007205207908683246, "loss": 3.4718, "step": 32920 }, { "epoch": 2.2370566653077866, "grad_norm": 2.1375315189361572, "learning_rate": 0.0007204783258594918, "loss": 3.6186, "step": 32925 }, { "epoch": 2.237396385378448, "grad_norm": 2.0641863346099854, "learning_rate": 0.000720435860850659, "loss": 3.6567, "step": 32930 }, { "epoch": 2.23773610544911, "grad_norm": 1.9263622760772705, "learning_rate": 0.0007203933958418264, "loss": 3.2715, "step": 32935 }, { "epoch": 2.238075825519772, "grad_norm": 2.1767356395721436, "learning_rate": 0.0007203509308329936, "loss": 3.2866, "step": 32940 }, { "epoch": 2.2384155455904335, "grad_norm": 2.459496259689331, "learning_rate": 0.0007203084658241608, "loss": 3.5673, "step": 32945 }, { "epoch": 2.238755265661095, "grad_norm": 1.6849472522735596, "learning_rate": 0.0007202660008153283, "loss": 3.6673, "step": 32950 }, { "epoch": 2.2390949857317572, "grad_norm": 1.8761342763900757, "learning_rate": 0.0007202235358064955, "loss": 3.4977, "step": 32955 }, { "epoch": 2.239434705802419, "grad_norm": 1.518898606300354, "learning_rate": 0.0007201810707976627, "loss": 3.4831, "step": 32960 }, { "epoch": 2.2397744258730805, "grad_norm": 2.3276147842407227, "learning_rate": 0.0007201386057888301, "loss": 3.4549, "step": 32965 }, { "epoch": 2.2401141459437426, "grad_norm": 2.13808536529541, "learning_rate": 0.0007200961407799973, "loss": 3.5632, "step": 32970 }, { "epoch": 2.240453866014404, "grad_norm": 2.4175684452056885, "learning_rate": 0.0007200536757711645, "loss": 3.4348, "step": 32975 }, { "epoch": 2.240793586085066, "grad_norm": 1.9217784404754639, "learning_rate": 0.0007200112107623319, "loss": 3.5064, "step": 32980 }, { "epoch": 2.2411333061557275, "grad_norm": 2.7045979499816895, "learning_rate": 0.0007199687457534992, "loss": 3.5617, "step": 32985 }, { "epoch": 2.2414730262263896, "grad_norm": 2.063260555267334, "learning_rate": 0.0007199262807446664, "loss": 3.4463, "step": 32990 }, { "epoch": 
2.241812746297051, "grad_norm": 1.9452533721923828, "learning_rate": 0.0007198838157358337, "loss": 3.4576, "step": 32995 }, { "epoch": 2.242152466367713, "grad_norm": 2.0136942863464355, "learning_rate": 0.000719841350727001, "loss": 3.4161, "step": 33000 }, { "epoch": 2.242492186438375, "grad_norm": 1.8394380807876587, "learning_rate": 0.0007197988857181682, "loss": 3.4801, "step": 33005 }, { "epoch": 2.2428319065090365, "grad_norm": 2.018526792526245, "learning_rate": 0.0007197564207093355, "loss": 3.2676, "step": 33010 }, { "epoch": 2.243171626579698, "grad_norm": 2.091550350189209, "learning_rate": 0.0007197139557005028, "loss": 3.623, "step": 33015 }, { "epoch": 2.2435113466503602, "grad_norm": 2.0017364025115967, "learning_rate": 0.0007196714906916701, "loss": 3.5031, "step": 33020 }, { "epoch": 2.243851066721022, "grad_norm": 1.6665035486221313, "learning_rate": 0.0007196290256828374, "loss": 3.4831, "step": 33025 }, { "epoch": 2.2441907867916835, "grad_norm": 1.8801870346069336, "learning_rate": 0.0007195865606740047, "loss": 3.5311, "step": 33030 }, { "epoch": 2.2445305068623456, "grad_norm": 1.7907925844192505, "learning_rate": 0.0007195440956651719, "loss": 3.4758, "step": 33035 }, { "epoch": 2.244870226933007, "grad_norm": 1.7819275856018066, "learning_rate": 0.0007195016306563392, "loss": 3.5171, "step": 33040 }, { "epoch": 2.245209947003669, "grad_norm": 1.9980865716934204, "learning_rate": 0.0007194591656475064, "loss": 3.5406, "step": 33045 }, { "epoch": 2.245549667074331, "grad_norm": 1.7968864440917969, "learning_rate": 0.0007194167006386737, "loss": 3.733, "step": 33050 }, { "epoch": 2.2458893871449925, "grad_norm": 1.7920968532562256, "learning_rate": 0.0007193742356298411, "loss": 3.7167, "step": 33055 }, { "epoch": 2.246229107215654, "grad_norm": 1.777008295059204, "learning_rate": 0.0007193317706210083, "loss": 3.4166, "step": 33060 }, { "epoch": 2.2465688272863162, "grad_norm": 2.372445583343506, "learning_rate": 0.0007192893056121756, 
"loss": 3.4794, "step": 33065 }, { "epoch": 2.246908547356978, "grad_norm": 2.3166167736053467, "learning_rate": 0.0007192468406033429, "loss": 3.6152, "step": 33070 }, { "epoch": 2.2472482674276395, "grad_norm": 2.0562355518341064, "learning_rate": 0.0007192043755945101, "loss": 3.772, "step": 33075 }, { "epoch": 2.2475879874983016, "grad_norm": 1.8478800058364868, "learning_rate": 0.0007191619105856773, "loss": 3.5455, "step": 33080 }, { "epoch": 2.247927707568963, "grad_norm": 2.511880397796631, "learning_rate": 0.0007191194455768447, "loss": 3.5547, "step": 33085 }, { "epoch": 2.248267427639625, "grad_norm": 1.5330730676651, "learning_rate": 0.000719076980568012, "loss": 3.3211, "step": 33090 }, { "epoch": 2.248607147710287, "grad_norm": 1.8279441595077515, "learning_rate": 0.0007190345155591792, "loss": 3.1919, "step": 33095 }, { "epoch": 2.2489468677809485, "grad_norm": 2.1895241737365723, "learning_rate": 0.0007189920505503466, "loss": 3.443, "step": 33100 }, { "epoch": 2.24928658785161, "grad_norm": 2.00182843208313, "learning_rate": 0.0007189495855415138, "loss": 3.2572, "step": 33105 }, { "epoch": 2.2496263079222723, "grad_norm": 2.1515963077545166, "learning_rate": 0.000718907120532681, "loss": 3.4175, "step": 33110 }, { "epoch": 2.249966027992934, "grad_norm": 2.004338502883911, "learning_rate": 0.0007188646555238484, "loss": 3.5814, "step": 33115 }, { "epoch": 2.2503057480635955, "grad_norm": 2.250920295715332, "learning_rate": 0.0007188221905150156, "loss": 3.5293, "step": 33120 }, { "epoch": 2.250645468134257, "grad_norm": 2.0166432857513428, "learning_rate": 0.0007187797255061829, "loss": 3.7425, "step": 33125 }, { "epoch": 2.250985188204919, "grad_norm": 1.8620790243148804, "learning_rate": 0.0007187372604973503, "loss": 3.7267, "step": 33130 }, { "epoch": 2.251324908275581, "grad_norm": 2.0935370922088623, "learning_rate": 0.0007186947954885175, "loss": 3.6078, "step": 33135 }, { "epoch": 2.2516646283462425, "grad_norm": 3.3607797622680664, 
"learning_rate": 0.0007186523304796847, "loss": 3.5577, "step": 33140 }, { "epoch": 2.2520043484169046, "grad_norm": 3.56052303314209, "learning_rate": 0.000718609865470852, "loss": 3.5273, "step": 33145 }, { "epoch": 2.252344068487566, "grad_norm": 1.941611647605896, "learning_rate": 0.0007185674004620193, "loss": 3.2843, "step": 33150 }, { "epoch": 2.252683788558228, "grad_norm": 2.260474443435669, "learning_rate": 0.0007185249354531865, "loss": 3.7434, "step": 33155 }, { "epoch": 2.25302350862889, "grad_norm": 1.7928378582000732, "learning_rate": 0.0007184824704443539, "loss": 3.577, "step": 33160 }, { "epoch": 2.2533632286995515, "grad_norm": 1.8342139720916748, "learning_rate": 0.0007184400054355212, "loss": 3.7774, "step": 33165 }, { "epoch": 2.253702948770213, "grad_norm": 2.5248546600341797, "learning_rate": 0.0007183975404266885, "loss": 3.3688, "step": 33170 }, { "epoch": 2.2540426688408752, "grad_norm": 1.9744402170181274, "learning_rate": 0.0007183550754178557, "loss": 3.4774, "step": 33175 }, { "epoch": 2.254382388911537, "grad_norm": 2.0861124992370605, "learning_rate": 0.0007183126104090229, "loss": 3.3869, "step": 33180 }, { "epoch": 2.2547221089821985, "grad_norm": 1.806399941444397, "learning_rate": 0.0007182701454001903, "loss": 3.5854, "step": 33185 }, { "epoch": 2.2550618290528606, "grad_norm": 1.8263423442840576, "learning_rate": 0.0007182276803913575, "loss": 3.5438, "step": 33190 }, { "epoch": 2.255401549123522, "grad_norm": 2.2206897735595703, "learning_rate": 0.0007181852153825248, "loss": 3.6757, "step": 33195 }, { "epoch": 2.255741269194184, "grad_norm": 1.729876160621643, "learning_rate": 0.0007181427503736922, "loss": 3.5195, "step": 33200 }, { "epoch": 2.256080989264846, "grad_norm": 1.741610050201416, "learning_rate": 0.0007181002853648594, "loss": 3.6768, "step": 33205 }, { "epoch": 2.2564207093355075, "grad_norm": 1.9022151231765747, "learning_rate": 0.0007180578203560266, "loss": 3.5776, "step": 33210 }, { "epoch": 
2.256760429406169, "grad_norm": 2.0507829189300537, "learning_rate": 0.000718015355347194, "loss": 3.3182, "step": 33215 }, { "epoch": 2.2571001494768312, "grad_norm": 2.074234962463379, "learning_rate": 0.0007179728903383612, "loss": 3.6638, "step": 33220 }, { "epoch": 2.257439869547493, "grad_norm": 2.6467201709747314, "learning_rate": 0.000717938918331295, "loss": 3.3986, "step": 33225 }, { "epoch": 2.2577795896181545, "grad_norm": 1.8865636587142944, "learning_rate": 0.0007178964533224623, "loss": 3.4644, "step": 33230 }, { "epoch": 2.2581193096888166, "grad_norm": 1.573000192642212, "learning_rate": 0.0007178539883136296, "loss": 3.5202, "step": 33235 }, { "epoch": 2.258459029759478, "grad_norm": 2.1915433406829834, "learning_rate": 0.0007178115233047968, "loss": 3.521, "step": 33240 }, { "epoch": 2.25879874983014, "grad_norm": 2.5065982341766357, "learning_rate": 0.0007177690582959641, "loss": 3.3622, "step": 33245 }, { "epoch": 2.259138469900802, "grad_norm": 2.2500157356262207, "learning_rate": 0.0007177265932871315, "loss": 3.5151, "step": 33250 }, { "epoch": 2.2594781899714635, "grad_norm": 1.7967653274536133, "learning_rate": 0.0007176841282782987, "loss": 3.2275, "step": 33255 }, { "epoch": 2.259817910042125, "grad_norm": 1.6134370565414429, "learning_rate": 0.000717641663269466, "loss": 3.579, "step": 33260 }, { "epoch": 2.2601576301127873, "grad_norm": 2.946643352508545, "learning_rate": 0.0007175991982606333, "loss": 3.7675, "step": 33265 }, { "epoch": 2.260497350183449, "grad_norm": 2.047178268432617, "learning_rate": 0.0007175567332518005, "loss": 3.4407, "step": 33270 }, { "epoch": 2.2608370702541105, "grad_norm": 2.045985460281372, "learning_rate": 0.0007175142682429678, "loss": 3.5745, "step": 33275 }, { "epoch": 2.2611767903247726, "grad_norm": 1.94210946559906, "learning_rate": 0.000717471803234135, "loss": 3.4698, "step": 33280 }, { "epoch": 2.261516510395434, "grad_norm": 2.5216546058654785, "learning_rate": 0.0007174293382253024, "loss": 
3.6334, "step": 33285 }, { "epoch": 2.261856230466096, "grad_norm": 1.6250183582305908, "learning_rate": 0.0007173868732164697, "loss": 3.5383, "step": 33290 }, { "epoch": 2.262195950536758, "grad_norm": 1.9940663576126099, "learning_rate": 0.0007173444082076369, "loss": 3.5, "step": 33295 }, { "epoch": 2.2625356706074196, "grad_norm": 1.757631540298462, "learning_rate": 0.0007173019431988042, "loss": 3.517, "step": 33300 }, { "epoch": 2.262875390678081, "grad_norm": 1.9901783466339111, "learning_rate": 0.0007172594781899715, "loss": 3.5713, "step": 33305 }, { "epoch": 2.2632151107487433, "grad_norm": 2.2031285762786865, "learning_rate": 0.0007172170131811387, "loss": 3.6582, "step": 33310 }, { "epoch": 2.263554830819405, "grad_norm": 2.3526039123535156, "learning_rate": 0.0007171745481723059, "loss": 3.585, "step": 33315 }, { "epoch": 2.2638945508900665, "grad_norm": 2.7102811336517334, "learning_rate": 0.0007171320831634734, "loss": 3.5607, "step": 33320 }, { "epoch": 2.2642342709607286, "grad_norm": 1.8158587217330933, "learning_rate": 0.0007170896181546406, "loss": 3.3182, "step": 33325 }, { "epoch": 2.2645739910313902, "grad_norm": 2.5098142623901367, "learning_rate": 0.0007170471531458078, "loss": 3.4849, "step": 33330 }, { "epoch": 2.264913711102052, "grad_norm": 2.397136926651001, "learning_rate": 0.0007170046881369752, "loss": 3.5328, "step": 33335 }, { "epoch": 2.2652534311727135, "grad_norm": 1.8623361587524414, "learning_rate": 0.0007169622231281424, "loss": 3.4243, "step": 33340 }, { "epoch": 2.2655931512433756, "grad_norm": 1.7838119268417358, "learning_rate": 0.0007169197581193096, "loss": 3.4026, "step": 33345 }, { "epoch": 2.265932871314037, "grad_norm": 1.6423816680908203, "learning_rate": 0.000716877293110477, "loss": 3.6333, "step": 33350 }, { "epoch": 2.266272591384699, "grad_norm": 2.100998640060425, "learning_rate": 0.0007168348281016443, "loss": 3.6239, "step": 33355 }, { "epoch": 2.266612311455361, "grad_norm": 1.4712872505187988, 
"learning_rate": 0.0007167923630928115, "loss": 3.7313, "step": 33360 }, { "epoch": 2.2669520315260225, "grad_norm": 1.9806228876113892, "learning_rate": 0.0007167498980839789, "loss": 3.5721, "step": 33365 }, { "epoch": 2.267291751596684, "grad_norm": 2.164584159851074, "learning_rate": 0.0007167074330751461, "loss": 3.5462, "step": 33370 }, { "epoch": 2.2676314716673462, "grad_norm": 1.8754149675369263, "learning_rate": 0.0007166649680663134, "loss": 3.2944, "step": 33375 }, { "epoch": 2.267971191738008, "grad_norm": 1.7499946355819702, "learning_rate": 0.0007166225030574806, "loss": 3.6445, "step": 33380 }, { "epoch": 2.2683109118086695, "grad_norm": 1.8589437007904053, "learning_rate": 0.0007165800380486479, "loss": 3.266, "step": 33385 }, { "epoch": 2.2686506318793316, "grad_norm": 2.1487882137298584, "learning_rate": 0.0007165375730398153, "loss": 3.5181, "step": 33390 }, { "epoch": 2.268990351949993, "grad_norm": 1.859143853187561, "learning_rate": 0.0007164951080309825, "loss": 3.4831, "step": 33395 }, { "epoch": 2.269330072020655, "grad_norm": 1.6978890895843506, "learning_rate": 0.0007164526430221498, "loss": 3.2698, "step": 33400 }, { "epoch": 2.269669792091317, "grad_norm": 2.1596102714538574, "learning_rate": 0.0007164101780133171, "loss": 3.3556, "step": 33405 }, { "epoch": 2.2700095121619785, "grad_norm": 2.1239514350891113, "learning_rate": 0.0007163677130044843, "loss": 3.4828, "step": 33410 }, { "epoch": 2.27034923223264, "grad_norm": 1.9925737380981445, "learning_rate": 0.0007163252479956515, "loss": 3.6538, "step": 33415 }, { "epoch": 2.2706889523033023, "grad_norm": 1.8856236934661865, "learning_rate": 0.0007162827829868189, "loss": 3.5535, "step": 33420 }, { "epoch": 2.271028672373964, "grad_norm": 1.8141018152236938, "learning_rate": 0.0007162403179779862, "loss": 3.2422, "step": 33425 }, { "epoch": 2.2713683924446255, "grad_norm": 1.973220705986023, "learning_rate": 0.0007161978529691534, "loss": 3.3855, "step": 33430 }, { "epoch": 
2.2717081125152876, "grad_norm": 1.612234354019165, "learning_rate": 0.0007161553879603208, "loss": 3.4489, "step": 33435 }, { "epoch": 2.2720478325859492, "grad_norm": 1.7672868967056274, "learning_rate": 0.000716112922951488, "loss": 3.4341, "step": 33440 }, { "epoch": 2.272387552656611, "grad_norm": 1.5971261262893677, "learning_rate": 0.0007160704579426552, "loss": 3.4804, "step": 33445 }, { "epoch": 2.2727272727272725, "grad_norm": 2.4388267993927, "learning_rate": 0.0007160279929338226, "loss": 3.3455, "step": 33450 }, { "epoch": 2.2730669927979346, "grad_norm": 1.9187442064285278, "learning_rate": 0.0007159855279249898, "loss": 3.6714, "step": 33455 }, { "epoch": 2.273406712868596, "grad_norm": 2.6897244453430176, "learning_rate": 0.0007159430629161571, "loss": 3.5185, "step": 33460 }, { "epoch": 2.273746432939258, "grad_norm": 2.3165011405944824, "learning_rate": 0.0007159005979073245, "loss": 3.4736, "step": 33465 }, { "epoch": 2.27408615300992, "grad_norm": 2.7821707725524902, "learning_rate": 0.0007158581328984917, "loss": 3.5233, "step": 33470 }, { "epoch": 2.2744258730805815, "grad_norm": 1.6622408628463745, "learning_rate": 0.0007158156678896589, "loss": 3.58, "step": 33475 }, { "epoch": 2.274765593151243, "grad_norm": 2.1137094497680664, "learning_rate": 0.0007157732028808262, "loss": 3.4401, "step": 33480 }, { "epoch": 2.2751053132219052, "grad_norm": 1.8912596702575684, "learning_rate": 0.0007157307378719935, "loss": 3.5288, "step": 33485 }, { "epoch": 2.275445033292567, "grad_norm": 1.7199885845184326, "learning_rate": 0.0007156882728631607, "loss": 3.6223, "step": 33490 }, { "epoch": 2.2757847533632285, "grad_norm": 1.8822907209396362, "learning_rate": 0.0007156458078543281, "loss": 3.5569, "step": 33495 }, { "epoch": 2.2761244734338906, "grad_norm": 1.7077116966247559, "learning_rate": 0.0007156033428454954, "loss": 3.7028, "step": 33500 }, { "epoch": 2.276464193504552, "grad_norm": 1.7784490585327148, "learning_rate": 0.0007155608778366626, 
"loss": 3.6476, "step": 33505 }, { "epoch": 2.276803913575214, "grad_norm": 1.9872992038726807, "learning_rate": 0.0007155184128278299, "loss": 3.3094, "step": 33510 }, { "epoch": 2.277143633645876, "grad_norm": 2.229910135269165, "learning_rate": 0.0007154759478189971, "loss": 3.5808, "step": 33515 }, { "epoch": 2.2774833537165375, "grad_norm": 2.7466957569122314, "learning_rate": 0.0007154334828101644, "loss": 3.4855, "step": 33520 }, { "epoch": 2.277823073787199, "grad_norm": 1.9431400299072266, "learning_rate": 0.0007153910178013317, "loss": 3.402, "step": 33525 }, { "epoch": 2.2781627938578612, "grad_norm": 1.9037861824035645, "learning_rate": 0.000715348552792499, "loss": 3.5033, "step": 33530 }, { "epoch": 2.278502513928523, "grad_norm": 1.9500634670257568, "learning_rate": 0.0007153060877836663, "loss": 3.342, "step": 33535 }, { "epoch": 2.2788422339991845, "grad_norm": 2.4620304107666016, "learning_rate": 0.0007152636227748336, "loss": 3.7972, "step": 33540 }, { "epoch": 2.2791819540698466, "grad_norm": 1.9685462713241577, "learning_rate": 0.0007152211577660008, "loss": 3.4421, "step": 33545 }, { "epoch": 2.279521674140508, "grad_norm": 1.7865756750106812, "learning_rate": 0.000715178692757168, "loss": 3.446, "step": 33550 }, { "epoch": 2.27986139421117, "grad_norm": 2.7477591037750244, "learning_rate": 0.0007151362277483354, "loss": 3.4886, "step": 33555 }, { "epoch": 2.280201114281832, "grad_norm": 2.501831531524658, "learning_rate": 0.0007150937627395026, "loss": 3.4443, "step": 33560 }, { "epoch": 2.2805408343524936, "grad_norm": 1.664844036102295, "learning_rate": 0.00071505129773067, "loss": 3.369, "step": 33565 }, { "epoch": 2.280880554423155, "grad_norm": 1.4424546957015991, "learning_rate": 0.0007150088327218373, "loss": 3.5819, "step": 33570 }, { "epoch": 2.2812202744938173, "grad_norm": 1.9877679347991943, "learning_rate": 0.0007149663677130045, "loss": 3.5927, "step": 33575 }, { "epoch": 2.281559994564479, "grad_norm": 2.907392740249634, 
"learning_rate": 0.0007149239027041717, "loss": 3.4502, "step": 33580 }, { "epoch": 2.2818997146351405, "grad_norm": 1.9282513856887817, "learning_rate": 0.0007148814376953391, "loss": 3.6315, "step": 33585 }, { "epoch": 2.2822394347058026, "grad_norm": 1.8568400144577026, "learning_rate": 0.0007148389726865063, "loss": 3.3563, "step": 33590 }, { "epoch": 2.2825791547764642, "grad_norm": 2.643345832824707, "learning_rate": 0.0007147965076776735, "loss": 3.4747, "step": 33595 }, { "epoch": 2.282918874847126, "grad_norm": 2.0390660762786865, "learning_rate": 0.000714754042668841, "loss": 3.5533, "step": 33600 }, { "epoch": 2.283258594917788, "grad_norm": 2.0634756088256836, "learning_rate": 0.0007147115776600082, "loss": 3.3591, "step": 33605 }, { "epoch": 2.2835983149884496, "grad_norm": 2.2354965209960938, "learning_rate": 0.0007146691126511754, "loss": 3.6092, "step": 33610 }, { "epoch": 2.283938035059111, "grad_norm": 1.9779633283615112, "learning_rate": 0.0007146266476423427, "loss": 3.5041, "step": 33615 }, { "epoch": 2.2842777551297733, "grad_norm": 2.095409631729126, "learning_rate": 0.00071458418263351, "loss": 3.5711, "step": 33620 }, { "epoch": 2.284617475200435, "grad_norm": 2.1008098125457764, "learning_rate": 0.0007145417176246772, "loss": 3.7386, "step": 33625 }, { "epoch": 2.2849571952710965, "grad_norm": 2.279766321182251, "learning_rate": 0.0007144992526158445, "loss": 3.4695, "step": 33630 }, { "epoch": 2.2852969153417586, "grad_norm": 2.4519786834716797, "learning_rate": 0.0007144567876070119, "loss": 3.4637, "step": 33635 }, { "epoch": 2.2856366354124202, "grad_norm": 1.7043871879577637, "learning_rate": 0.0007144143225981791, "loss": 3.7145, "step": 33640 }, { "epoch": 2.285976355483082, "grad_norm": 1.9714694023132324, "learning_rate": 0.0007143718575893464, "loss": 3.7138, "step": 33645 }, { "epoch": 2.286316075553744, "grad_norm": 1.8411955833435059, "learning_rate": 0.0007143293925805137, "loss": 3.3766, "step": 33650 }, { "epoch": 
2.2866557956244056, "grad_norm": 2.3340158462524414, "learning_rate": 0.0007142869275716809, "loss": 3.5039, "step": 33655 }, { "epoch": 2.286995515695067, "grad_norm": 1.518235445022583, "learning_rate": 0.0007142444625628482, "loss": 3.5488, "step": 33660 }, { "epoch": 2.2873352357657293, "grad_norm": 2.0473437309265137, "learning_rate": 0.0007142019975540154, "loss": 3.4522, "step": 33665 }, { "epoch": 2.287674955836391, "grad_norm": 1.7731108665466309, "learning_rate": 0.0007141595325451828, "loss": 3.4433, "step": 33670 }, { "epoch": 2.2880146759070525, "grad_norm": 2.071009874343872, "learning_rate": 0.0007141170675363501, "loss": 3.2317, "step": 33675 }, { "epoch": 2.288354395977714, "grad_norm": 2.1066274642944336, "learning_rate": 0.0007140746025275173, "loss": 3.4134, "step": 33680 }, { "epoch": 2.2886941160483762, "grad_norm": 2.0443246364593506, "learning_rate": 0.0007140321375186846, "loss": 3.6723, "step": 33685 }, { "epoch": 2.289033836119038, "grad_norm": 2.0353033542633057, "learning_rate": 0.0007139896725098519, "loss": 3.5604, "step": 33690 }, { "epoch": 2.2893735561896995, "grad_norm": 1.8696649074554443, "learning_rate": 0.0007139472075010191, "loss": 3.5026, "step": 33695 }, { "epoch": 2.2897132762603616, "grad_norm": 1.8705835342407227, "learning_rate": 0.0007139047424921863, "loss": 3.6515, "step": 33700 }, { "epoch": 2.290052996331023, "grad_norm": 1.8505743741989136, "learning_rate": 0.0007138622774833538, "loss": 3.5741, "step": 33705 }, { "epoch": 2.290392716401685, "grad_norm": 2.768477201461792, "learning_rate": 0.000713819812474521, "loss": 3.3799, "step": 33710 }, { "epoch": 2.290732436472347, "grad_norm": 2.2261621952056885, "learning_rate": 0.0007137773474656883, "loss": 3.3751, "step": 33715 }, { "epoch": 2.2910721565430086, "grad_norm": 1.7151200771331787, "learning_rate": 0.0007137348824568556, "loss": 3.4201, "step": 33720 }, { "epoch": 2.29141187661367, "grad_norm": 1.5492867231369019, "learning_rate": 0.0007136924174480228, 
"loss": 3.4407, "step": 33725 }, { "epoch": 2.2917515966843323, "grad_norm": 1.9124139547348022, "learning_rate": 0.0007136499524391901, "loss": 3.8114, "step": 33730 }, { "epoch": 2.292091316754994, "grad_norm": 1.9434789419174194, "learning_rate": 0.0007136074874303575, "loss": 3.4472, "step": 33735 }, { "epoch": 2.2924310368256555, "grad_norm": 1.960012435913086, "learning_rate": 0.0007135650224215247, "loss": 3.4702, "step": 33740 }, { "epoch": 2.2927707568963176, "grad_norm": 1.8785014152526855, "learning_rate": 0.000713522557412692, "loss": 3.4172, "step": 33745 }, { "epoch": 2.2931104769669792, "grad_norm": 1.9549678564071655, "learning_rate": 0.0007134800924038593, "loss": 3.611, "step": 33750 }, { "epoch": 2.293450197037641, "grad_norm": 1.9727333784103394, "learning_rate": 0.0007134376273950265, "loss": 3.6838, "step": 33755 }, { "epoch": 2.293789917108303, "grad_norm": 1.716071367263794, "learning_rate": 0.0007133951623861938, "loss": 3.3149, "step": 33760 }, { "epoch": 2.2941296371789646, "grad_norm": 1.7786520719528198, "learning_rate": 0.000713352697377361, "loss": 3.2121, "step": 33765 }, { "epoch": 2.294469357249626, "grad_norm": 2.0660407543182373, "learning_rate": 0.0007133102323685284, "loss": 3.6223, "step": 33770 }, { "epoch": 2.2948090773202883, "grad_norm": 2.1330084800720215, "learning_rate": 0.0007132677673596957, "loss": 3.4631, "step": 33775 }, { "epoch": 2.29514879739095, "grad_norm": 1.9406535625457764, "learning_rate": 0.0007132253023508629, "loss": 3.4968, "step": 33780 }, { "epoch": 2.2954885174616115, "grad_norm": 2.4268500804901123, "learning_rate": 0.0007131828373420302, "loss": 3.546, "step": 33785 }, { "epoch": 2.295828237532273, "grad_norm": 2.3756794929504395, "learning_rate": 0.0007131403723331975, "loss": 3.5703, "step": 33790 }, { "epoch": 2.2961679576029352, "grad_norm": 2.3881332874298096, "learning_rate": 0.0007130979073243647, "loss": 3.6207, "step": 33795 }, { "epoch": 2.296507677673597, "grad_norm": 
1.5907987356185913, "learning_rate": 0.000713055442315532, "loss": 3.5494, "step": 33800 }, { "epoch": 2.2968473977442585, "grad_norm": 2.8335342407226562, "learning_rate": 0.0007130129773066994, "loss": 3.5363, "step": 33805 }, { "epoch": 2.2971871178149206, "grad_norm": 2.0107569694519043, "learning_rate": 0.0007129705122978666, "loss": 3.3762, "step": 33810 }, { "epoch": 2.297526837885582, "grad_norm": 1.6488744020462036, "learning_rate": 0.0007129280472890338, "loss": 3.5335, "step": 33815 }, { "epoch": 2.297866557956244, "grad_norm": 1.9376587867736816, "learning_rate": 0.0007128855822802012, "loss": 3.465, "step": 33820 }, { "epoch": 2.298206278026906, "grad_norm": 2.055166482925415, "learning_rate": 0.0007128431172713684, "loss": 3.3207, "step": 33825 }, { "epoch": 2.2985459980975675, "grad_norm": 1.9401886463165283, "learning_rate": 0.0007128006522625356, "loss": 3.6182, "step": 33830 }, { "epoch": 2.298885718168229, "grad_norm": 1.8669536113739014, "learning_rate": 0.000712758187253703, "loss": 3.3536, "step": 33835 }, { "epoch": 2.2992254382388913, "grad_norm": 2.253632068634033, "learning_rate": 0.0007127157222448703, "loss": 3.4499, "step": 33840 }, { "epoch": 2.299565158309553, "grad_norm": 1.9575047492980957, "learning_rate": 0.0007126732572360375, "loss": 3.7228, "step": 33845 }, { "epoch": 2.2999048783802145, "grad_norm": 1.7627900838851929, "learning_rate": 0.0007126307922272049, "loss": 3.5722, "step": 33850 }, { "epoch": 2.3002445984508766, "grad_norm": 1.7421963214874268, "learning_rate": 0.0007125883272183721, "loss": 3.5506, "step": 33855 }, { "epoch": 2.300584318521538, "grad_norm": 1.5818297863006592, "learning_rate": 0.0007125458622095393, "loss": 3.1344, "step": 33860 }, { "epoch": 2.3009240385922, "grad_norm": 1.8507596254348755, "learning_rate": 0.0007125033972007066, "loss": 3.6578, "step": 33865 }, { "epoch": 2.301263758662862, "grad_norm": 1.9905837774276733, "learning_rate": 0.0007124609321918739, "loss": 3.27, "step": 33870 }, { 
"epoch": 2.3016034787335236, "grad_norm": 1.532478928565979, "learning_rate": 0.0007124184671830412, "loss": 3.3864, "step": 33875 }, { "epoch": 2.301943198804185, "grad_norm": 1.939253807067871, "learning_rate": 0.0007123760021742085, "loss": 3.5346, "step": 33880 }, { "epoch": 2.3022829188748473, "grad_norm": 1.8791645765304565, "learning_rate": 0.0007123335371653758, "loss": 3.3906, "step": 33885 }, { "epoch": 2.302622638945509, "grad_norm": 1.638719081878662, "learning_rate": 0.000712291072156543, "loss": 3.5497, "step": 33890 }, { "epoch": 2.3029623590161705, "grad_norm": 1.804824948310852, "learning_rate": 0.0007122486071477103, "loss": 3.5029, "step": 33895 }, { "epoch": 2.3033020790868326, "grad_norm": 2.0842745304107666, "learning_rate": 0.0007122061421388775, "loss": 3.4351, "step": 33900 }, { "epoch": 2.3036417991574942, "grad_norm": 1.8567535877227783, "learning_rate": 0.0007121636771300448, "loss": 3.641, "step": 33905 }, { "epoch": 2.303981519228156, "grad_norm": 1.872025966644287, "learning_rate": 0.0007121212121212122, "loss": 3.4498, "step": 33910 }, { "epoch": 2.304321239298818, "grad_norm": 1.9859822988510132, "learning_rate": 0.0007120787471123794, "loss": 3.719, "step": 33915 }, { "epoch": 2.3046609593694796, "grad_norm": 1.9624242782592773, "learning_rate": 0.0007120362821035467, "loss": 3.6654, "step": 33920 }, { "epoch": 2.305000679440141, "grad_norm": 2.015174627304077, "learning_rate": 0.000711993817094714, "loss": 3.6824, "step": 33925 }, { "epoch": 2.3053403995108033, "grad_norm": 1.7880918979644775, "learning_rate": 0.0007119513520858812, "loss": 3.3746, "step": 33930 }, { "epoch": 2.305680119581465, "grad_norm": 2.0493500232696533, "learning_rate": 0.0007119088870770485, "loss": 3.5236, "step": 33935 }, { "epoch": 2.3060198396521265, "grad_norm": 1.6862071752548218, "learning_rate": 0.0007118664220682158, "loss": 3.4101, "step": 33940 }, { "epoch": 2.3063595597227886, "grad_norm": 2.1005141735076904, "learning_rate": 
0.0007118239570593831, "loss": 3.5853, "step": 33945 }, { "epoch": 2.3066992797934502, "grad_norm": 1.9302781820297241, "learning_rate": 0.0007117814920505504, "loss": 3.5637, "step": 33950 }, { "epoch": 2.307038999864112, "grad_norm": 1.6741688251495361, "learning_rate": 0.0007117390270417177, "loss": 3.3202, "step": 33955 }, { "epoch": 2.307378719934774, "grad_norm": 1.9339046478271484, "learning_rate": 0.0007116965620328849, "loss": 3.5155, "step": 33960 }, { "epoch": 2.3077184400054356, "grad_norm": 2.4213578701019287, "learning_rate": 0.0007116540970240521, "loss": 3.4693, "step": 33965 }, { "epoch": 2.308058160076097, "grad_norm": 2.2124521732330322, "learning_rate": 0.0007116116320152195, "loss": 3.2578, "step": 33970 }, { "epoch": 2.3083978801467593, "grad_norm": 1.7827404737472534, "learning_rate": 0.0007115691670063867, "loss": 3.6076, "step": 33975 }, { "epoch": 2.308737600217421, "grad_norm": 2.3875722885131836, "learning_rate": 0.000711526701997554, "loss": 3.3645, "step": 33980 }, { "epoch": 2.3090773202880825, "grad_norm": 3.508021593093872, "learning_rate": 0.0007114842369887214, "loss": 3.678, "step": 33985 }, { "epoch": 2.3094170403587446, "grad_norm": 2.3246517181396484, "learning_rate": 0.0007114417719798886, "loss": 3.4589, "step": 33990 }, { "epoch": 2.3097567604294063, "grad_norm": 2.292243719100952, "learning_rate": 0.0007113993069710558, "loss": 3.7509, "step": 33995 }, { "epoch": 2.310096480500068, "grad_norm": 2.9320993423461914, "learning_rate": 0.0007113568419622232, "loss": 3.4831, "step": 34000 }, { "epoch": 2.31043620057073, "grad_norm": 1.9775863885879517, "learning_rate": 0.0007113143769533904, "loss": 3.5831, "step": 34005 }, { "epoch": 2.3107759206413916, "grad_norm": 1.891771912574768, "learning_rate": 0.0007112719119445576, "loss": 3.6043, "step": 34010 }, { "epoch": 2.311115640712053, "grad_norm": 2.1406350135803223, "learning_rate": 0.000711229446935725, "loss": 3.5409, "step": 34015 }, { "epoch": 2.311455360782715, 
"grad_norm": 1.6915061473846436, "learning_rate": 0.0007111869819268923, "loss": 3.588, "step": 34020 }, { "epoch": 2.311795080853377, "grad_norm": 1.7291736602783203, "learning_rate": 0.0007111445169180595, "loss": 3.534, "step": 34025 }, { "epoch": 2.3121348009240386, "grad_norm": 1.942555546760559, "learning_rate": 0.0007111020519092268, "loss": 3.2395, "step": 34030 }, { "epoch": 2.3124745209947, "grad_norm": 2.2373106479644775, "learning_rate": 0.0007110595869003941, "loss": 3.5339, "step": 34035 }, { "epoch": 2.3128142410653623, "grad_norm": 1.897459864616394, "learning_rate": 0.0007110171218915613, "loss": 3.7161, "step": 34040 }, { "epoch": 2.313153961136024, "grad_norm": 1.7452938556671143, "learning_rate": 0.0007109746568827286, "loss": 3.672, "step": 34045 }, { "epoch": 2.3134936812066855, "grad_norm": 2.128248691558838, "learning_rate": 0.000710932191873896, "loss": 3.3662, "step": 34050 }, { "epoch": 2.3138334012773476, "grad_norm": 2.624242067337036, "learning_rate": 0.0007108897268650633, "loss": 3.332, "step": 34055 }, { "epoch": 2.3141731213480092, "grad_norm": 1.8061189651489258, "learning_rate": 0.0007108472618562305, "loss": 3.318, "step": 34060 }, { "epoch": 2.314512841418671, "grad_norm": 1.7279044389724731, "learning_rate": 0.0007108047968473977, "loss": 3.5524, "step": 34065 }, { "epoch": 2.314852561489333, "grad_norm": 2.3256540298461914, "learning_rate": 0.0007107623318385651, "loss": 3.4726, "step": 34070 }, { "epoch": 2.3151922815599946, "grad_norm": 1.8824150562286377, "learning_rate": 0.0007107198668297323, "loss": 3.3106, "step": 34075 }, { "epoch": 2.315532001630656, "grad_norm": 2.2570230960845947, "learning_rate": 0.0007106774018208995, "loss": 3.701, "step": 34080 }, { "epoch": 2.3158717217013183, "grad_norm": 1.637373447418213, "learning_rate": 0.000710634936812067, "loss": 3.7805, "step": 34085 }, { "epoch": 2.31621144177198, "grad_norm": 1.7727952003479004, "learning_rate": 0.0007105924718032342, "loss": 3.6831, "step": 34090 
}, { "epoch": 2.3165511618426415, "grad_norm": 1.7624982595443726, "learning_rate": 0.0007105500067944014, "loss": 3.7081, "step": 34095 }, { "epoch": 2.3168908819133036, "grad_norm": 1.5300043821334839, "learning_rate": 0.0007105075417855688, "loss": 3.6057, "step": 34100 }, { "epoch": 2.3172306019839652, "grad_norm": 1.9608665704727173, "learning_rate": 0.000710465076776736, "loss": 3.2821, "step": 34105 }, { "epoch": 2.317570322054627, "grad_norm": 1.6399017572402954, "learning_rate": 0.0007104226117679032, "loss": 3.6235, "step": 34110 }, { "epoch": 2.317910042125289, "grad_norm": 2.296961784362793, "learning_rate": 0.0007103801467590705, "loss": 3.7077, "step": 34115 }, { "epoch": 2.3182497621959506, "grad_norm": 2.634289264678955, "learning_rate": 0.0007103376817502379, "loss": 3.7341, "step": 34120 }, { "epoch": 2.318589482266612, "grad_norm": 1.8992996215820312, "learning_rate": 0.0007102952167414051, "loss": 3.4779, "step": 34125 }, { "epoch": 2.318929202337274, "grad_norm": 2.1884682178497314, "learning_rate": 0.0007102527517325724, "loss": 3.5209, "step": 34130 }, { "epoch": 2.319268922407936, "grad_norm": 1.744889736175537, "learning_rate": 0.0007102102867237397, "loss": 3.6654, "step": 34135 }, { "epoch": 2.3196086424785975, "grad_norm": 1.6168051958084106, "learning_rate": 0.0007101678217149069, "loss": 3.5746, "step": 34140 }, { "epoch": 2.319948362549259, "grad_norm": 2.2371060848236084, "learning_rate": 0.0007101253567060742, "loss": 3.4014, "step": 34145 }, { "epoch": 2.3202880826199213, "grad_norm": 2.10324764251709, "learning_rate": 0.0007100828916972414, "loss": 3.5416, "step": 34150 }, { "epoch": 2.320627802690583, "grad_norm": 2.0911216735839844, "learning_rate": 0.0007100404266884088, "loss": 3.4274, "step": 34155 }, { "epoch": 2.3209675227612445, "grad_norm": 2.705932378768921, "learning_rate": 0.0007099979616795761, "loss": 3.3898, "step": 34160 }, { "epoch": 2.3213072428319066, "grad_norm": 1.9002385139465332, "learning_rate": 
0.0007099554966707433, "loss": 3.3001, "step": 34165 }, { "epoch": 2.3216469629025682, "grad_norm": 1.571339726448059, "learning_rate": 0.0007099130316619106, "loss": 3.3891, "step": 34170 }, { "epoch": 2.32198668297323, "grad_norm": 1.9078528881072998, "learning_rate": 0.0007098705666530779, "loss": 3.3267, "step": 34175 }, { "epoch": 2.322326403043892, "grad_norm": 2.1099889278411865, "learning_rate": 0.0007098281016442451, "loss": 3.3185, "step": 34180 }, { "epoch": 2.3226661231145536, "grad_norm": 2.419194221496582, "learning_rate": 0.0007097856366354124, "loss": 3.4542, "step": 34185 }, { "epoch": 2.323005843185215, "grad_norm": 2.5089094638824463, "learning_rate": 0.0007097431716265798, "loss": 3.3334, "step": 34190 }, { "epoch": 2.3233455632558773, "grad_norm": 2.1689441204071045, "learning_rate": 0.000709700706617747, "loss": 3.7095, "step": 34195 }, { "epoch": 2.323685283326539, "grad_norm": 2.0560226440429688, "learning_rate": 0.0007096582416089142, "loss": 3.4935, "step": 34200 }, { "epoch": 2.3240250033972005, "grad_norm": 2.307888984680176, "learning_rate": 0.0007096157766000816, "loss": 3.4234, "step": 34205 }, { "epoch": 2.3243647234678626, "grad_norm": 1.4710586071014404, "learning_rate": 0.0007095733115912488, "loss": 3.837, "step": 34210 }, { "epoch": 2.3247044435385242, "grad_norm": 1.4282187223434448, "learning_rate": 0.000709530846582416, "loss": 3.4525, "step": 34215 }, { "epoch": 2.325044163609186, "grad_norm": 1.9489408731460571, "learning_rate": 0.0007094883815735834, "loss": 3.7377, "step": 34220 }, { "epoch": 2.325383883679848, "grad_norm": 2.205334424972534, "learning_rate": 0.0007094459165647507, "loss": 3.4317, "step": 34225 }, { "epoch": 2.3257236037505096, "grad_norm": 2.109504461288452, "learning_rate": 0.0007094034515559179, "loss": 3.634, "step": 34230 }, { "epoch": 2.326063323821171, "grad_norm": 1.8216087818145752, "learning_rate": 0.0007093609865470853, "loss": 3.4065, "step": 34235 }, { "epoch": 2.3264030438918333, 
"grad_norm": 1.7251875400543213, "learning_rate": 0.0007093185215382525, "loss": 3.5505, "step": 34240 }, { "epoch": 2.326742763962495, "grad_norm": 1.8596128225326538, "learning_rate": 0.0007092760565294197, "loss": 3.5239, "step": 34245 }, { "epoch": 2.3270824840331565, "grad_norm": 2.3320693969726562, "learning_rate": 0.000709233591520587, "loss": 3.5223, "step": 34250 }, { "epoch": 2.3274222041038186, "grad_norm": 1.8786749839782715, "learning_rate": 0.0007091911265117543, "loss": 3.5883, "step": 34255 }, { "epoch": 2.3277619241744802, "grad_norm": 2.2125813961029053, "learning_rate": 0.0007091486615029216, "loss": 3.5516, "step": 34260 }, { "epoch": 2.328101644245142, "grad_norm": 1.9704515933990479, "learning_rate": 0.0007091061964940889, "loss": 3.3703, "step": 34265 }, { "epoch": 2.328441364315804, "grad_norm": 1.9924957752227783, "learning_rate": 0.0007090637314852562, "loss": 3.6092, "step": 34270 }, { "epoch": 2.3287810843864656, "grad_norm": 2.249386787414551, "learning_rate": 0.0007090212664764234, "loss": 3.296, "step": 34275 }, { "epoch": 2.329120804457127, "grad_norm": 1.6402753591537476, "learning_rate": 0.0007089788014675907, "loss": 3.8472, "step": 34280 }, { "epoch": 2.3294605245277893, "grad_norm": 1.677741289138794, "learning_rate": 0.000708936336458758, "loss": 3.5381, "step": 34285 }, { "epoch": 2.329800244598451, "grad_norm": 2.0375022888183594, "learning_rate": 0.0007088938714499252, "loss": 3.3278, "step": 34290 }, { "epoch": 2.3301399646691126, "grad_norm": 1.6116797924041748, "learning_rate": 0.0007088514064410926, "loss": 3.5864, "step": 34295 }, { "epoch": 2.3304796847397746, "grad_norm": 1.8889539241790771, "learning_rate": 0.0007088089414322598, "loss": 3.4213, "step": 34300 }, { "epoch": 2.3308194048104363, "grad_norm": 1.9801197052001953, "learning_rate": 0.0007087664764234271, "loss": 3.6753, "step": 34305 }, { "epoch": 2.331159124881098, "grad_norm": 1.960722804069519, "learning_rate": 0.0007087240114145944, "loss": 3.5813, 
"step": 34310 }, { "epoch": 2.33149884495176, "grad_norm": 2.26657772064209, "learning_rate": 0.0007086815464057616, "loss": 3.5813, "step": 34315 }, { "epoch": 2.3318385650224216, "grad_norm": 2.076463222503662, "learning_rate": 0.0007086390813969289, "loss": 3.2389, "step": 34320 }, { "epoch": 2.3321782850930832, "grad_norm": 2.086482286453247, "learning_rate": 0.0007085966163880963, "loss": 3.383, "step": 34325 }, { "epoch": 2.3325180051637453, "grad_norm": 2.029550790786743, "learning_rate": 0.0007085541513792635, "loss": 3.5496, "step": 34330 }, { "epoch": 2.332857725234407, "grad_norm": 1.948332667350769, "learning_rate": 0.0007085116863704308, "loss": 3.3498, "step": 34335 }, { "epoch": 2.3331974453050686, "grad_norm": 2.2594540119171143, "learning_rate": 0.0007084692213615981, "loss": 3.5035, "step": 34340 }, { "epoch": 2.3335371653757306, "grad_norm": 2.103825330734253, "learning_rate": 0.0007084267563527653, "loss": 3.4706, "step": 34345 }, { "epoch": 2.3338768854463923, "grad_norm": 1.9789851903915405, "learning_rate": 0.0007083842913439325, "loss": 3.615, "step": 34350 }, { "epoch": 2.334216605517054, "grad_norm": 3.527893304824829, "learning_rate": 0.0007083418263350999, "loss": 3.6122, "step": 34355 }, { "epoch": 2.3345563255877155, "grad_norm": 1.6918271780014038, "learning_rate": 0.0007082993613262672, "loss": 3.654, "step": 34360 }, { "epoch": 2.3348960456583776, "grad_norm": 1.7627527713775635, "learning_rate": 0.0007082568963174344, "loss": 3.4844, "step": 34365 }, { "epoch": 2.3352357657290392, "grad_norm": 2.062180757522583, "learning_rate": 0.0007082144313086018, "loss": 3.6496, "step": 34370 }, { "epoch": 2.335575485799701, "grad_norm": 1.9760396480560303, "learning_rate": 0.000708171966299769, "loss": 3.5681, "step": 34375 }, { "epoch": 2.335915205870363, "grad_norm": 1.9397847652435303, "learning_rate": 0.0007081295012909362, "loss": 3.4398, "step": 34380 }, { "epoch": 2.3362549259410246, "grad_norm": 2.2817468643188477, "learning_rate": 
0.0007080870362821036, "loss": 3.6264, "step": 34385 }, { "epoch": 2.336594646011686, "grad_norm": 2.6113367080688477, "learning_rate": 0.0007080445712732708, "loss": 3.5625, "step": 34390 }, { "epoch": 2.3369343660823483, "grad_norm": 2.3235974311828613, "learning_rate": 0.0007080021062644382, "loss": 3.7059, "step": 34395 }, { "epoch": 2.33727408615301, "grad_norm": 2.1492538452148438, "learning_rate": 0.0007079596412556054, "loss": 3.5718, "step": 34400 }, { "epoch": 2.3376138062236715, "grad_norm": 1.7483669519424438, "learning_rate": 0.0007079171762467727, "loss": 3.4447, "step": 34405 }, { "epoch": 2.3379535262943336, "grad_norm": 2.379417657852173, "learning_rate": 0.00070787471123794, "loss": 3.6047, "step": 34410 }, { "epoch": 2.3382932463649952, "grad_norm": 2.558142900466919, "learning_rate": 0.0007078322462291072, "loss": 3.5501, "step": 34415 }, { "epoch": 2.338632966435657, "grad_norm": 2.303607940673828, "learning_rate": 0.0007077897812202745, "loss": 3.2801, "step": 34420 }, { "epoch": 2.338972686506319, "grad_norm": 1.8708209991455078, "learning_rate": 0.0007077473162114418, "loss": 3.2079, "step": 34425 }, { "epoch": 2.3393124065769806, "grad_norm": 1.940887212753296, "learning_rate": 0.0007077048512026091, "loss": 3.5708, "step": 34430 }, { "epoch": 2.339652126647642, "grad_norm": 1.7013128995895386, "learning_rate": 0.0007076623861937764, "loss": 3.6111, "step": 34435 }, { "epoch": 2.3399918467183043, "grad_norm": 2.403005599975586, "learning_rate": 0.0007076199211849437, "loss": 3.4368, "step": 34440 }, { "epoch": 2.340331566788966, "grad_norm": 1.798152208328247, "learning_rate": 0.0007075774561761109, "loss": 3.4902, "step": 34445 }, { "epoch": 2.3406712868596276, "grad_norm": 1.818124771118164, "learning_rate": 0.0007075349911672781, "loss": 3.5354, "step": 34450 }, { "epoch": 2.3410110069302896, "grad_norm": 1.8599094152450562, "learning_rate": 0.0007074925261584455, "loss": 3.5676, "step": 34455 }, { "epoch": 2.3413507270009513, 
"grad_norm": 2.7818636894226074, "learning_rate": 0.0007074500611496127, "loss": 3.6429, "step": 34460 }, { "epoch": 2.341690447071613, "grad_norm": 1.9071162939071655, "learning_rate": 0.00070740759614078, "loss": 3.573, "step": 34465 }, { "epoch": 2.3420301671422745, "grad_norm": 1.483451008796692, "learning_rate": 0.0007073651311319474, "loss": 3.3688, "step": 34470 }, { "epoch": 2.3423698872129366, "grad_norm": 2.107128381729126, "learning_rate": 0.0007073226661231146, "loss": 3.2425, "step": 34475 }, { "epoch": 2.3427096072835982, "grad_norm": 2.175143241882324, "learning_rate": 0.0007072802011142818, "loss": 3.5653, "step": 34480 }, { "epoch": 2.34304932735426, "grad_norm": 2.1571295261383057, "learning_rate": 0.0007072377361054492, "loss": 3.5228, "step": 34485 }, { "epoch": 2.343389047424922, "grad_norm": 2.1177380084991455, "learning_rate": 0.0007071952710966164, "loss": 3.6462, "step": 34490 }, { "epoch": 2.3437287674955836, "grad_norm": 1.9102437496185303, "learning_rate": 0.0007071528060877836, "loss": 3.6915, "step": 34495 }, { "epoch": 2.344068487566245, "grad_norm": 1.9248100519180298, "learning_rate": 0.000707110341078951, "loss": 3.2712, "step": 34500 }, { "epoch": 2.3444082076369073, "grad_norm": 2.389169931411743, "learning_rate": 0.0007070678760701183, "loss": 3.5567, "step": 34505 }, { "epoch": 2.344747927707569, "grad_norm": 2.593984603881836, "learning_rate": 0.0007070254110612855, "loss": 3.483, "step": 34510 }, { "epoch": 2.3450876477782305, "grad_norm": 2.4395134449005127, "learning_rate": 0.0007069829460524528, "loss": 3.5053, "step": 34515 }, { "epoch": 2.3454273678488926, "grad_norm": 5.846465110778809, "learning_rate": 0.0007069404810436201, "loss": 3.2678, "step": 34520 }, { "epoch": 2.3457670879195542, "grad_norm": 1.844186544418335, "learning_rate": 0.0007068980160347873, "loss": 3.5895, "step": 34525 }, { "epoch": 2.346106807990216, "grad_norm": 1.8714550733566284, "learning_rate": 0.0007068555510259546, "loss": 3.6082, "step": 
34530 }, { "epoch": 2.346446528060878, "grad_norm": 2.189897060394287, "learning_rate": 0.000706813086017122, "loss": 3.5906, "step": 34535 }, { "epoch": 2.3467862481315396, "grad_norm": 1.9855247735977173, "learning_rate": 0.0007067706210082892, "loss": 3.3434, "step": 34540 }, { "epoch": 2.347125968202201, "grad_norm": 2.1745190620422363, "learning_rate": 0.0007067281559994565, "loss": 3.5762, "step": 34545 }, { "epoch": 2.3474656882728633, "grad_norm": 2.412179470062256, "learning_rate": 0.0007066856909906237, "loss": 3.69, "step": 34550 }, { "epoch": 2.347805408343525, "grad_norm": 1.7285144329071045, "learning_rate": 0.000706643225981791, "loss": 3.535, "step": 34555 }, { "epoch": 2.3481451284141865, "grad_norm": 1.8718465566635132, "learning_rate": 0.0007066007609729583, "loss": 3.638, "step": 34560 }, { "epoch": 2.3484848484848486, "grad_norm": 3.0688915252685547, "learning_rate": 0.0007065582959641255, "loss": 3.7461, "step": 34565 }, { "epoch": 2.3488245685555103, "grad_norm": 1.6277763843536377, "learning_rate": 0.0007065158309552929, "loss": 3.4563, "step": 34570 }, { "epoch": 2.349164288626172, "grad_norm": 2.163361072540283, "learning_rate": 0.0007064733659464602, "loss": 3.4543, "step": 34575 }, { "epoch": 2.349504008696834, "grad_norm": 2.1946089267730713, "learning_rate": 0.0007064309009376274, "loss": 3.6303, "step": 34580 }, { "epoch": 2.3498437287674956, "grad_norm": 2.2327775955200195, "learning_rate": 0.0007063884359287946, "loss": 3.4211, "step": 34585 }, { "epoch": 2.350183448838157, "grad_norm": 1.5811234712600708, "learning_rate": 0.000706345970919962, "loss": 3.6696, "step": 34590 }, { "epoch": 2.3505231689088193, "grad_norm": 2.028773784637451, "learning_rate": 0.0007063035059111292, "loss": 3.3393, "step": 34595 }, { "epoch": 2.350862888979481, "grad_norm": 1.6311196088790894, "learning_rate": 0.0007062610409022964, "loss": 3.5523, "step": 34600 }, { "epoch": 2.3512026090501426, "grad_norm": 2.1475343704223633, "learning_rate": 
0.0007062185758934639, "loss": 3.5756, "step": 34605 }, { "epoch": 2.3515423291208046, "grad_norm": 1.7830862998962402, "learning_rate": 0.0007061761108846311, "loss": 3.5862, "step": 34610 }, { "epoch": 2.3518820491914663, "grad_norm": 1.7080578804016113, "learning_rate": 0.0007061336458757983, "loss": 3.3495, "step": 34615 }, { "epoch": 2.352221769262128, "grad_norm": 1.5517289638519287, "learning_rate": 0.0007060911808669657, "loss": 3.4178, "step": 34620 }, { "epoch": 2.35256148933279, "grad_norm": 1.5579440593719482, "learning_rate": 0.0007060487158581329, "loss": 3.529, "step": 34625 }, { "epoch": 2.3529012094034516, "grad_norm": 2.1456780433654785, "learning_rate": 0.0007060062508493001, "loss": 3.5974, "step": 34630 }, { "epoch": 2.3532409294741132, "grad_norm": 2.6411187648773193, "learning_rate": 0.0007059637858404674, "loss": 3.7465, "step": 34635 }, { "epoch": 2.3535806495447753, "grad_norm": 1.9173314571380615, "learning_rate": 0.0007059213208316348, "loss": 3.4557, "step": 34640 }, { "epoch": 2.353920369615437, "grad_norm": 1.6841566562652588, "learning_rate": 0.000705878855822802, "loss": 3.5123, "step": 34645 }, { "epoch": 2.3542600896860986, "grad_norm": 2.010850429534912, "learning_rate": 0.0007058363908139693, "loss": 3.5185, "step": 34650 }, { "epoch": 2.3545998097567606, "grad_norm": 2.3556506633758545, "learning_rate": 0.0007057939258051366, "loss": 3.7484, "step": 34655 }, { "epoch": 2.3549395298274223, "grad_norm": 1.65440833568573, "learning_rate": 0.0007057514607963038, "loss": 3.5331, "step": 34660 }, { "epoch": 2.355279249898084, "grad_norm": 2.216373920440674, "learning_rate": 0.0007057089957874711, "loss": 3.6394, "step": 34665 }, { "epoch": 2.355618969968746, "grad_norm": 1.6998921632766724, "learning_rate": 0.0007056665307786384, "loss": 3.4978, "step": 34670 }, { "epoch": 2.3559586900394076, "grad_norm": 1.5861883163452148, "learning_rate": 0.0007056240657698057, "loss": 3.5859, "step": 34675 }, { "epoch": 2.3562984101100692, 
"grad_norm": 2.3174374103546143, "learning_rate": 0.000705581600760973, "loss": 3.5025, "step": 34680 }, { "epoch": 2.3566381301807313, "grad_norm": 1.7152624130249023, "learning_rate": 0.0007055391357521403, "loss": 3.4883, "step": 34685 }, { "epoch": 2.356977850251393, "grad_norm": 1.598246455192566, "learning_rate": 0.0007054966707433075, "loss": 3.5362, "step": 34690 }, { "epoch": 2.3573175703220546, "grad_norm": 1.8444340229034424, "learning_rate": 0.0007054542057344748, "loss": 3.4332, "step": 34695 }, { "epoch": 2.357657290392716, "grad_norm": 1.9186898469924927, "learning_rate": 0.000705411740725642, "loss": 3.805, "step": 34700 }, { "epoch": 2.3579970104633783, "grad_norm": 2.0605452060699463, "learning_rate": 0.0007053692757168093, "loss": 3.4123, "step": 34705 }, { "epoch": 2.35833673053404, "grad_norm": 1.8279789686203003, "learning_rate": 0.0007053268107079767, "loss": 3.3633, "step": 34710 }, { "epoch": 2.3586764506047015, "grad_norm": 2.034036636352539, "learning_rate": 0.0007052843456991439, "loss": 3.2722, "step": 34715 }, { "epoch": 2.3590161706753636, "grad_norm": 1.608142375946045, "learning_rate": 0.0007052418806903112, "loss": 3.6284, "step": 34720 }, { "epoch": 2.3593558907460253, "grad_norm": 2.0061228275299072, "learning_rate": 0.0007051994156814785, "loss": 3.6001, "step": 34725 }, { "epoch": 2.359695610816687, "grad_norm": 2.2102181911468506, "learning_rate": 0.0007051569506726457, "loss": 3.4866, "step": 34730 }, { "epoch": 2.360035330887349, "grad_norm": 1.8662172555923462, "learning_rate": 0.000705114485663813, "loss": 3.4177, "step": 34735 }, { "epoch": 2.3603750509580106, "grad_norm": 2.6544575691223145, "learning_rate": 0.0007050720206549803, "loss": 3.1679, "step": 34740 }, { "epoch": 2.360714771028672, "grad_norm": 2.619262456893921, "learning_rate": 0.0007050295556461476, "loss": 3.4128, "step": 34745 }, { "epoch": 2.3610544910993343, "grad_norm": 1.8021514415740967, "learning_rate": 0.000704987090637315, "loss": 3.5739, "step": 
34750 }, { "epoch": 2.361394211169996, "grad_norm": 1.7936509847640991, "learning_rate": 0.0007049446256284822, "loss": 3.5012, "step": 34755 }, { "epoch": 2.3617339312406576, "grad_norm": 1.7317925691604614, "learning_rate": 0.0007049021606196494, "loss": 3.7045, "step": 34760 }, { "epoch": 2.3620736513113196, "grad_norm": 1.9223159551620483, "learning_rate": 0.0007048596956108167, "loss": 3.5367, "step": 34765 }, { "epoch": 2.3624133713819813, "grad_norm": 2.1390888690948486, "learning_rate": 0.000704817230601984, "loss": 3.2858, "step": 34770 }, { "epoch": 2.362753091452643, "grad_norm": 2.259385585784912, "learning_rate": 0.0007047747655931512, "loss": 3.5629, "step": 34775 }, { "epoch": 2.363092811523305, "grad_norm": 1.9637082815170288, "learning_rate": 0.0007047323005843186, "loss": 3.5182, "step": 34780 }, { "epoch": 2.3634325315939666, "grad_norm": 1.9846597909927368, "learning_rate": 0.0007046898355754859, "loss": 3.5526, "step": 34785 }, { "epoch": 2.3637722516646282, "grad_norm": 2.696272134780884, "learning_rate": 0.0007046473705666531, "loss": 3.7524, "step": 34790 }, { "epoch": 2.3641119717352903, "grad_norm": 1.4540472030639648, "learning_rate": 0.0007046049055578204, "loss": 3.4512, "step": 34795 }, { "epoch": 2.364451691805952, "grad_norm": 2.06900691986084, "learning_rate": 0.0007045624405489876, "loss": 3.6194, "step": 34800 }, { "epoch": 2.3647914118766136, "grad_norm": 1.6518833637237549, "learning_rate": 0.0007045199755401549, "loss": 3.5794, "step": 34805 }, { "epoch": 2.365131131947275, "grad_norm": 2.0230660438537598, "learning_rate": 0.0007044775105313222, "loss": 3.5486, "step": 34810 }, { "epoch": 2.3654708520179373, "grad_norm": 1.862168550491333, "learning_rate": 0.0007044350455224895, "loss": 3.2742, "step": 34815 }, { "epoch": 2.365810572088599, "grad_norm": 1.7898906469345093, "learning_rate": 0.0007043925805136568, "loss": 3.2704, "step": 34820 }, { "epoch": 2.3661502921592605, "grad_norm": 1.5742279291152954, "learning_rate": 
0.0007043501155048241, "loss": 3.1288, "step": 34825 }, { "epoch": 2.3664900122299226, "grad_norm": 2.0262672901153564, "learning_rate": 0.0007043076504959913, "loss": 3.2765, "step": 34830 }, { "epoch": 2.3668297323005842, "grad_norm": 1.7487481832504272, "learning_rate": 0.0007042651854871585, "loss": 3.7742, "step": 34835 }, { "epoch": 2.367169452371246, "grad_norm": 1.7609755992889404, "learning_rate": 0.0007042227204783259, "loss": 3.7013, "step": 34840 }, { "epoch": 2.367509172441908, "grad_norm": 2.1159043312072754, "learning_rate": 0.0007041802554694931, "loss": 3.5076, "step": 34845 }, { "epoch": 2.3678488925125696, "grad_norm": 1.8568037748336792, "learning_rate": 0.0007041377904606604, "loss": 3.5948, "step": 34850 }, { "epoch": 2.368188612583231, "grad_norm": 1.8071175813674927, "learning_rate": 0.0007040953254518278, "loss": 3.4605, "step": 34855 }, { "epoch": 2.3685283326538933, "grad_norm": 1.6104813814163208, "learning_rate": 0.000704052860442995, "loss": 3.3784, "step": 34860 }, { "epoch": 2.368868052724555, "grad_norm": 1.9830524921417236, "learning_rate": 0.0007040103954341622, "loss": 3.4259, "step": 34865 }, { "epoch": 2.3692077727952165, "grad_norm": 2.3072988986968994, "learning_rate": 0.0007039679304253296, "loss": 3.6502, "step": 34870 }, { "epoch": 2.3695474928658786, "grad_norm": 1.8406791687011719, "learning_rate": 0.0007039254654164968, "loss": 3.5805, "step": 34875 }, { "epoch": 2.3698872129365403, "grad_norm": 1.8793367147445679, "learning_rate": 0.000703883000407664, "loss": 3.5678, "step": 34880 }, { "epoch": 2.370226933007202, "grad_norm": 2.1085333824157715, "learning_rate": 0.0007038405353988315, "loss": 3.6429, "step": 34885 }, { "epoch": 2.370566653077864, "grad_norm": 1.5121113061904907, "learning_rate": 0.0007037980703899987, "loss": 3.5823, "step": 34890 }, { "epoch": 2.3709063731485256, "grad_norm": 2.1265573501586914, "learning_rate": 0.0007037556053811659, "loss": 3.5735, "step": 34895 }, { "epoch": 2.3712460932191872, 
"grad_norm": 1.5965701341629028, "learning_rate": 0.0007037131403723332, "loss": 3.6612, "step": 34900 }, { "epoch": 2.3715858132898493, "grad_norm": 1.911404013633728, "learning_rate": 0.0007036706753635005, "loss": 3.4659, "step": 34905 }, { "epoch": 2.371925533360511, "grad_norm": 3.5649704933166504, "learning_rate": 0.0007036282103546677, "loss": 3.3094, "step": 34910 }, { "epoch": 2.3722652534311726, "grad_norm": 1.6194037199020386, "learning_rate": 0.0007035857453458351, "loss": 3.7589, "step": 34915 }, { "epoch": 2.3726049735018346, "grad_norm": 1.8955755233764648, "learning_rate": 0.0007035432803370024, "loss": 3.5317, "step": 34920 }, { "epoch": 2.3729446935724963, "grad_norm": 2.4950764179229736, "learning_rate": 0.0007035008153281696, "loss": 3.5258, "step": 34925 }, { "epoch": 2.373284413643158, "grad_norm": 2.111565589904785, "learning_rate": 0.0007034583503193369, "loss": 3.6073, "step": 34930 }, { "epoch": 2.37362413371382, "grad_norm": 2.7103285789489746, "learning_rate": 0.0007034158853105041, "loss": 3.7065, "step": 34935 }, { "epoch": 2.3739638537844816, "grad_norm": 1.9483813047409058, "learning_rate": 0.0007033734203016714, "loss": 3.5672, "step": 34940 }, { "epoch": 2.3743035738551432, "grad_norm": 2.0093181133270264, "learning_rate": 0.0007033309552928387, "loss": 3.3257, "step": 34945 }, { "epoch": 2.3746432939258053, "grad_norm": 2.551332712173462, "learning_rate": 0.000703288490284006, "loss": 3.6739, "step": 34950 }, { "epoch": 2.374983013996467, "grad_norm": 2.531780481338501, "learning_rate": 0.0007032460252751733, "loss": 3.5326, "step": 34955 }, { "epoch": 2.3753227340671286, "grad_norm": 1.8458983898162842, "learning_rate": 0.0007032035602663406, "loss": 3.5802, "step": 34960 }, { "epoch": 2.3756624541377906, "grad_norm": 1.957966923713684, "learning_rate": 0.0007031610952575078, "loss": 3.5808, "step": 34965 }, { "epoch": 2.3760021742084523, "grad_norm": 1.7159204483032227, "learning_rate": 0.000703118630248675, "loss": 3.5783, 
"step": 34970 }, { "epoch": 2.376341894279114, "grad_norm": 1.9480488300323486, "learning_rate": 0.0007030761652398424, "loss": 3.5961, "step": 34975 }, { "epoch": 2.376681614349776, "grad_norm": 1.9923564195632935, "learning_rate": 0.0007030337002310096, "loss": 3.385, "step": 34980 }, { "epoch": 2.3770213344204376, "grad_norm": 2.378236770629883, "learning_rate": 0.000702991235222177, "loss": 3.4872, "step": 34985 }, { "epoch": 2.3773610544910992, "grad_norm": 1.725236177444458, "learning_rate": 0.0007029487702133443, "loss": 3.7199, "step": 34990 }, { "epoch": 2.3777007745617613, "grad_norm": 2.186816692352295, "learning_rate": 0.0007029063052045115, "loss": 3.4392, "step": 34995 }, { "epoch": 2.378040494632423, "grad_norm": 1.797268271446228, "learning_rate": 0.0007028638401956787, "loss": 3.6687, "step": 35000 }, { "epoch": 2.3783802147030846, "grad_norm": 2.1388497352600098, "learning_rate": 0.0007028213751868461, "loss": 3.3561, "step": 35005 }, { "epoch": 2.3787199347737467, "grad_norm": 2.0825295448303223, "learning_rate": 0.0007027789101780133, "loss": 3.6021, "step": 35010 }, { "epoch": 2.3790596548444083, "grad_norm": 1.8206828832626343, "learning_rate": 0.0007027364451691805, "loss": 3.463, "step": 35015 }, { "epoch": 2.37939937491507, "grad_norm": 1.840709924697876, "learning_rate": 0.000702693980160348, "loss": 3.6579, "step": 35020 }, { "epoch": 2.379739094985732, "grad_norm": 2.034618854522705, "learning_rate": 0.0007026515151515152, "loss": 3.6856, "step": 35025 }, { "epoch": 2.3800788150563936, "grad_norm": 2.521925687789917, "learning_rate": 0.0007026090501426824, "loss": 3.5258, "step": 35030 }, { "epoch": 2.3804185351270553, "grad_norm": 1.678084373474121, "learning_rate": 0.0007025665851338497, "loss": 3.5475, "step": 35035 }, { "epoch": 2.380758255197717, "grad_norm": 2.777280569076538, "learning_rate": 0.000702524120125017, "loss": 3.7143, "step": 35040 }, { "epoch": 2.381097975268379, "grad_norm": 2.646782636642456, "learning_rate": 
0.0007024816551161842, "loss": 3.2721, "step": 35045 }, { "epoch": 2.3814376953390406, "grad_norm": 1.843166708946228, "learning_rate": 0.0007024391901073515, "loss": 3.1771, "step": 35050 }, { "epoch": 2.3817774154097022, "grad_norm": 2.512573719024658, "learning_rate": 0.0007023967250985189, "loss": 3.5303, "step": 35055 }, { "epoch": 2.3821171354803643, "grad_norm": 1.81061851978302, "learning_rate": 0.0007023542600896861, "loss": 3.6474, "step": 35060 }, { "epoch": 2.382456855551026, "grad_norm": 1.703484058380127, "learning_rate": 0.0007023117950808534, "loss": 3.5189, "step": 35065 }, { "epoch": 2.3827965756216876, "grad_norm": 2.532001495361328, "learning_rate": 0.0007022693300720207, "loss": 3.5346, "step": 35070 }, { "epoch": 2.3831362956923496, "grad_norm": 2.0093986988067627, "learning_rate": 0.000702226865063188, "loss": 3.5071, "step": 35075 }, { "epoch": 2.3834760157630113, "grad_norm": 1.7227047681808472, "learning_rate": 0.0007021844000543552, "loss": 3.514, "step": 35080 }, { "epoch": 2.383815735833673, "grad_norm": 2.408245801925659, "learning_rate": 0.0007021419350455224, "loss": 3.4686, "step": 35085 }, { "epoch": 2.384155455904335, "grad_norm": 1.8095678091049194, "learning_rate": 0.0007020994700366899, "loss": 3.4463, "step": 35090 }, { "epoch": 2.3844951759749966, "grad_norm": 2.036375045776367, "learning_rate": 0.0007020570050278571, "loss": 3.6934, "step": 35095 }, { "epoch": 2.3848348960456582, "grad_norm": 1.6186732053756714, "learning_rate": 0.0007020145400190243, "loss": 3.7056, "step": 35100 }, { "epoch": 2.3851746161163203, "grad_norm": 1.6837347745895386, "learning_rate": 0.0007019720750101917, "loss": 3.6893, "step": 35105 }, { "epoch": 2.385514336186982, "grad_norm": 1.85063898563385, "learning_rate": 0.0007019296100013589, "loss": 3.5236, "step": 35110 }, { "epoch": 2.3858540562576436, "grad_norm": 1.9001227617263794, "learning_rate": 0.0007018871449925261, "loss": 3.5108, "step": 35115 }, { "epoch": 2.3861937763283056, 
"grad_norm": 2.184791326522827, "learning_rate": 0.0007018446799836935, "loss": 3.7781, "step": 35120 }, { "epoch": 2.3865334963989673, "grad_norm": 2.0151591300964355, "learning_rate": 0.0007018022149748608, "loss": 3.5804, "step": 35125 }, { "epoch": 2.386873216469629, "grad_norm": 1.849593162536621, "learning_rate": 0.000701759749966028, "loss": 3.7352, "step": 35130 }, { "epoch": 2.387212936540291, "grad_norm": 1.9484760761260986, "learning_rate": 0.0007017172849571953, "loss": 3.4917, "step": 35135 }, { "epoch": 2.3875526566109526, "grad_norm": 1.7092630863189697, "learning_rate": 0.0007016748199483626, "loss": 3.4112, "step": 35140 }, { "epoch": 2.3878923766816142, "grad_norm": 1.9380918741226196, "learning_rate": 0.0007016323549395298, "loss": 3.6313, "step": 35145 }, { "epoch": 2.388232096752276, "grad_norm": 2.456320285797119, "learning_rate": 0.0007015898899306971, "loss": 3.5174, "step": 35150 }, { "epoch": 2.388571816822938, "grad_norm": 1.8064788579940796, "learning_rate": 0.0007015474249218644, "loss": 3.4076, "step": 35155 }, { "epoch": 2.3889115368935996, "grad_norm": 1.7989118099212646, "learning_rate": 0.0007015049599130317, "loss": 3.6387, "step": 35160 }, { "epoch": 2.389251256964261, "grad_norm": 2.406550884246826, "learning_rate": 0.000701462494904199, "loss": 3.6861, "step": 35165 }, { "epoch": 2.3895909770349233, "grad_norm": 2.017207384109497, "learning_rate": 0.0007014200298953663, "loss": 3.4048, "step": 35170 }, { "epoch": 2.389930697105585, "grad_norm": 2.2493956089019775, "learning_rate": 0.0007013775648865335, "loss": 3.6637, "step": 35175 }, { "epoch": 2.3902704171762466, "grad_norm": 1.958690881729126, "learning_rate": 0.0007013350998777008, "loss": 3.4084, "step": 35180 }, { "epoch": 2.3906101372469086, "grad_norm": 2.240316152572632, "learning_rate": 0.000701292634868868, "loss": 3.5217, "step": 35185 }, { "epoch": 2.3909498573175703, "grad_norm": 1.8016092777252197, "learning_rate": 0.0007012501698600353, "loss": 3.4396, "step": 
35190 }, { "epoch": 2.391289577388232, "grad_norm": 1.9924912452697754, "learning_rate": 0.0007012077048512027, "loss": 3.7059, "step": 35195 }, { "epoch": 2.391629297458894, "grad_norm": 1.9180006980895996, "learning_rate": 0.0007011652398423699, "loss": 3.6987, "step": 35200 }, { "epoch": 2.3919690175295556, "grad_norm": 2.1830801963806152, "learning_rate": 0.0007011227748335372, "loss": 3.1946, "step": 35205 }, { "epoch": 2.3923087376002172, "grad_norm": 1.9436671733856201, "learning_rate": 0.0007010803098247045, "loss": 4.0481, "step": 35210 }, { "epoch": 2.3926484576708793, "grad_norm": 2.236865282058716, "learning_rate": 0.0007010378448158717, "loss": 3.5586, "step": 35215 }, { "epoch": 2.392988177741541, "grad_norm": 1.6454622745513916, "learning_rate": 0.000700995379807039, "loss": 3.6319, "step": 35220 }, { "epoch": 2.3933278978122026, "grad_norm": 2.0325124263763428, "learning_rate": 0.0007009529147982063, "loss": 3.712, "step": 35225 }, { "epoch": 2.3936676178828646, "grad_norm": 2.191471576690674, "learning_rate": 0.0007009104497893736, "loss": 3.6705, "step": 35230 }, { "epoch": 2.3940073379535263, "grad_norm": 2.4679903984069824, "learning_rate": 0.0007008679847805408, "loss": 3.4659, "step": 35235 }, { "epoch": 2.394347058024188, "grad_norm": 2.264443874359131, "learning_rate": 0.0007008255197717082, "loss": 3.6612, "step": 35240 }, { "epoch": 2.39468677809485, "grad_norm": 1.7647898197174072, "learning_rate": 0.0007007830547628754, "loss": 3.2688, "step": 35245 }, { "epoch": 2.3950264981655116, "grad_norm": 2.3698413372039795, "learning_rate": 0.0007007405897540426, "loss": 3.5232, "step": 35250 }, { "epoch": 2.3953662182361732, "grad_norm": 1.9177385568618774, "learning_rate": 0.00070069812474521, "loss": 3.3794, "step": 35255 }, { "epoch": 2.3957059383068353, "grad_norm": 2.31424880027771, "learning_rate": 0.0007006556597363772, "loss": 3.4777, "step": 35260 }, { "epoch": 2.396045658377497, "grad_norm": 1.7221498489379883, "learning_rate": 
0.0007006131947275445, "loss": 3.8067, "step": 35265 }, { "epoch": 2.3963853784481586, "grad_norm": 1.7910194396972656, "learning_rate": 0.0007005707297187119, "loss": 3.6361, "step": 35270 }, { "epoch": 2.3967250985188207, "grad_norm": 1.9175963401794434, "learning_rate": 0.0007005282647098791, "loss": 3.5156, "step": 35275 }, { "epoch": 2.3970648185894823, "grad_norm": 1.4684908390045166, "learning_rate": 0.0007004857997010463, "loss": 3.5702, "step": 35280 }, { "epoch": 2.397404538660144, "grad_norm": 1.5626041889190674, "learning_rate": 0.0007004433346922136, "loss": 3.5677, "step": 35285 }, { "epoch": 2.397744258730806, "grad_norm": 1.8076492547988892, "learning_rate": 0.0007004008696833809, "loss": 3.6631, "step": 35290 }, { "epoch": 2.3980839788014676, "grad_norm": 1.5365440845489502, "learning_rate": 0.0007003584046745481, "loss": 3.601, "step": 35295 }, { "epoch": 2.3984236988721293, "grad_norm": 2.219961404800415, "learning_rate": 0.0007003159396657155, "loss": 3.5417, "step": 35300 }, { "epoch": 2.3987634189427913, "grad_norm": 1.6675089597702026, "learning_rate": 0.0007002734746568828, "loss": 3.6326, "step": 35305 }, { "epoch": 2.399103139013453, "grad_norm": 2.2132773399353027, "learning_rate": 0.00070023100964805, "loss": 3.2592, "step": 35310 }, { "epoch": 2.3994428590841146, "grad_norm": 2.3459832668304443, "learning_rate": 0.0007001885446392173, "loss": 3.3807, "step": 35315 }, { "epoch": 2.3997825791547767, "grad_norm": 2.074960708618164, "learning_rate": 0.0007001460796303845, "loss": 3.5498, "step": 35320 }, { "epoch": 2.4001222992254383, "grad_norm": 1.8240230083465576, "learning_rate": 0.0007001036146215518, "loss": 3.5796, "step": 35325 }, { "epoch": 2.4004620192961, "grad_norm": 1.8335682153701782, "learning_rate": 0.0007000611496127191, "loss": 3.4748, "step": 35330 }, { "epoch": 2.400801739366762, "grad_norm": 1.517665982246399, "learning_rate": 0.0007000186846038864, "loss": 3.6719, "step": 35335 }, { "epoch": 2.4011414594374236, 
"grad_norm": 1.6717185974121094, "learning_rate": 0.0006999762195950537, "loss": 3.4444, "step": 35340 }, { "epoch": 2.4014811795080853, "grad_norm": 2.224090337753296, "learning_rate": 0.000699933754586221, "loss": 3.5478, "step": 35345 }, { "epoch": 2.4018208995787473, "grad_norm": 2.6741819381713867, "learning_rate": 0.0006998912895773882, "loss": 3.3591, "step": 35350 }, { "epoch": 2.402160619649409, "grad_norm": 2.417264699935913, "learning_rate": 0.0006998488245685555, "loss": 3.4399, "step": 35355 }, { "epoch": 2.4025003397200706, "grad_norm": 2.9679574966430664, "learning_rate": 0.0006998063595597228, "loss": 3.5167, "step": 35360 }, { "epoch": 2.4028400597907327, "grad_norm": 2.871196746826172, "learning_rate": 0.00069976389455089, "loss": 3.4987, "step": 35365 }, { "epoch": 2.4031797798613943, "grad_norm": 1.8195788860321045, "learning_rate": 0.0006997214295420574, "loss": 3.6301, "step": 35370 }, { "epoch": 2.403519499932056, "grad_norm": 1.62355375289917, "learning_rate": 0.0006996789645332247, "loss": 3.3678, "step": 35375 }, { "epoch": 2.4038592200027176, "grad_norm": 1.9957350492477417, "learning_rate": 0.0006996364995243919, "loss": 3.4377, "step": 35380 }, { "epoch": 2.4041989400733796, "grad_norm": 1.9795594215393066, "learning_rate": 0.0006995940345155591, "loss": 3.5775, "step": 35385 }, { "epoch": 2.4045386601440413, "grad_norm": 4.263587474822998, "learning_rate": 0.0006995515695067265, "loss": 3.5822, "step": 35390 }, { "epoch": 2.404878380214703, "grad_norm": 2.1471734046936035, "learning_rate": 0.0006995091044978937, "loss": 3.5242, "step": 35395 }, { "epoch": 2.405218100285365, "grad_norm": 1.7790414094924927, "learning_rate": 0.0006994666394890609, "loss": 3.5486, "step": 35400 }, { "epoch": 2.4055578203560266, "grad_norm": 2.080782175064087, "learning_rate": 0.0006994241744802284, "loss": 3.7145, "step": 35405 }, { "epoch": 2.4058975404266882, "grad_norm": 2.244824171066284, "learning_rate": 0.0006993817094713956, "loss": 3.3294, "step": 
35410 }, { "epoch": 2.4062372604973503, "grad_norm": 1.7604119777679443, "learning_rate": 0.0006993392444625629, "loss": 3.3419, "step": 35415 }, { "epoch": 2.406576980568012, "grad_norm": 1.6357545852661133, "learning_rate": 0.0006992967794537302, "loss": 3.4962, "step": 35420 }, { "epoch": 2.4069167006386736, "grad_norm": 2.3346595764160156, "learning_rate": 0.0006992543144448974, "loss": 3.641, "step": 35425 }, { "epoch": 2.4072564207093357, "grad_norm": 1.7876399755477905, "learning_rate": 0.0006992118494360647, "loss": 3.5666, "step": 35430 }, { "epoch": 2.4075961407799973, "grad_norm": 3.245833396911621, "learning_rate": 0.000699169384427232, "loss": 3.4864, "step": 35435 }, { "epoch": 2.407935860850659, "grad_norm": 1.7372450828552246, "learning_rate": 0.0006991269194183993, "loss": 3.374, "step": 35440 }, { "epoch": 2.408275580921321, "grad_norm": 2.1038832664489746, "learning_rate": 0.0006990844544095666, "loss": 3.7077, "step": 35445 }, { "epoch": 2.4086153009919826, "grad_norm": 2.438380479812622, "learning_rate": 0.0006990419894007338, "loss": 3.8536, "step": 35450 }, { "epoch": 2.4089550210626443, "grad_norm": 1.8128688335418701, "learning_rate": 0.0006989995243919011, "loss": 3.5504, "step": 35455 }, { "epoch": 2.4092947411333063, "grad_norm": 2.462414026260376, "learning_rate": 0.0006989570593830684, "loss": 3.6887, "step": 35460 }, { "epoch": 2.409634461203968, "grad_norm": 2.1193766593933105, "learning_rate": 0.0006989145943742356, "loss": 3.3309, "step": 35465 }, { "epoch": 2.4099741812746296, "grad_norm": 1.6574574708938599, "learning_rate": 0.000698872129365403, "loss": 3.5625, "step": 35470 }, { "epoch": 2.4103139013452917, "grad_norm": 2.2495057582855225, "learning_rate": 0.0006988296643565703, "loss": 3.4705, "step": 35475 }, { "epoch": 2.4106536214159533, "grad_norm": 1.5459872484207153, "learning_rate": 0.0006987871993477375, "loss": 3.4562, "step": 35480 }, { "epoch": 2.410993341486615, "grad_norm": 2.129889488220215, "learning_rate": 
0.0006987447343389047, "loss": 3.6172, "step": 35485 }, { "epoch": 2.4113330615572766, "grad_norm": 1.7295430898666382, "learning_rate": 0.0006987022693300721, "loss": 3.6809, "step": 35490 }, { "epoch": 2.4116727816279386, "grad_norm": 2.074018716812134, "learning_rate": 0.0006986598043212393, "loss": 3.6585, "step": 35495 }, { "epoch": 2.4120125016986003, "grad_norm": 2.146214485168457, "learning_rate": 0.0006986173393124065, "loss": 3.6472, "step": 35500 }, { "epoch": 2.412352221769262, "grad_norm": 2.766580820083618, "learning_rate": 0.000698574874303574, "loss": 3.2561, "step": 35505 }, { "epoch": 2.412691941839924, "grad_norm": 2.8579654693603516, "learning_rate": 0.0006985324092947412, "loss": 3.5305, "step": 35510 }, { "epoch": 2.4130316619105856, "grad_norm": 1.5837202072143555, "learning_rate": 0.0006984899442859084, "loss": 3.4471, "step": 35515 }, { "epoch": 2.4133713819812472, "grad_norm": 2.624573230743408, "learning_rate": 0.0006984474792770758, "loss": 3.879, "step": 35520 }, { "epoch": 2.4137111020519093, "grad_norm": 2.6846771240234375, "learning_rate": 0.000698405014268243, "loss": 3.5426, "step": 35525 }, { "epoch": 2.414050822122571, "grad_norm": 2.430037021636963, "learning_rate": 0.0006983625492594102, "loss": 3.6794, "step": 35530 }, { "epoch": 2.4143905421932326, "grad_norm": 1.5602498054504395, "learning_rate": 0.0006983200842505775, "loss": 3.6011, "step": 35535 }, { "epoch": 2.4147302622638946, "grad_norm": 1.9872967004776, "learning_rate": 0.0006982776192417449, "loss": 3.5839, "step": 35540 }, { "epoch": 2.4150699823345563, "grad_norm": 1.7084194421768188, "learning_rate": 0.0006982351542329121, "loss": 3.4655, "step": 35545 }, { "epoch": 2.415409702405218, "grad_norm": 2.0089468955993652, "learning_rate": 0.0006981926892240794, "loss": 3.2806, "step": 35550 }, { "epoch": 2.41574942247588, "grad_norm": 1.8358558416366577, "learning_rate": 0.0006981502242152467, "loss": 3.6695, "step": 35555 }, { "epoch": 2.4160891425465416, 
"grad_norm": 1.7341835498809814, "learning_rate": 0.0006981077592064139, "loss": 3.5885, "step": 35560 }, { "epoch": 2.4164288626172032, "grad_norm": 1.8609637022018433, "learning_rate": 0.0006980652941975812, "loss": 3.3986, "step": 35565 }, { "epoch": 2.4167685826878653, "grad_norm": 2.1177735328674316, "learning_rate": 0.0006980228291887484, "loss": 3.4709, "step": 35570 }, { "epoch": 2.417108302758527, "grad_norm": 1.508009672164917, "learning_rate": 0.0006979803641799158, "loss": 3.4372, "step": 35575 }, { "epoch": 2.4174480228291886, "grad_norm": 2.1557395458221436, "learning_rate": 0.0006979378991710831, "loss": 3.4851, "step": 35580 }, { "epoch": 2.4177877428998507, "grad_norm": 1.7508448362350464, "learning_rate": 0.0006978954341622503, "loss": 3.7215, "step": 35585 }, { "epoch": 2.4181274629705123, "grad_norm": 2.0427067279815674, "learning_rate": 0.0006978529691534176, "loss": 3.4482, "step": 35590 }, { "epoch": 2.418467183041174, "grad_norm": 2.3894474506378174, "learning_rate": 0.0006978105041445849, "loss": 3.7933, "step": 35595 }, { "epoch": 2.418806903111836, "grad_norm": 2.311103582382202, "learning_rate": 0.0006977680391357521, "loss": 3.5854, "step": 35600 }, { "epoch": 2.4191466231824976, "grad_norm": 1.4875462055206299, "learning_rate": 0.0006977255741269194, "loss": 3.4901, "step": 35605 }, { "epoch": 2.4194863432531593, "grad_norm": 1.940286636352539, "learning_rate": 0.0006976831091180868, "loss": 3.5343, "step": 35610 }, { "epoch": 2.4198260633238213, "grad_norm": 2.2043261528015137, "learning_rate": 0.000697640644109254, "loss": 3.6582, "step": 35615 }, { "epoch": 2.420165783394483, "grad_norm": 1.6480674743652344, "learning_rate": 0.0006975981791004212, "loss": 3.6653, "step": 35620 }, { "epoch": 2.4205055034651446, "grad_norm": 1.9096347093582153, "learning_rate": 0.0006975557140915886, "loss": 3.463, "step": 35625 }, { "epoch": 2.4208452235358067, "grad_norm": 2.1835429668426514, "learning_rate": 0.0006975132490827558, "loss": 3.5446, 
"step": 35630 }, { "epoch": 2.4211849436064683, "grad_norm": 1.5971039533615112, "learning_rate": 0.000697470784073923, "loss": 3.6537, "step": 35635 }, { "epoch": 2.42152466367713, "grad_norm": 1.8696925640106201, "learning_rate": 0.0006974283190650904, "loss": 3.3114, "step": 35640 }, { "epoch": 2.421864383747792, "grad_norm": 1.556648850440979, "learning_rate": 0.0006973858540562577, "loss": 3.4197, "step": 35645 }, { "epoch": 2.4222041038184536, "grad_norm": 2.07893443107605, "learning_rate": 0.0006973433890474249, "loss": 3.3831, "step": 35650 }, { "epoch": 2.4225438238891153, "grad_norm": 1.702962040901184, "learning_rate": 0.0006973009240385923, "loss": 3.3954, "step": 35655 }, { "epoch": 2.4228835439597773, "grad_norm": 1.9487494230270386, "learning_rate": 0.0006972584590297595, "loss": 3.6849, "step": 35660 }, { "epoch": 2.423223264030439, "grad_norm": 1.8830925226211548, "learning_rate": 0.0006972159940209267, "loss": 3.5892, "step": 35665 }, { "epoch": 2.4235629841011006, "grad_norm": 2.130437135696411, "learning_rate": 0.000697173529012094, "loss": 3.6337, "step": 35670 }, { "epoch": 2.4239027041717627, "grad_norm": 2.821589231491089, "learning_rate": 0.0006971310640032613, "loss": 3.5215, "step": 35675 }, { "epoch": 2.4242424242424243, "grad_norm": 2.13604474067688, "learning_rate": 0.0006970885989944286, "loss": 3.5519, "step": 35680 }, { "epoch": 2.424582144313086, "grad_norm": 1.748557686805725, "learning_rate": 0.0006970461339855959, "loss": 3.4688, "step": 35685 }, { "epoch": 2.424921864383748, "grad_norm": 2.099729061126709, "learning_rate": 0.0006970036689767632, "loss": 3.5363, "step": 35690 }, { "epoch": 2.4252615844544096, "grad_norm": 1.999743103981018, "learning_rate": 0.0006969612039679304, "loss": 3.3247, "step": 35695 }, { "epoch": 2.4256013045250713, "grad_norm": 2.36936092376709, "learning_rate": 0.0006969187389590977, "loss": 3.4996, "step": 35700 }, { "epoch": 2.4259410245957334, "grad_norm": 2.2106988430023193, "learning_rate": 
0.000696876273950265, "loss": 3.1384, "step": 35705 }, { "epoch": 2.426280744666395, "grad_norm": 1.7302800416946411, "learning_rate": 0.0006968338089414322, "loss": 3.7216, "step": 35710 }, { "epoch": 2.4266204647370566, "grad_norm": 1.7361551523208618, "learning_rate": 0.0006967913439325996, "loss": 3.3958, "step": 35715 }, { "epoch": 2.4269601848077182, "grad_norm": 2.4234063625335693, "learning_rate": 0.0006967488789237668, "loss": 3.5467, "step": 35720 }, { "epoch": 2.4272999048783803, "grad_norm": 1.4114201068878174, "learning_rate": 0.0006967064139149341, "loss": 3.3351, "step": 35725 }, { "epoch": 2.427639624949042, "grad_norm": 2.3251850605010986, "learning_rate": 0.0006966639489061014, "loss": 3.7411, "step": 35730 }, { "epoch": 2.4279793450197036, "grad_norm": 1.7725622653961182, "learning_rate": 0.0006966214838972686, "loss": 3.4776, "step": 35735 }, { "epoch": 2.4283190650903657, "grad_norm": 1.6827088594436646, "learning_rate": 0.0006965790188884359, "loss": 3.3849, "step": 35740 }, { "epoch": 2.4286587851610273, "grad_norm": 2.155423164367676, "learning_rate": 0.0006965365538796032, "loss": 3.2902, "step": 35745 }, { "epoch": 2.428998505231689, "grad_norm": 2.1729938983917236, "learning_rate": 0.0006964940888707705, "loss": 3.6115, "step": 35750 }, { "epoch": 2.429338225302351, "grad_norm": 2.107707977294922, "learning_rate": 0.0006964516238619379, "loss": 3.4787, "step": 35755 }, { "epoch": 2.4296779453730126, "grad_norm": 1.8188971281051636, "learning_rate": 0.0006964091588531051, "loss": 3.8007, "step": 35760 }, { "epoch": 2.4300176654436743, "grad_norm": 2.195333242416382, "learning_rate": 0.0006963666938442723, "loss": 3.6423, "step": 35765 }, { "epoch": 2.4303573855143363, "grad_norm": 1.900538682937622, "learning_rate": 0.0006963242288354396, "loss": 3.6778, "step": 35770 }, { "epoch": 2.430697105584998, "grad_norm": 1.9963064193725586, "learning_rate": 0.0006962817638266069, "loss": 3.5531, "step": 35775 }, { "epoch": 2.4310368256556596, 
"grad_norm": 1.6028969287872314, "learning_rate": 0.0006962392988177741, "loss": 3.1955, "step": 35780 }, { "epoch": 2.4313765457263217, "grad_norm": 2.0728228092193604, "learning_rate": 0.0006961968338089415, "loss": 3.5295, "step": 35785 }, { "epoch": 2.4317162657969833, "grad_norm": 1.6355764865875244, "learning_rate": 0.0006961543688001088, "loss": 3.4527, "step": 35790 }, { "epoch": 2.432055985867645, "grad_norm": 2.171161413192749, "learning_rate": 0.000696111903791276, "loss": 3.7892, "step": 35795 }, { "epoch": 2.432395705938307, "grad_norm": 2.1087965965270996, "learning_rate": 0.0006960694387824433, "loss": 3.5532, "step": 35800 }, { "epoch": 2.4327354260089686, "grad_norm": 2.5525732040405273, "learning_rate": 0.0006960269737736106, "loss": 3.486, "step": 35805 }, { "epoch": 2.4330751460796303, "grad_norm": 2.124913215637207, "learning_rate": 0.0006959845087647778, "loss": 3.5843, "step": 35810 }, { "epoch": 2.4334148661502923, "grad_norm": 1.8838473558425903, "learning_rate": 0.0006959420437559451, "loss": 3.4511, "step": 35815 }, { "epoch": 2.433754586220954, "grad_norm": 1.5275065898895264, "learning_rate": 0.0006958995787471124, "loss": 3.6516, "step": 35820 }, { "epoch": 2.4340943062916156, "grad_norm": 2.3455097675323486, "learning_rate": 0.0006958571137382797, "loss": 3.5047, "step": 35825 }, { "epoch": 2.4344340263622772, "grad_norm": 1.7476016283035278, "learning_rate": 0.000695814648729447, "loss": 3.4577, "step": 35830 }, { "epoch": 2.4347737464329393, "grad_norm": 1.9290564060211182, "learning_rate": 0.0006957721837206142, "loss": 3.3057, "step": 35835 }, { "epoch": 2.435113466503601, "grad_norm": 1.9494627714157104, "learning_rate": 0.0006957297187117815, "loss": 3.682, "step": 35840 }, { "epoch": 2.4354531865742626, "grad_norm": 2.1546874046325684, "learning_rate": 0.0006956872537029488, "loss": 3.3428, "step": 35845 }, { "epoch": 2.4357929066449246, "grad_norm": 2.201663017272949, "learning_rate": 0.000695644788694116, "loss": 3.4318, 
"step": 35850 }, { "epoch": 2.4361326267155863, "grad_norm": 2.0204763412475586, "learning_rate": 0.0006956023236852834, "loss": 3.5777, "step": 35855 }, { "epoch": 2.436472346786248, "grad_norm": 2.170245409011841, "learning_rate": 0.0006955598586764507, "loss": 3.7875, "step": 35860 }, { "epoch": 2.43681206685691, "grad_norm": 2.0559256076812744, "learning_rate": 0.0006955173936676179, "loss": 3.4579, "step": 35865 }, { "epoch": 2.4371517869275716, "grad_norm": 2.0949008464813232, "learning_rate": 0.0006954749286587851, "loss": 3.5172, "step": 35870 }, { "epoch": 2.4374915069982332, "grad_norm": 2.092940092086792, "learning_rate": 0.0006954324636499525, "loss": 3.5636, "step": 35875 }, { "epoch": 2.4378312270688953, "grad_norm": 2.1740665435791016, "learning_rate": 0.0006953899986411197, "loss": 3.4922, "step": 35880 }, { "epoch": 2.438170947139557, "grad_norm": 2.3623828887939453, "learning_rate": 0.0006953475336322869, "loss": 3.3388, "step": 35885 }, { "epoch": 2.4385106672102186, "grad_norm": 2.0144903659820557, "learning_rate": 0.0006953050686234544, "loss": 3.5401, "step": 35890 }, { "epoch": 2.4388503872808807, "grad_norm": 1.7455930709838867, "learning_rate": 0.0006952626036146216, "loss": 3.5166, "step": 35895 }, { "epoch": 2.4391901073515423, "grad_norm": 2.0754454135894775, "learning_rate": 0.0006952201386057888, "loss": 3.4099, "step": 35900 }, { "epoch": 2.439529827422204, "grad_norm": 6.21416711807251, "learning_rate": 0.0006951776735969562, "loss": 3.4516, "step": 35905 }, { "epoch": 2.439869547492866, "grad_norm": 1.5274250507354736, "learning_rate": 0.0006951352085881234, "loss": 3.772, "step": 35910 }, { "epoch": 2.4402092675635276, "grad_norm": 1.9679287672042847, "learning_rate": 0.0006950927435792906, "loss": 3.5329, "step": 35915 }, { "epoch": 2.4405489876341893, "grad_norm": 2.2358381748199463, "learning_rate": 0.0006950502785704579, "loss": 3.3692, "step": 35920 }, { "epoch": 2.4408887077048513, "grad_norm": 1.8526561260223389, 
"learning_rate": 0.0006950078135616253, "loss": 3.3411, "step": 35925 }, { "epoch": 2.441228427775513, "grad_norm": 2.292452335357666, "learning_rate": 0.0006949653485527925, "loss": 3.6405, "step": 35930 }, { "epoch": 2.4415681478461746, "grad_norm": 2.0626955032348633, "learning_rate": 0.0006949228835439598, "loss": 3.7463, "step": 35935 }, { "epoch": 2.4419078679168367, "grad_norm": 2.0596938133239746, "learning_rate": 0.0006948804185351271, "loss": 3.4125, "step": 35940 }, { "epoch": 2.4422475879874983, "grad_norm": 2.451794385910034, "learning_rate": 0.0006948379535262943, "loss": 3.5328, "step": 35945 }, { "epoch": 2.44258730805816, "grad_norm": 2.0214297771453857, "learning_rate": 0.0006947954885174616, "loss": 3.562, "step": 35950 }, { "epoch": 2.442927028128822, "grad_norm": 1.5132075548171997, "learning_rate": 0.0006947530235086288, "loss": 3.6335, "step": 35955 }, { "epoch": 2.4432667481994836, "grad_norm": 1.683493733406067, "learning_rate": 0.0006947105584997962, "loss": 3.3938, "step": 35960 }, { "epoch": 2.4436064682701453, "grad_norm": 1.7508968114852905, "learning_rate": 0.0006946680934909635, "loss": 3.5274, "step": 35965 }, { "epoch": 2.4439461883408073, "grad_norm": 1.8968799114227295, "learning_rate": 0.0006946256284821307, "loss": 3.3956, "step": 35970 }, { "epoch": 2.444285908411469, "grad_norm": 1.59727942943573, "learning_rate": 0.000694583163473298, "loss": 3.4716, "step": 35975 }, { "epoch": 2.4446256284821306, "grad_norm": 1.932336449623108, "learning_rate": 0.0006945406984644653, "loss": 3.2087, "step": 35980 }, { "epoch": 2.4449653485527927, "grad_norm": 1.9656521081924438, "learning_rate": 0.0006944982334556325, "loss": 3.1533, "step": 35985 }, { "epoch": 2.4453050686234543, "grad_norm": 2.0540735721588135, "learning_rate": 0.0006944557684467998, "loss": 3.6181, "step": 35990 }, { "epoch": 2.445644788694116, "grad_norm": 2.0738394260406494, "learning_rate": 0.0006944133034379672, "loss": 3.5325, "step": 35995 }, { "epoch": 
2.445984508764778, "grad_norm": 2.3284530639648438, "learning_rate": 0.0006943708384291344, "loss": 3.7686, "step": 36000 }, { "epoch": 2.4463242288354397, "grad_norm": 1.925265908241272, "learning_rate": 0.0006943283734203016, "loss": 3.6622, "step": 36005 }, { "epoch": 2.4466639489061013, "grad_norm": 1.7894365787506104, "learning_rate": 0.000694285908411469, "loss": 3.5653, "step": 36010 }, { "epoch": 2.4470036689767634, "grad_norm": 1.9373884201049805, "learning_rate": 0.0006942434434026362, "loss": 3.5559, "step": 36015 }, { "epoch": 2.447343389047425, "grad_norm": 2.0753157138824463, "learning_rate": 0.0006942009783938034, "loss": 3.2867, "step": 36020 }, { "epoch": 2.4476831091180866, "grad_norm": 1.9426923990249634, "learning_rate": 0.0006941585133849709, "loss": 3.5475, "step": 36025 }, { "epoch": 2.4480228291887487, "grad_norm": 1.807020902633667, "learning_rate": 0.0006941160483761381, "loss": 3.4074, "step": 36030 }, { "epoch": 2.4483625492594103, "grad_norm": 2.365530014038086, "learning_rate": 0.0006940735833673053, "loss": 3.5799, "step": 36035 }, { "epoch": 2.448702269330072, "grad_norm": 1.9501031637191772, "learning_rate": 0.0006940311183584727, "loss": 3.4624, "step": 36040 }, { "epoch": 2.449041989400734, "grad_norm": 1.8587318658828735, "learning_rate": 0.0006939886533496399, "loss": 3.6694, "step": 36045 }, { "epoch": 2.4493817094713957, "grad_norm": 1.9303966760635376, "learning_rate": 0.0006939461883408071, "loss": 3.6298, "step": 36050 }, { "epoch": 2.4497214295420573, "grad_norm": 2.258074998855591, "learning_rate": 0.0006939037233319744, "loss": 3.5293, "step": 36055 }, { "epoch": 2.4500611496127194, "grad_norm": 1.823364019393921, "learning_rate": 0.0006938612583231418, "loss": 3.5025, "step": 36060 }, { "epoch": 2.450400869683381, "grad_norm": 2.5368621349334717, "learning_rate": 0.000693818793314309, "loss": 3.3525, "step": 36065 }, { "epoch": 2.4507405897540426, "grad_norm": 1.7386343479156494, "learning_rate": 0.0006937763283054763, 
"loss": 3.4529, "step": 36070 }, { "epoch": 2.4510803098247043, "grad_norm": 2.3753464221954346, "learning_rate": 0.0006937338632966436, "loss": 3.5375, "step": 36075 }, { "epoch": 2.4514200298953663, "grad_norm": 1.5671498775482178, "learning_rate": 0.0006936913982878108, "loss": 3.5713, "step": 36080 }, { "epoch": 2.451759749966028, "grad_norm": 1.9087769985198975, "learning_rate": 0.0006936489332789781, "loss": 3.6372, "step": 36085 }, { "epoch": 2.4520994700366896, "grad_norm": 1.8242754936218262, "learning_rate": 0.0006936064682701454, "loss": 3.3715, "step": 36090 }, { "epoch": 2.4524391901073517, "grad_norm": 2.550860643386841, "learning_rate": 0.0006935640032613128, "loss": 3.4557, "step": 36095 }, { "epoch": 2.4527789101780133, "grad_norm": 2.226104974746704, "learning_rate": 0.00069352153825248, "loss": 3.3873, "step": 36100 }, { "epoch": 2.453118630248675, "grad_norm": 1.7589648962020874, "learning_rate": 0.0006934790732436473, "loss": 3.3124, "step": 36105 }, { "epoch": 2.453458350319337, "grad_norm": 1.6819877624511719, "learning_rate": 0.0006934366082348146, "loss": 3.5858, "step": 36110 }, { "epoch": 2.4537980703899986, "grad_norm": 1.6700513362884521, "learning_rate": 0.0006933941432259818, "loss": 3.6236, "step": 36115 }, { "epoch": 2.4541377904606603, "grad_norm": 2.316419839859009, "learning_rate": 0.000693351678217149, "loss": 3.4735, "step": 36120 }, { "epoch": 2.4544775105313223, "grad_norm": 2.126535415649414, "learning_rate": 0.0006933092132083164, "loss": 3.5641, "step": 36125 }, { "epoch": 2.454817230601984, "grad_norm": 1.882787823677063, "learning_rate": 0.0006932667481994837, "loss": 3.4711, "step": 36130 }, { "epoch": 2.4551569506726456, "grad_norm": 1.9892359972000122, "learning_rate": 0.0006932242831906509, "loss": 3.6245, "step": 36135 }, { "epoch": 2.4554966707433077, "grad_norm": 1.9535800218582153, "learning_rate": 0.0006931818181818183, "loss": 3.3284, "step": 36140 }, { "epoch": 2.4558363908139693, "grad_norm": 
1.82650625705719, "learning_rate": 0.0006931393531729855, "loss": 3.7216, "step": 36145 }, { "epoch": 2.456176110884631, "grad_norm": 1.578278660774231, "learning_rate": 0.0006930968881641527, "loss": 3.5422, "step": 36150 }, { "epoch": 2.456515830955293, "grad_norm": 1.8349907398223877, "learning_rate": 0.00069305442315532, "loss": 3.6609, "step": 36155 }, { "epoch": 2.4568555510259547, "grad_norm": 2.3917081356048584, "learning_rate": 0.0006930119581464873, "loss": 3.5589, "step": 36160 }, { "epoch": 2.4571952710966163, "grad_norm": 1.8738924264907837, "learning_rate": 0.0006929694931376546, "loss": 3.6442, "step": 36165 }, { "epoch": 2.457534991167278, "grad_norm": 2.007430076599121, "learning_rate": 0.0006929270281288219, "loss": 3.3554, "step": 36170 }, { "epoch": 2.45787471123794, "grad_norm": 1.8495419025421143, "learning_rate": 0.0006928845631199892, "loss": 3.5621, "step": 36175 }, { "epoch": 2.4582144313086016, "grad_norm": 1.846298336982727, "learning_rate": 0.0006928420981111564, "loss": 3.5417, "step": 36180 }, { "epoch": 2.4585541513792633, "grad_norm": 2.142214775085449, "learning_rate": 0.0006927996331023237, "loss": 3.5942, "step": 36185 }, { "epoch": 2.4588938714499253, "grad_norm": 1.8212991952896118, "learning_rate": 0.000692757168093491, "loss": 3.6286, "step": 36190 }, { "epoch": 2.459233591520587, "grad_norm": 2.3561203479766846, "learning_rate": 0.0006927147030846582, "loss": 3.6736, "step": 36195 }, { "epoch": 2.4595733115912486, "grad_norm": 1.9687303304672241, "learning_rate": 0.0006926722380758256, "loss": 3.3799, "step": 36200 }, { "epoch": 2.4599130316619107, "grad_norm": 1.7908295392990112, "learning_rate": 0.0006926297730669929, "loss": 3.7572, "step": 36205 }, { "epoch": 2.4602527517325723, "grad_norm": 1.7859569787979126, "learning_rate": 0.0006925873080581601, "loss": 3.5897, "step": 36210 }, { "epoch": 2.460592471803234, "grad_norm": 2.0248043537139893, "learning_rate": 0.0006925448430493274, "loss": 3.381, "step": 36215 }, { 
"epoch": 2.460932191873896, "grad_norm": 2.1613404750823975, "learning_rate": 0.0006925023780404946, "loss": 3.7506, "step": 36220 }, { "epoch": 2.4612719119445576, "grad_norm": 2.41288161277771, "learning_rate": 0.0006924599130316619, "loss": 3.2987, "step": 36225 }, { "epoch": 2.4616116320152193, "grad_norm": 1.8692479133605957, "learning_rate": 0.0006924174480228292, "loss": 3.5426, "step": 36230 }, { "epoch": 2.4619513520858813, "grad_norm": 1.7000385522842407, "learning_rate": 0.0006923749830139965, "loss": 3.2439, "step": 36235 }, { "epoch": 2.462291072156543, "grad_norm": 2.1148364543914795, "learning_rate": 0.0006923325180051638, "loss": 3.4789, "step": 36240 }, { "epoch": 2.4626307922272046, "grad_norm": 1.8365613222122192, "learning_rate": 0.0006922900529963311, "loss": 3.4936, "step": 36245 }, { "epoch": 2.4629705122978667, "grad_norm": 2.0079457759857178, "learning_rate": 0.0006922475879874983, "loss": 3.3456, "step": 36250 }, { "epoch": 2.4633102323685283, "grad_norm": 2.870009660720825, "learning_rate": 0.0006922051229786655, "loss": 3.3783, "step": 36255 }, { "epoch": 2.46364995243919, "grad_norm": 2.2338294982910156, "learning_rate": 0.0006921626579698329, "loss": 3.3715, "step": 36260 }, { "epoch": 2.463989672509852, "grad_norm": 2.1022839546203613, "learning_rate": 0.0006921201929610001, "loss": 3.3638, "step": 36265 }, { "epoch": 2.4643293925805136, "grad_norm": 2.301915168762207, "learning_rate": 0.0006920777279521674, "loss": 3.68, "step": 36270 }, { "epoch": 2.4646691126511753, "grad_norm": 1.9057174921035767, "learning_rate": 0.0006920352629433348, "loss": 3.4556, "step": 36275 }, { "epoch": 2.4650088327218374, "grad_norm": 2.1813344955444336, "learning_rate": 0.000691992797934502, "loss": 3.5486, "step": 36280 }, { "epoch": 2.465348552792499, "grad_norm": 1.584959864616394, "learning_rate": 0.0006919503329256692, "loss": 3.4879, "step": 36285 }, { "epoch": 2.4656882728631606, "grad_norm": 2.444960594177246, "learning_rate": 
0.0006919078679168366, "loss": 3.5609, "step": 36290 }, { "epoch": 2.4660279929338227, "grad_norm": 1.6103527545928955, "learning_rate": 0.0006918654029080038, "loss": 3.2779, "step": 36295 }, { "epoch": 2.4663677130044843, "grad_norm": 2.3466904163360596, "learning_rate": 0.000691822937899171, "loss": 3.3693, "step": 36300 }, { "epoch": 2.466707433075146, "grad_norm": 1.523699164390564, "learning_rate": 0.0006917804728903385, "loss": 3.4965, "step": 36305 }, { "epoch": 2.467047153145808, "grad_norm": 2.0585765838623047, "learning_rate": 0.0006917380078815057, "loss": 3.6523, "step": 36310 }, { "epoch": 2.4673868732164697, "grad_norm": 1.8959670066833496, "learning_rate": 0.0006916955428726729, "loss": 3.6839, "step": 36315 }, { "epoch": 2.4677265932871313, "grad_norm": 1.6155893802642822, "learning_rate": 0.0006916530778638402, "loss": 3.5686, "step": 36320 }, { "epoch": 2.4680663133577934, "grad_norm": 2.3044846057891846, "learning_rate": 0.0006916106128550075, "loss": 3.4956, "step": 36325 }, { "epoch": 2.468406033428455, "grad_norm": 1.7769819498062134, "learning_rate": 0.0006915681478461747, "loss": 3.4021, "step": 36330 }, { "epoch": 2.4687457534991166, "grad_norm": 1.8258777856826782, "learning_rate": 0.000691525682837342, "loss": 3.6927, "step": 36335 }, { "epoch": 2.4690854735697787, "grad_norm": 1.9938610792160034, "learning_rate": 0.0006914832178285094, "loss": 3.273, "step": 36340 }, { "epoch": 2.4694251936404403, "grad_norm": 2.58488392829895, "learning_rate": 0.0006914407528196766, "loss": 3.4866, "step": 36345 }, { "epoch": 2.469764913711102, "grad_norm": 2.4074759483337402, "learning_rate": 0.0006913982878108439, "loss": 3.6778, "step": 36350 }, { "epoch": 2.470104633781764, "grad_norm": 2.438755512237549, "learning_rate": 0.0006913558228020111, "loss": 3.2977, "step": 36355 }, { "epoch": 2.4704443538524257, "grad_norm": 1.9084651470184326, "learning_rate": 0.0006913133577931784, "loss": 3.4695, "step": 36360 }, { "epoch": 2.4707840739230873, 
"grad_norm": 1.8043317794799805, "learning_rate": 0.0006912708927843457, "loss": 3.683, "step": 36365 }, { "epoch": 2.4711237939937494, "grad_norm": 1.8205420970916748, "learning_rate": 0.0006912284277755129, "loss": 3.4197, "step": 36370 }, { "epoch": 2.471463514064411, "grad_norm": 1.9851233959197998, "learning_rate": 0.0006911859627666803, "loss": 3.3744, "step": 36375 }, { "epoch": 2.4718032341350726, "grad_norm": 2.5146002769470215, "learning_rate": 0.0006911434977578476, "loss": 3.6754, "step": 36380 }, { "epoch": 2.4721429542057347, "grad_norm": 2.0329482555389404, "learning_rate": 0.0006911010327490148, "loss": 3.6498, "step": 36385 }, { "epoch": 2.4724826742763963, "grad_norm": 1.9395798444747925, "learning_rate": 0.000691058567740182, "loss": 3.4259, "step": 36390 }, { "epoch": 2.472822394347058, "grad_norm": 2.1111905574798584, "learning_rate": 0.0006910161027313494, "loss": 3.5058, "step": 36395 }, { "epoch": 2.47316211441772, "grad_norm": 1.9182153940200806, "learning_rate": 0.0006909736377225166, "loss": 3.4334, "step": 36400 }, { "epoch": 2.4735018344883817, "grad_norm": 1.826523780822754, "learning_rate": 0.0006909311727136838, "loss": 3.5995, "step": 36405 }, { "epoch": 2.4738415545590433, "grad_norm": 1.8129836320877075, "learning_rate": 0.0006908887077048513, "loss": 3.6311, "step": 36410 }, { "epoch": 2.474181274629705, "grad_norm": 2.4893736839294434, "learning_rate": 0.0006908462426960185, "loss": 3.4602, "step": 36415 }, { "epoch": 2.474520994700367, "grad_norm": 2.07446026802063, "learning_rate": 0.0006908037776871857, "loss": 3.5109, "step": 36420 }, { "epoch": 2.4748607147710286, "grad_norm": 1.8898564577102661, "learning_rate": 0.0006907613126783531, "loss": 3.5236, "step": 36425 }, { "epoch": 2.4752004348416903, "grad_norm": 1.5341761112213135, "learning_rate": 0.0006907188476695203, "loss": 3.5066, "step": 36430 }, { "epoch": 2.4755401549123524, "grad_norm": 2.066185474395752, "learning_rate": 0.0006906763826606876, "loss": 3.5273, 
"step": 36435 }, { "epoch": 2.475879874983014, "grad_norm": 2.1643800735473633, "learning_rate": 0.0006906339176518549, "loss": 3.5075, "step": 36440 }, { "epoch": 2.4762195950536756, "grad_norm": 2.0810763835906982, "learning_rate": 0.0006905914526430222, "loss": 3.686, "step": 36445 }, { "epoch": 2.4765593151243377, "grad_norm": 1.3804278373718262, "learning_rate": 0.0006905489876341895, "loss": 3.5755, "step": 36450 }, { "epoch": 2.4768990351949993, "grad_norm": 1.6941664218902588, "learning_rate": 0.0006905065226253567, "loss": 3.545, "step": 36455 }, { "epoch": 2.477238755265661, "grad_norm": 2.094698667526245, "learning_rate": 0.000690464057616524, "loss": 3.5237, "step": 36460 }, { "epoch": 2.477578475336323, "grad_norm": 1.917300820350647, "learning_rate": 0.0006904215926076913, "loss": 3.4337, "step": 36465 }, { "epoch": 2.4779181954069847, "grad_norm": 2.555663585662842, "learning_rate": 0.0006903791275988585, "loss": 3.4415, "step": 36470 }, { "epoch": 2.4782579154776463, "grad_norm": 1.8163450956344604, "learning_rate": 0.0006903366625900258, "loss": 3.5399, "step": 36475 }, { "epoch": 2.4785976355483084, "grad_norm": 2.1926114559173584, "learning_rate": 0.0006902941975811932, "loss": 3.4801, "step": 36480 }, { "epoch": 2.47893735561897, "grad_norm": 2.1425747871398926, "learning_rate": 0.0006902517325723604, "loss": 3.373, "step": 36485 }, { "epoch": 2.4792770756896316, "grad_norm": 2.266761541366577, "learning_rate": 0.0006902092675635277, "loss": 3.3847, "step": 36490 }, { "epoch": 2.4796167957602937, "grad_norm": 1.7840604782104492, "learning_rate": 0.000690166802554695, "loss": 3.5747, "step": 36495 }, { "epoch": 2.4799565158309553, "grad_norm": 2.1199142932891846, "learning_rate": 0.0006901243375458622, "loss": 3.3846, "step": 36500 }, { "epoch": 2.480296235901617, "grad_norm": 1.7544164657592773, "learning_rate": 0.0006900818725370294, "loss": 3.5769, "step": 36505 }, { "epoch": 2.4806359559722786, "grad_norm": 1.8207331895828247, 
"learning_rate": 0.0006900394075281968, "loss": 3.5415, "step": 36510 }, { "epoch": 2.4809756760429407, "grad_norm": 1.8821688890457153, "learning_rate": 0.0006899969425193641, "loss": 3.4056, "step": 36515 }, { "epoch": 2.4813153961136023, "grad_norm": 2.4346141815185547, "learning_rate": 0.0006899544775105313, "loss": 3.3701, "step": 36520 }, { "epoch": 2.481655116184264, "grad_norm": 2.4712374210357666, "learning_rate": 0.0006899120125016987, "loss": 3.6249, "step": 36525 }, { "epoch": 2.481994836254926, "grad_norm": 1.9180796146392822, "learning_rate": 0.0006898695474928659, "loss": 3.5162, "step": 36530 }, { "epoch": 2.4823345563255876, "grad_norm": 1.628359079360962, "learning_rate": 0.0006898270824840331, "loss": 3.6236, "step": 36535 }, { "epoch": 2.4826742763962493, "grad_norm": 1.7380529642105103, "learning_rate": 0.0006897846174752005, "loss": 3.3258, "step": 36540 }, { "epoch": 2.4830139964669113, "grad_norm": 1.984370470046997, "learning_rate": 0.0006897421524663677, "loss": 3.4898, "step": 36545 }, { "epoch": 2.483353716537573, "grad_norm": 2.525221824645996, "learning_rate": 0.000689699687457535, "loss": 3.3993, "step": 36550 }, { "epoch": 2.4836934366082346, "grad_norm": 2.2438125610351562, "learning_rate": 0.0006896572224487023, "loss": 3.3528, "step": 36555 }, { "epoch": 2.4840331566788967, "grad_norm": 2.207009792327881, "learning_rate": 0.0006896147574398696, "loss": 3.4035, "step": 36560 }, { "epoch": 2.4843728767495583, "grad_norm": 2.179316759109497, "learning_rate": 0.0006895722924310368, "loss": 3.3423, "step": 36565 }, { "epoch": 2.48471259682022, "grad_norm": 2.178685188293457, "learning_rate": 0.0006895298274222041, "loss": 3.2889, "step": 36570 }, { "epoch": 2.485052316890882, "grad_norm": 1.9996875524520874, "learning_rate": 0.0006894873624133714, "loss": 3.6629, "step": 36575 }, { "epoch": 2.4853920369615436, "grad_norm": 2.17214298248291, "learning_rate": 0.0006894448974045386, "loss": 3.729, "step": 36580 }, { "epoch": 
2.4857317570322053, "grad_norm": 1.6552826166152954, "learning_rate": 0.000689402432395706, "loss": 3.515, "step": 36585 }, { "epoch": 2.4860714771028674, "grad_norm": 2.0754549503326416, "learning_rate": 0.0006893599673868733, "loss": 3.6082, "step": 36590 }, { "epoch": 2.486411197173529, "grad_norm": 2.1316232681274414, "learning_rate": 0.0006893175023780405, "loss": 3.318, "step": 36595 }, { "epoch": 2.4867509172441906, "grad_norm": 2.010876417160034, "learning_rate": 0.0006892750373692078, "loss": 3.6156, "step": 36600 }, { "epoch": 2.4870906373148527, "grad_norm": 1.9043071269989014, "learning_rate": 0.000689232572360375, "loss": 3.2303, "step": 36605 }, { "epoch": 2.4874303573855143, "grad_norm": 2.1811797618865967, "learning_rate": 0.0006891901073515423, "loss": 3.7149, "step": 36610 }, { "epoch": 2.487770077456176, "grad_norm": 1.8412444591522217, "learning_rate": 0.0006891476423427097, "loss": 3.6784, "step": 36615 }, { "epoch": 2.488109797526838, "grad_norm": 2.081989049911499, "learning_rate": 0.0006891051773338769, "loss": 3.4937, "step": 36620 }, { "epoch": 2.4884495175974997, "grad_norm": 1.8803200721740723, "learning_rate": 0.0006890627123250442, "loss": 3.623, "step": 36625 }, { "epoch": 2.4887892376681613, "grad_norm": 2.077389717102051, "learning_rate": 0.0006890202473162115, "loss": 3.3147, "step": 36630 }, { "epoch": 2.4891289577388234, "grad_norm": 1.8125395774841309, "learning_rate": 0.0006889777823073787, "loss": 3.5567, "step": 36635 }, { "epoch": 2.489468677809485, "grad_norm": 1.8830806016921997, "learning_rate": 0.000688935317298546, "loss": 3.445, "step": 36640 }, { "epoch": 2.4898083978801466, "grad_norm": 1.7041794061660767, "learning_rate": 0.0006888928522897133, "loss": 3.5936, "step": 36645 }, { "epoch": 2.4901481179508087, "grad_norm": 1.7809067964553833, "learning_rate": 0.0006888503872808806, "loss": 3.4191, "step": 36650 }, { "epoch": 2.4904878380214703, "grad_norm": 1.8464595079421997, "learning_rate": 0.0006888079222720478, 
"loss": 3.5425, "step": 36655 }, { "epoch": 2.490827558092132, "grad_norm": 2.027599811553955, "learning_rate": 0.0006887654572632152, "loss": 3.3195, "step": 36660 }, { "epoch": 2.491167278162794, "grad_norm": 3.213339328765869, "learning_rate": 0.0006887229922543824, "loss": 3.4049, "step": 36665 }, { "epoch": 2.4915069982334557, "grad_norm": 1.8583943843841553, "learning_rate": 0.0006886805272455496, "loss": 3.4, "step": 36670 }, { "epoch": 2.4918467183041173, "grad_norm": 1.7089630365371704, "learning_rate": 0.000688638062236717, "loss": 3.2314, "step": 36675 }, { "epoch": 2.4921864383747794, "grad_norm": 2.266293525695801, "learning_rate": 0.0006885955972278842, "loss": 3.3595, "step": 36680 }, { "epoch": 2.492526158445441, "grad_norm": 2.0031776428222656, "learning_rate": 0.0006885531322190515, "loss": 3.3797, "step": 36685 }, { "epoch": 2.4928658785161026, "grad_norm": 2.1441409587860107, "learning_rate": 0.0006885106672102189, "loss": 3.2883, "step": 36690 }, { "epoch": 2.4932055985867647, "grad_norm": 1.7802231311798096, "learning_rate": 0.0006884682022013861, "loss": 3.4844, "step": 36695 }, { "epoch": 2.4935453186574263, "grad_norm": 1.8146661520004272, "learning_rate": 0.0006884257371925533, "loss": 3.4403, "step": 36700 }, { "epoch": 2.493885038728088, "grad_norm": 2.212082624435425, "learning_rate": 0.0006883832721837206, "loss": 3.2945, "step": 36705 }, { "epoch": 2.49422475879875, "grad_norm": 1.873952865600586, "learning_rate": 0.0006883408071748879, "loss": 3.5669, "step": 36710 }, { "epoch": 2.4945644788694117, "grad_norm": 1.8018356561660767, "learning_rate": 0.0006882983421660551, "loss": 3.7093, "step": 36715 }, { "epoch": 2.4949041989400733, "grad_norm": 2.481151580810547, "learning_rate": 0.0006882558771572225, "loss": 3.3894, "step": 36720 }, { "epoch": 2.4952439190107354, "grad_norm": 2.1955063343048096, "learning_rate": 0.0006882134121483898, "loss": 3.3182, "step": 36725 }, { "epoch": 2.495583639081397, "grad_norm": 1.5805648565292358, 
"learning_rate": 0.000688170947139557, "loss": 3.575, "step": 36730 }, { "epoch": 2.4959233591520587, "grad_norm": 2.27366304397583, "learning_rate": 0.0006881284821307243, "loss": 3.6587, "step": 36735 }, { "epoch": 2.4962630792227207, "grad_norm": 2.499605894088745, "learning_rate": 0.0006880860171218915, "loss": 3.3491, "step": 36740 }, { "epoch": 2.4966027992933824, "grad_norm": 2.2952306270599365, "learning_rate": 0.0006880435521130588, "loss": 3.3929, "step": 36745 }, { "epoch": 2.496942519364044, "grad_norm": 1.6023911237716675, "learning_rate": 0.0006880010871042261, "loss": 3.589, "step": 36750 }, { "epoch": 2.4972822394347056, "grad_norm": 2.0061631202697754, "learning_rate": 0.0006879586220953934, "loss": 3.2776, "step": 36755 }, { "epoch": 2.4976219595053677, "grad_norm": 1.6910629272460938, "learning_rate": 0.0006879161570865607, "loss": 3.3992, "step": 36760 }, { "epoch": 2.4979616795760293, "grad_norm": 1.8157572746276855, "learning_rate": 0.000687873692077728, "loss": 3.3509, "step": 36765 }, { "epoch": 2.498301399646691, "grad_norm": 1.6945140361785889, "learning_rate": 0.0006878312270688952, "loss": 3.5865, "step": 36770 }, { "epoch": 2.498641119717353, "grad_norm": 1.93096923828125, "learning_rate": 0.0006877887620600626, "loss": 3.4542, "step": 36775 }, { "epoch": 2.4989808397880147, "grad_norm": 1.6966819763183594, "learning_rate": 0.0006877462970512298, "loss": 3.2414, "step": 36780 }, { "epoch": 2.4993205598586763, "grad_norm": 1.8588048219680786, "learning_rate": 0.000687703832042397, "loss": 3.5523, "step": 36785 }, { "epoch": 2.4996602799293384, "grad_norm": 1.8561389446258545, "learning_rate": 0.0006876613670335645, "loss": 3.4379, "step": 36790 }, { "epoch": 2.5, "grad_norm": 1.9305760860443115, "learning_rate": 0.0006876189020247317, "loss": 3.4523, "step": 36795 }, { "epoch": 2.5003397200706616, "grad_norm": 2.277203321456909, "learning_rate": 0.0006875764370158989, "loss": 3.649, "step": 36800 }, { "epoch": 2.5006794401413237, 
"grad_norm": 2.2742154598236084, "learning_rate": 0.0006875339720070662, "loss": 3.6979, "step": 36805 }, { "epoch": 2.5010191602119853, "grad_norm": 1.987792730331421, "learning_rate": 0.0006874915069982335, "loss": 3.625, "step": 36810 }, { "epoch": 2.501358880282647, "grad_norm": 2.2077436447143555, "learning_rate": 0.0006874490419894007, "loss": 3.3679, "step": 36815 }, { "epoch": 2.5016986003533086, "grad_norm": 1.8233284950256348, "learning_rate": 0.000687406576980568, "loss": 3.608, "step": 36820 }, { "epoch": 2.5020383204239707, "grad_norm": 1.6457786560058594, "learning_rate": 0.0006873641119717354, "loss": 3.5077, "step": 36825 }, { "epoch": 2.5023780404946323, "grad_norm": 2.4658849239349365, "learning_rate": 0.0006873216469629026, "loss": 3.5482, "step": 36830 }, { "epoch": 2.502717760565294, "grad_norm": 1.9377998113632202, "learning_rate": 0.0006872791819540699, "loss": 3.4834, "step": 36835 }, { "epoch": 2.503057480635956, "grad_norm": 1.6949175596237183, "learning_rate": 0.0006872367169452372, "loss": 3.5871, "step": 36840 }, { "epoch": 2.5033972007066176, "grad_norm": 2.659003973007202, "learning_rate": 0.0006871942519364044, "loss": 3.6763, "step": 36845 }, { "epoch": 2.5037369207772793, "grad_norm": 1.673632025718689, "learning_rate": 0.0006871517869275717, "loss": 3.4439, "step": 36850 }, { "epoch": 2.5040766408479413, "grad_norm": 2.1140780448913574, "learning_rate": 0.0006871093219187389, "loss": 3.3071, "step": 36855 }, { "epoch": 2.504416360918603, "grad_norm": 1.9539262056350708, "learning_rate": 0.0006870668569099063, "loss": 3.6778, "step": 36860 }, { "epoch": 2.5047560809892646, "grad_norm": 2.0501832962036133, "learning_rate": 0.0006870243919010736, "loss": 3.6517, "step": 36865 }, { "epoch": 2.5050958010599267, "grad_norm": 1.5914572477340698, "learning_rate": 0.0006869819268922408, "loss": 3.279, "step": 36870 }, { "epoch": 2.5054355211305883, "grad_norm": 1.9977457523345947, "learning_rate": 0.0006869394618834081, "loss": 3.4552, 
"step": 36875 }, { "epoch": 2.50577524120125, "grad_norm": 1.7159236669540405, "learning_rate": 0.0006868969968745754, "loss": 3.486, "step": 36880 }, { "epoch": 2.506114961271912, "grad_norm": 1.8604196310043335, "learning_rate": 0.0006868545318657426, "loss": 3.4399, "step": 36885 }, { "epoch": 2.5064546813425737, "grad_norm": 2.2226765155792236, "learning_rate": 0.0006868120668569098, "loss": 3.471, "step": 36890 }, { "epoch": 2.5067944014132353, "grad_norm": 2.1517653465270996, "learning_rate": 0.0006867696018480773, "loss": 3.3561, "step": 36895 }, { "epoch": 2.5071341214838974, "grad_norm": 2.132896900177002, "learning_rate": 0.0006867271368392445, "loss": 3.3594, "step": 36900 }, { "epoch": 2.507473841554559, "grad_norm": 1.7091223001480103, "learning_rate": 0.0006866846718304117, "loss": 3.5547, "step": 36905 }, { "epoch": 2.5078135616252206, "grad_norm": 1.4671788215637207, "learning_rate": 0.0006866422068215791, "loss": 3.3904, "step": 36910 }, { "epoch": 2.5081532816958827, "grad_norm": 2.5518839359283447, "learning_rate": 0.0006865997418127463, "loss": 3.5582, "step": 36915 }, { "epoch": 2.5084930017665443, "grad_norm": 1.7440807819366455, "learning_rate": 0.0006865572768039135, "loss": 3.3974, "step": 36920 }, { "epoch": 2.508832721837206, "grad_norm": 2.120089530944824, "learning_rate": 0.0006865148117950809, "loss": 3.6215, "step": 36925 }, { "epoch": 2.509172441907868, "grad_norm": 1.7557191848754883, "learning_rate": 0.0006864723467862482, "loss": 3.3511, "step": 36930 }, { "epoch": 2.5095121619785297, "grad_norm": 2.5565946102142334, "learning_rate": 0.0006864298817774154, "loss": 3.715, "step": 36935 }, { "epoch": 2.5098518820491913, "grad_norm": 1.4187575578689575, "learning_rate": 0.0006863874167685828, "loss": 3.5526, "step": 36940 }, { "epoch": 2.5101916021198534, "grad_norm": 2.0181164741516113, "learning_rate": 0.00068634495175975, "loss": 3.4478, "step": 36945 }, { "epoch": 2.510531322190515, "grad_norm": 2.8209967613220215, 
"learning_rate": 0.0006863024867509172, "loss": 3.4159, "step": 36950 }, { "epoch": 2.5108710422611766, "grad_norm": 2.1114113330841064, "learning_rate": 0.0006862600217420845, "loss": 3.5054, "step": 36955 }, { "epoch": 2.5112107623318387, "grad_norm": 1.5199919939041138, "learning_rate": 0.0006862175567332518, "loss": 3.451, "step": 36960 }, { "epoch": 2.5115504824025003, "grad_norm": 2.2193214893341064, "learning_rate": 0.0006861750917244191, "loss": 3.4526, "step": 36965 }, { "epoch": 2.511890202473162, "grad_norm": 1.8202308416366577, "learning_rate": 0.0006861326267155864, "loss": 3.5245, "step": 36970 }, { "epoch": 2.512229922543824, "grad_norm": 1.6943868398666382, "learning_rate": 0.0006860901617067537, "loss": 3.6967, "step": 36975 }, { "epoch": 2.5125696426144857, "grad_norm": 1.625431776046753, "learning_rate": 0.0006860476966979209, "loss": 3.3741, "step": 36980 }, { "epoch": 2.5129093626851473, "grad_norm": 2.913696050643921, "learning_rate": 0.0006860052316890882, "loss": 3.5495, "step": 36985 }, { "epoch": 2.5132490827558094, "grad_norm": 2.2010955810546875, "learning_rate": 0.0006859627666802554, "loss": 3.7472, "step": 36990 }, { "epoch": 2.513588802826471, "grad_norm": 1.8541558980941772, "learning_rate": 0.0006859203016714227, "loss": 3.7601, "step": 36995 }, { "epoch": 2.5139285228971326, "grad_norm": 1.7655694484710693, "learning_rate": 0.0006858778366625901, "loss": 3.5266, "step": 37000 }, { "epoch": 2.5142682429677947, "grad_norm": 1.8105207681655884, "learning_rate": 0.0006858353716537573, "loss": 3.9002, "step": 37005 }, { "epoch": 2.5146079630384564, "grad_norm": 1.5116887092590332, "learning_rate": 0.0006857929066449246, "loss": 3.2884, "step": 37010 }, { "epoch": 2.514947683109118, "grad_norm": 1.7621722221374512, "learning_rate": 0.0006857504416360919, "loss": 3.4765, "step": 37015 }, { "epoch": 2.51528740317978, "grad_norm": 1.947748064994812, "learning_rate": 0.0006857079766272591, "loss": 3.3647, "step": 37020 }, { "epoch": 
2.5156271232504417, "grad_norm": 2.1053731441497803, "learning_rate": 0.0006856655116184264, "loss": 3.4333, "step": 37025 }, { "epoch": 2.5159668433211033, "grad_norm": 1.5310871601104736, "learning_rate": 0.0006856230466095937, "loss": 3.3935, "step": 37030 }, { "epoch": 2.5163065633917654, "grad_norm": 3.63301944732666, "learning_rate": 0.000685580581600761, "loss": 3.3167, "step": 37035 }, { "epoch": 2.516646283462427, "grad_norm": 1.6747797727584839, "learning_rate": 0.0006855381165919282, "loss": 3.5088, "step": 37040 }, { "epoch": 2.5169860035330887, "grad_norm": 2.134164571762085, "learning_rate": 0.0006854956515830956, "loss": 3.3871, "step": 37045 }, { "epoch": 2.5173257236037507, "grad_norm": 2.259823799133301, "learning_rate": 0.0006854531865742628, "loss": 3.6307, "step": 37050 }, { "epoch": 2.5176654436744124, "grad_norm": 2.380518674850464, "learning_rate": 0.00068541072156543, "loss": 3.6011, "step": 37055 }, { "epoch": 2.518005163745074, "grad_norm": 2.3044345378875732, "learning_rate": 0.0006853682565565974, "loss": 3.3748, "step": 37060 }, { "epoch": 2.518344883815736, "grad_norm": 1.6935824155807495, "learning_rate": 0.0006853257915477646, "loss": 3.5786, "step": 37065 }, { "epoch": 2.5186846038863977, "grad_norm": 1.713477373123169, "learning_rate": 0.0006852833265389319, "loss": 3.5205, "step": 37070 }, { "epoch": 2.5190243239570593, "grad_norm": 1.707205891609192, "learning_rate": 0.0006852408615300993, "loss": 3.1695, "step": 37075 }, { "epoch": 2.5193640440277214, "grad_norm": 2.4105992317199707, "learning_rate": 0.0006851983965212665, "loss": 3.6914, "step": 37080 }, { "epoch": 2.519703764098383, "grad_norm": 1.7624958753585815, "learning_rate": 0.0006851559315124337, "loss": 3.403, "step": 37085 }, { "epoch": 2.5200434841690447, "grad_norm": 1.6881808042526245, "learning_rate": 0.000685113466503601, "loss": 3.5354, "step": 37090 }, { "epoch": 2.5203832042397067, "grad_norm": 2.371490955352783, "learning_rate": 0.0006850710014947683, 
"loss": 3.7978, "step": 37095 }, { "epoch": 2.5207229243103684, "grad_norm": 1.7880733013153076, "learning_rate": 0.0006850285364859355, "loss": 3.3852, "step": 37100 }, { "epoch": 2.52106264438103, "grad_norm": 1.853695034980774, "learning_rate": 0.0006849860714771029, "loss": 3.2165, "step": 37105 }, { "epoch": 2.521402364451692, "grad_norm": 1.9336328506469727, "learning_rate": 0.0006849436064682702, "loss": 3.4005, "step": 37110 }, { "epoch": 2.5217420845223537, "grad_norm": 2.4208152294158936, "learning_rate": 0.0006849011414594374, "loss": 3.5132, "step": 37115 }, { "epoch": 2.5220818045930153, "grad_norm": 1.8109601736068726, "learning_rate": 0.0006848586764506047, "loss": 3.5496, "step": 37120 }, { "epoch": 2.522421524663677, "grad_norm": 1.8882675170898438, "learning_rate": 0.000684816211441772, "loss": 3.5268, "step": 37125 }, { "epoch": 2.522761244734339, "grad_norm": 1.8615118265151978, "learning_rate": 0.0006847737464329393, "loss": 3.499, "step": 37130 }, { "epoch": 2.5231009648050007, "grad_norm": 1.910473346710205, "learning_rate": 0.0006847312814241065, "loss": 3.6703, "step": 37135 }, { "epoch": 2.5234406848756623, "grad_norm": 1.2525444030761719, "learning_rate": 0.0006846888164152738, "loss": 3.6689, "step": 37140 }, { "epoch": 2.5237804049463244, "grad_norm": 1.9611862897872925, "learning_rate": 0.0006846463514064412, "loss": 3.3612, "step": 37145 }, { "epoch": 2.524120125016986, "grad_norm": 2.0502071380615234, "learning_rate": 0.0006846038863976084, "loss": 3.6486, "step": 37150 }, { "epoch": 2.5244598450876476, "grad_norm": 1.970602035522461, "learning_rate": 0.0006845614213887756, "loss": 3.5264, "step": 37155 }, { "epoch": 2.5247995651583093, "grad_norm": 2.156654119491577, "learning_rate": 0.000684518956379943, "loss": 3.3998, "step": 37160 }, { "epoch": 2.5251392852289714, "grad_norm": 2.02665114402771, "learning_rate": 0.0006844764913711102, "loss": 3.6685, "step": 37165 }, { "epoch": 2.525479005299633, "grad_norm": 2.1378469467163086, 
"learning_rate": 0.0006844340263622774, "loss": 3.5905, "step": 37170 }, { "epoch": 2.5258187253702946, "grad_norm": 2.5392203330993652, "learning_rate": 0.0006843915613534449, "loss": 3.3682, "step": 37175 }, { "epoch": 2.5261584454409567, "grad_norm": 2.502119779586792, "learning_rate": 0.0006843490963446121, "loss": 3.4361, "step": 37180 }, { "epoch": 2.5264981655116183, "grad_norm": 1.5129518508911133, "learning_rate": 0.0006843066313357793, "loss": 3.433, "step": 37185 }, { "epoch": 2.52683788558228, "grad_norm": 1.5039374828338623, "learning_rate": 0.0006842641663269466, "loss": 3.3588, "step": 37190 }, { "epoch": 2.527177605652942, "grad_norm": 1.9899705648422241, "learning_rate": 0.0006842217013181139, "loss": 3.6328, "step": 37195 }, { "epoch": 2.5275173257236037, "grad_norm": 1.8289436101913452, "learning_rate": 0.0006841792363092811, "loss": 3.3301, "step": 37200 }, { "epoch": 2.5278570457942653, "grad_norm": 1.6289070844650269, "learning_rate": 0.0006841367713004485, "loss": 3.5721, "step": 37205 }, { "epoch": 2.5281967658649274, "grad_norm": 2.152872323989868, "learning_rate": 0.0006840943062916158, "loss": 3.4549, "step": 37210 }, { "epoch": 2.528536485935589, "grad_norm": 2.223252296447754, "learning_rate": 0.000684051841282783, "loss": 3.3363, "step": 37215 }, { "epoch": 2.5288762060062506, "grad_norm": 1.6525956392288208, "learning_rate": 0.0006840093762739503, "loss": 3.7136, "step": 37220 }, { "epoch": 2.5292159260769127, "grad_norm": 1.9764117002487183, "learning_rate": 0.0006839669112651176, "loss": 3.3747, "step": 37225 }, { "epoch": 2.5295556461475743, "grad_norm": 1.769132375717163, "learning_rate": 0.0006839244462562848, "loss": 3.3619, "step": 37230 }, { "epoch": 2.529895366218236, "grad_norm": 2.503016948699951, "learning_rate": 0.0006838819812474521, "loss": 3.4103, "step": 37235 }, { "epoch": 2.530235086288898, "grad_norm": 2.152141809463501, "learning_rate": 0.0006838395162386194, "loss": 3.634, "step": 37240 }, { "epoch": 
2.5305748063595597, "grad_norm": 2.786628007888794, "learning_rate": 0.0006837970512297867, "loss": 3.4365, "step": 37245 }, { "epoch": 2.5309145264302213, "grad_norm": 1.6404098272323608, "learning_rate": 0.000683754586220954, "loss": 3.3795, "step": 37250 }, { "epoch": 2.5312542465008834, "grad_norm": 1.9420331716537476, "learning_rate": 0.0006837121212121212, "loss": 3.5967, "step": 37255 }, { "epoch": 2.531593966571545, "grad_norm": 1.991816520690918, "learning_rate": 0.0006836696562032885, "loss": 3.5312, "step": 37260 }, { "epoch": 2.5319336866422066, "grad_norm": 2.276684284210205, "learning_rate": 0.0006836271911944558, "loss": 3.3795, "step": 37265 }, { "epoch": 2.5322734067128687, "grad_norm": 1.8433748483657837, "learning_rate": 0.000683584726185623, "loss": 3.4597, "step": 37270 }, { "epoch": 2.5326131267835303, "grad_norm": 1.8044530153274536, "learning_rate": 0.0006835422611767904, "loss": 3.4393, "step": 37275 }, { "epoch": 2.532952846854192, "grad_norm": 2.351996421813965, "learning_rate": 0.0006834997961679577, "loss": 3.4425, "step": 37280 }, { "epoch": 2.533292566924854, "grad_norm": 2.7107644081115723, "learning_rate": 0.0006834573311591249, "loss": 3.4806, "step": 37285 }, { "epoch": 2.5336322869955157, "grad_norm": 1.7549446821212769, "learning_rate": 0.0006834148661502921, "loss": 3.4068, "step": 37290 }, { "epoch": 2.5339720070661773, "grad_norm": 2.0077648162841797, "learning_rate": 0.0006833724011414595, "loss": 3.4065, "step": 37295 }, { "epoch": 2.5343117271368394, "grad_norm": 2.2656960487365723, "learning_rate": 0.0006833299361326267, "loss": 3.4263, "step": 37300 }, { "epoch": 2.534651447207501, "grad_norm": 2.0051162242889404, "learning_rate": 0.0006832874711237939, "loss": 3.6244, "step": 37305 }, { "epoch": 2.5349911672781626, "grad_norm": 1.6173673868179321, "learning_rate": 0.0006832450061149614, "loss": 3.2817, "step": 37310 }, { "epoch": 2.5353308873488247, "grad_norm": 2.4309463500976562, "learning_rate": 
0.0006832025411061286, "loss": 3.2337, "step": 37315 }, { "epoch": 2.5356706074194864, "grad_norm": 1.6836190223693848, "learning_rate": 0.0006831600760972958, "loss": 3.6107, "step": 37320 }, { "epoch": 2.536010327490148, "grad_norm": 2.1151504516601562, "learning_rate": 0.0006831176110884632, "loss": 3.4629, "step": 37325 }, { "epoch": 2.53635004756081, "grad_norm": 1.9055194854736328, "learning_rate": 0.0006830751460796304, "loss": 3.5561, "step": 37330 }, { "epoch": 2.5366897676314717, "grad_norm": 1.957523226737976, "learning_rate": 0.0006830326810707976, "loss": 3.5261, "step": 37335 }, { "epoch": 2.5370294877021333, "grad_norm": 1.4782710075378418, "learning_rate": 0.0006829902160619649, "loss": 3.5398, "step": 37340 }, { "epoch": 2.5373692077727954, "grad_norm": 2.14192271232605, "learning_rate": 0.0006829477510531323, "loss": 3.8221, "step": 37345 }, { "epoch": 2.537708927843457, "grad_norm": 2.281951904296875, "learning_rate": 0.0006829052860442995, "loss": 3.3055, "step": 37350 }, { "epoch": 2.5380486479141187, "grad_norm": 1.6207696199417114, "learning_rate": 0.0006828628210354668, "loss": 3.588, "step": 37355 }, { "epoch": 2.5383883679847807, "grad_norm": 2.0426175594329834, "learning_rate": 0.0006828203560266341, "loss": 3.4821, "step": 37360 }, { "epoch": 2.5387280880554424, "grad_norm": 1.933263897895813, "learning_rate": 0.0006827778910178013, "loss": 3.2376, "step": 37365 }, { "epoch": 2.539067808126104, "grad_norm": 2.5944344997406006, "learning_rate": 0.0006827354260089686, "loss": 3.5452, "step": 37370 }, { "epoch": 2.539407528196766, "grad_norm": 1.8264379501342773, "learning_rate": 0.0006826929610001358, "loss": 3.5522, "step": 37375 }, { "epoch": 2.5397472482674277, "grad_norm": 1.5343434810638428, "learning_rate": 0.0006826504959913032, "loss": 3.4547, "step": 37380 }, { "epoch": 2.5400869683380893, "grad_norm": 1.8048299551010132, "learning_rate": 0.0006826080309824705, "loss": 3.5071, "step": 37385 }, { "epoch": 2.5404266884087514, 
"grad_norm": 2.383769989013672, "learning_rate": 0.0006825655659736377, "loss": 3.5697, "step": 37390 }, { "epoch": 2.540766408479413, "grad_norm": 1.5904558897018433, "learning_rate": 0.000682523100964805, "loss": 3.5632, "step": 37395 }, { "epoch": 2.5411061285500747, "grad_norm": 2.142784595489502, "learning_rate": 0.0006824806359559723, "loss": 3.8284, "step": 37400 }, { "epoch": 2.5414458486207367, "grad_norm": 1.92389714717865, "learning_rate": 0.0006824381709471395, "loss": 3.3432, "step": 37405 }, { "epoch": 2.5417855686913984, "grad_norm": 1.8195526599884033, "learning_rate": 0.0006823957059383068, "loss": 3.5637, "step": 37410 }, { "epoch": 2.54212528876206, "grad_norm": 1.9208611249923706, "learning_rate": 0.0006823532409294742, "loss": 3.7576, "step": 37415 }, { "epoch": 2.542465008832722, "grad_norm": 1.9897503852844238, "learning_rate": 0.0006823107759206414, "loss": 3.7807, "step": 37420 }, { "epoch": 2.5428047289033837, "grad_norm": 2.0662546157836914, "learning_rate": 0.0006822683109118086, "loss": 3.6282, "step": 37425 }, { "epoch": 2.5431444489740453, "grad_norm": 1.779291033744812, "learning_rate": 0.000682225845902976, "loss": 3.6144, "step": 37430 }, { "epoch": 2.5434841690447074, "grad_norm": 2.3041276931762695, "learning_rate": 0.0006821833808941432, "loss": 3.496, "step": 37435 }, { "epoch": 2.543823889115369, "grad_norm": 2.05456805229187, "learning_rate": 0.0006821409158853104, "loss": 3.4499, "step": 37440 }, { "epoch": 2.5441636091860307, "grad_norm": 2.1711621284484863, "learning_rate": 0.0006820984508764778, "loss": 3.35, "step": 37445 }, { "epoch": 2.5445033292566928, "grad_norm": 1.9971789121627808, "learning_rate": 0.0006820559858676451, "loss": 3.3427, "step": 37450 }, { "epoch": 2.5448430493273544, "grad_norm": 3.0005979537963867, "learning_rate": 0.0006820135208588123, "loss": 3.4891, "step": 37455 }, { "epoch": 2.545182769398016, "grad_norm": 1.8664921522140503, "learning_rate": 0.0006819710558499797, "loss": 3.5735, "step": 
37460 }, { "epoch": 2.5455224894686777, "grad_norm": 2.1953413486480713, "learning_rate": 0.0006819285908411469, "loss": 3.5733, "step": 37465 }, { "epoch": 2.5458622095393397, "grad_norm": 1.7724395990371704, "learning_rate": 0.0006818861258323142, "loss": 3.5489, "step": 37470 }, { "epoch": 2.5462019296100014, "grad_norm": 1.9779261350631714, "learning_rate": 0.0006818436608234814, "loss": 3.3835, "step": 37475 }, { "epoch": 2.546541649680663, "grad_norm": 1.8312777280807495, "learning_rate": 0.0006818011958146487, "loss": 3.2277, "step": 37480 }, { "epoch": 2.546881369751325, "grad_norm": 1.6964340209960938, "learning_rate": 0.0006817587308058161, "loss": 3.7094, "step": 37485 }, { "epoch": 2.5472210898219867, "grad_norm": 2.032829761505127, "learning_rate": 0.0006817162657969833, "loss": 3.7226, "step": 37490 }, { "epoch": 2.5475608098926483, "grad_norm": 2.136857509613037, "learning_rate": 0.0006816738007881506, "loss": 3.4624, "step": 37495 }, { "epoch": 2.54790052996331, "grad_norm": 2.0949044227600098, "learning_rate": 0.0006816313357793179, "loss": 3.533, "step": 37500 }, { "epoch": 2.548240250033972, "grad_norm": 1.6432446241378784, "learning_rate": 0.0006815888707704851, "loss": 3.5948, "step": 37505 }, { "epoch": 2.5485799701046337, "grad_norm": 2.4343390464782715, "learning_rate": 0.0006815464057616524, "loss": 3.5682, "step": 37510 }, { "epoch": 2.5489196901752953, "grad_norm": 1.8384275436401367, "learning_rate": 0.0006815039407528197, "loss": 3.4756, "step": 37515 }, { "epoch": 2.5492594102459574, "grad_norm": 1.6864622831344604, "learning_rate": 0.000681461475743987, "loss": 3.4538, "step": 37520 }, { "epoch": 2.549599130316619, "grad_norm": 2.214017391204834, "learning_rate": 0.0006814190107351543, "loss": 3.6361, "step": 37525 }, { "epoch": 2.5499388503872806, "grad_norm": 1.7373015880584717, "learning_rate": 0.0006813765457263216, "loss": 3.3725, "step": 37530 }, { "epoch": 2.5502785704579427, "grad_norm": 1.5070524215698242, "learning_rate": 
0.0006813340807174888, "loss": 3.61, "step": 37535 }, { "epoch": 2.5506182905286043, "grad_norm": 1.8833904266357422, "learning_rate": 0.000681291615708656, "loss": 3.4025, "step": 37540 }, { "epoch": 2.550958010599266, "grad_norm": 1.9991204738616943, "learning_rate": 0.0006812491506998234, "loss": 3.5572, "step": 37545 }, { "epoch": 2.551297730669928, "grad_norm": 2.1429831981658936, "learning_rate": 0.0006812066856909906, "loss": 3.3874, "step": 37550 }, { "epoch": 2.5516374507405897, "grad_norm": 1.7563717365264893, "learning_rate": 0.0006811642206821579, "loss": 3.2078, "step": 37555 }, { "epoch": 2.5519771708112513, "grad_norm": 1.7078982591629028, "learning_rate": 0.0006811217556733253, "loss": 3.1901, "step": 37560 }, { "epoch": 2.5523168908819134, "grad_norm": 1.8427501916885376, "learning_rate": 0.0006810792906644925, "loss": 3.3869, "step": 37565 }, { "epoch": 2.552656610952575, "grad_norm": 2.4892947673797607, "learning_rate": 0.0006810368256556597, "loss": 3.3131, "step": 37570 }, { "epoch": 2.5529963310232366, "grad_norm": 1.9223064184188843, "learning_rate": 0.000680994360646827, "loss": 3.4614, "step": 37575 }, { "epoch": 2.5533360510938987, "grad_norm": 1.8923563957214355, "learning_rate": 0.0006809518956379943, "loss": 3.6799, "step": 37580 }, { "epoch": 2.5536757711645603, "grad_norm": 1.5145633220672607, "learning_rate": 0.0006809094306291615, "loss": 3.1628, "step": 37585 }, { "epoch": 2.554015491235222, "grad_norm": 2.2230937480926514, "learning_rate": 0.0006808669656203289, "loss": 3.5626, "step": 37590 }, { "epoch": 2.554355211305884, "grad_norm": 1.9866116046905518, "learning_rate": 0.0006808245006114962, "loss": 3.6125, "step": 37595 }, { "epoch": 2.5546949313765457, "grad_norm": 1.8352628946304321, "learning_rate": 0.0006807820356026634, "loss": 3.1868, "step": 37600 }, { "epoch": 2.5550346514472073, "grad_norm": 1.6084375381469727, "learning_rate": 0.0006807395705938307, "loss": 3.504, "step": 37605 }, { "epoch": 2.5553743715178694, 
"grad_norm": 2.2952675819396973, "learning_rate": 0.000680697105584998, "loss": 3.5825, "step": 37610 }, { "epoch": 2.555714091588531, "grad_norm": 2.598555088043213, "learning_rate": 0.0006806546405761652, "loss": 3.201, "step": 37615 }, { "epoch": 2.5560538116591927, "grad_norm": 1.613495945930481, "learning_rate": 0.0006806121755673325, "loss": 3.5773, "step": 37620 }, { "epoch": 2.5563935317298547, "grad_norm": 2.829346179962158, "learning_rate": 0.0006805697105584999, "loss": 3.5544, "step": 37625 }, { "epoch": 2.5567332518005164, "grad_norm": 2.1525046825408936, "learning_rate": 0.0006805272455496671, "loss": 3.4697, "step": 37630 }, { "epoch": 2.557072971871178, "grad_norm": 2.0354950428009033, "learning_rate": 0.0006804847805408344, "loss": 3.6654, "step": 37635 }, { "epoch": 2.55741269194184, "grad_norm": 2.3100502490997314, "learning_rate": 0.0006804423155320016, "loss": 3.4441, "step": 37640 }, { "epoch": 2.5577524120125017, "grad_norm": 1.7916629314422607, "learning_rate": 0.0006803998505231689, "loss": 3.6193, "step": 37645 }, { "epoch": 2.5580921320831633, "grad_norm": 1.7745500802993774, "learning_rate": 0.0006803573855143362, "loss": 3.4362, "step": 37650 }, { "epoch": 2.5584318521538254, "grad_norm": 1.7780588865280151, "learning_rate": 0.0006803149205055034, "loss": 3.4846, "step": 37655 }, { "epoch": 2.558771572224487, "grad_norm": 1.9643398523330688, "learning_rate": 0.0006802724554966708, "loss": 3.4856, "step": 37660 }, { "epoch": 2.5591112922951487, "grad_norm": 1.632834553718567, "learning_rate": 0.0006802299904878381, "loss": 3.2023, "step": 37665 }, { "epoch": 2.5594510123658107, "grad_norm": 1.5713448524475098, "learning_rate": 0.0006801875254790053, "loss": 3.4937, "step": 37670 }, { "epoch": 2.5597907324364724, "grad_norm": 1.9457731246948242, "learning_rate": 0.0006801450604701725, "loss": 3.6542, "step": 37675 }, { "epoch": 2.560130452507134, "grad_norm": 2.296828269958496, "learning_rate": 0.0006801025954613399, "loss": 3.5463, 
"step": 37680 }, { "epoch": 2.560470172577796, "grad_norm": 1.9881370067596436, "learning_rate": 0.0006800601304525071, "loss": 3.5151, "step": 37685 }, { "epoch": 2.5608098926484577, "grad_norm": 2.44297456741333, "learning_rate": 0.0006800176654436743, "loss": 3.7491, "step": 37690 }, { "epoch": 2.5611496127191193, "grad_norm": 1.6500157117843628, "learning_rate": 0.0006799752004348418, "loss": 3.5208, "step": 37695 }, { "epoch": 2.5614893327897814, "grad_norm": 1.9250657558441162, "learning_rate": 0.000679932735426009, "loss": 3.421, "step": 37700 }, { "epoch": 2.561829052860443, "grad_norm": 2.1756181716918945, "learning_rate": 0.0006798902704171762, "loss": 3.4618, "step": 37705 }, { "epoch": 2.5621687729311047, "grad_norm": 1.9979335069656372, "learning_rate": 0.0006798478054083436, "loss": 3.393, "step": 37710 }, { "epoch": 2.5625084930017668, "grad_norm": 1.671593189239502, "learning_rate": 0.0006798053403995108, "loss": 3.4893, "step": 37715 }, { "epoch": 2.5628482130724284, "grad_norm": 2.1104114055633545, "learning_rate": 0.000679762875390678, "loss": 3.4624, "step": 37720 }, { "epoch": 2.56318793314309, "grad_norm": 2.0773723125457764, "learning_rate": 0.0006797204103818455, "loss": 3.245, "step": 37725 }, { "epoch": 2.563527653213752, "grad_norm": 2.037781238555908, "learning_rate": 0.0006796779453730127, "loss": 3.6683, "step": 37730 }, { "epoch": 2.5638673732844137, "grad_norm": 2.1134016513824463, "learning_rate": 0.0006796354803641799, "loss": 3.5375, "step": 37735 }, { "epoch": 2.5642070933550754, "grad_norm": 1.540381908416748, "learning_rate": 0.0006795930153553472, "loss": 3.5605, "step": 37740 }, { "epoch": 2.5645468134257374, "grad_norm": 1.7681045532226562, "learning_rate": 0.0006795505503465145, "loss": 3.6498, "step": 37745 }, { "epoch": 2.564886533496399, "grad_norm": 1.8093386888504028, "learning_rate": 0.0006795080853376817, "loss": 3.3398, "step": 37750 }, { "epoch": 2.5652262535670607, "grad_norm": 1.717056155204773, "learning_rate": 
0.000679465620328849, "loss": 3.5246, "step": 37755 }, { "epoch": 2.5655659736377228, "grad_norm": 1.9544901847839355, "learning_rate": 0.0006794231553200164, "loss": 3.5837, "step": 37760 }, { "epoch": 2.5659056937083844, "grad_norm": 1.8670880794525146, "learning_rate": 0.0006793806903111836, "loss": 3.5524, "step": 37765 }, { "epoch": 2.566245413779046, "grad_norm": 2.0649781227111816, "learning_rate": 0.0006793382253023509, "loss": 3.3625, "step": 37770 }, { "epoch": 2.566585133849708, "grad_norm": 2.2533812522888184, "learning_rate": 0.0006792957602935181, "loss": 3.8453, "step": 37775 }, { "epoch": 2.5669248539203697, "grad_norm": 1.6533910036087036, "learning_rate": 0.0006792532952846854, "loss": 3.5747, "step": 37780 }, { "epoch": 2.5672645739910314, "grad_norm": 1.5173028707504272, "learning_rate": 0.0006792108302758527, "loss": 3.4164, "step": 37785 }, { "epoch": 2.5676042940616934, "grad_norm": 1.775769591331482, "learning_rate": 0.0006791683652670199, "loss": 3.8357, "step": 37790 }, { "epoch": 2.567944014132355, "grad_norm": 1.7166417837142944, "learning_rate": 0.0006791259002581873, "loss": 3.4908, "step": 37795 }, { "epoch": 2.5682837342030167, "grad_norm": 2.71997332572937, "learning_rate": 0.0006790834352493546, "loss": 3.7131, "step": 37800 }, { "epoch": 2.5686234542736783, "grad_norm": 1.8328567743301392, "learning_rate": 0.0006790409702405218, "loss": 3.459, "step": 37805 }, { "epoch": 2.5689631743443404, "grad_norm": 1.5724366903305054, "learning_rate": 0.0006789985052316892, "loss": 3.5478, "step": 37810 }, { "epoch": 2.569302894415002, "grad_norm": 1.6694949865341187, "learning_rate": 0.0006789560402228564, "loss": 3.5069, "step": 37815 }, { "epoch": 2.5696426144856637, "grad_norm": 1.958440899848938, "learning_rate": 0.0006789135752140236, "loss": 3.5369, "step": 37820 }, { "epoch": 2.5699823345563257, "grad_norm": 2.0782577991485596, "learning_rate": 0.000678871110205191, "loss": 3.5213, "step": 37825 }, { "epoch": 2.5703220546269874, 
"grad_norm": 1.8136874437332153, "learning_rate": 0.0006788286451963583, "loss": 3.1841, "step": 37830 }, { "epoch": 2.570661774697649, "grad_norm": 2.00423002243042, "learning_rate": 0.0006787861801875255, "loss": 3.3005, "step": 37835 }, { "epoch": 2.5710014947683106, "grad_norm": 1.9283891916275024, "learning_rate": 0.0006787437151786928, "loss": 3.4934, "step": 37840 }, { "epoch": 2.5713412148389727, "grad_norm": 2.134859323501587, "learning_rate": 0.0006787012501698601, "loss": 3.4509, "step": 37845 }, { "epoch": 2.5716809349096343, "grad_norm": 2.1233608722686768, "learning_rate": 0.0006786587851610273, "loss": 3.7083, "step": 37850 }, { "epoch": 2.572020654980296, "grad_norm": 1.8289611339569092, "learning_rate": 0.0006786163201521946, "loss": 3.6141, "step": 37855 }, { "epoch": 2.572360375050958, "grad_norm": 1.7070783376693726, "learning_rate": 0.0006785738551433619, "loss": 3.4704, "step": 37860 }, { "epoch": 2.5727000951216197, "grad_norm": 1.8469555377960205, "learning_rate": 0.0006785313901345292, "loss": 3.6859, "step": 37865 }, { "epoch": 2.5730398151922813, "grad_norm": 1.9322071075439453, "learning_rate": 0.0006784889251256965, "loss": 3.5044, "step": 37870 }, { "epoch": 2.5733795352629434, "grad_norm": 2.043165683746338, "learning_rate": 0.0006784464601168637, "loss": 3.6442, "step": 37875 }, { "epoch": 2.573719255333605, "grad_norm": 2.494982957839966, "learning_rate": 0.000678403995108031, "loss": 3.4195, "step": 37880 }, { "epoch": 2.5740589754042666, "grad_norm": 2.1255383491516113, "learning_rate": 0.0006783615300991983, "loss": 3.4353, "step": 37885 }, { "epoch": 2.5743986954749287, "grad_norm": 1.5402867794036865, "learning_rate": 0.0006783190650903655, "loss": 3.4708, "step": 37890 }, { "epoch": 2.5747384155455904, "grad_norm": 2.4538462162017822, "learning_rate": 0.0006782766000815328, "loss": 3.5605, "step": 37895 }, { "epoch": 2.575078135616252, "grad_norm": 2.169098377227783, "learning_rate": 0.0006782341350727002, "loss": 3.7488, 
"step": 37900 }, { "epoch": 2.575417855686914, "grad_norm": 1.8422327041625977, "learning_rate": 0.0006781916700638674, "loss": 3.7122, "step": 37905 }, { "epoch": 2.5757575757575757, "grad_norm": 2.3882813453674316, "learning_rate": 0.0006781492050550347, "loss": 3.5499, "step": 37910 }, { "epoch": 2.5760972958282373, "grad_norm": 2.1819674968719482, "learning_rate": 0.000678106740046202, "loss": 3.5184, "step": 37915 }, { "epoch": 2.5764370158988994, "grad_norm": 1.6325130462646484, "learning_rate": 0.0006780642750373692, "loss": 3.4168, "step": 37920 }, { "epoch": 2.576776735969561, "grad_norm": 2.19425630569458, "learning_rate": 0.0006780218100285364, "loss": 3.5727, "step": 37925 }, { "epoch": 2.5771164560402227, "grad_norm": 2.1335041522979736, "learning_rate": 0.0006779793450197038, "loss": 3.4192, "step": 37930 }, { "epoch": 2.5774561761108847, "grad_norm": 2.229919672012329, "learning_rate": 0.0006779368800108711, "loss": 3.2579, "step": 37935 }, { "epoch": 2.5777958961815464, "grad_norm": 1.8402048349380493, "learning_rate": 0.0006778944150020383, "loss": 3.3648, "step": 37940 }, { "epoch": 2.578135616252208, "grad_norm": 1.8864998817443848, "learning_rate": 0.0006778519499932057, "loss": 3.3531, "step": 37945 }, { "epoch": 2.57847533632287, "grad_norm": 2.0876965522766113, "learning_rate": 0.0006778094849843729, "loss": 3.4291, "step": 37950 }, { "epoch": 2.5788150563935317, "grad_norm": 2.0758135318756104, "learning_rate": 0.0006777670199755401, "loss": 3.3752, "step": 37955 }, { "epoch": 2.5791547764641933, "grad_norm": 2.079012155532837, "learning_rate": 0.0006777245549667075, "loss": 3.2532, "step": 37960 }, { "epoch": 2.5794944965348554, "grad_norm": 2.0416295528411865, "learning_rate": 0.0006776820899578747, "loss": 3.6925, "step": 37965 }, { "epoch": 2.579834216605517, "grad_norm": 2.127652406692505, "learning_rate": 0.000677639624949042, "loss": 3.3932, "step": 37970 }, { "epoch": 2.5801739366761787, "grad_norm": 1.9455097913742065, 
"learning_rate": 0.0006775971599402093, "loss": 3.481, "step": 37975 }, { "epoch": 2.5805136567468407, "grad_norm": 1.6759321689605713, "learning_rate": 0.0006775546949313766, "loss": 3.3968, "step": 37980 }, { "epoch": 2.5808533768175024, "grad_norm": 2.1957991123199463, "learning_rate": 0.0006775122299225438, "loss": 3.373, "step": 37985 }, { "epoch": 2.581193096888164, "grad_norm": 2.119511604309082, "learning_rate": 0.0006774697649137111, "loss": 3.6079, "step": 37990 }, { "epoch": 2.581532816958826, "grad_norm": 1.7653765678405762, "learning_rate": 0.0006774272999048784, "loss": 3.671, "step": 37995 }, { "epoch": 2.5818725370294877, "grad_norm": 2.2496209144592285, "learning_rate": 0.0006773848348960456, "loss": 3.3353, "step": 38000 }, { "epoch": 2.5822122571001493, "grad_norm": 2.3479714393615723, "learning_rate": 0.000677342369887213, "loss": 3.464, "step": 38005 }, { "epoch": 2.5825519771708114, "grad_norm": 1.657193660736084, "learning_rate": 0.0006772999048783803, "loss": 3.7651, "step": 38010 }, { "epoch": 2.582891697241473, "grad_norm": 2.2482240200042725, "learning_rate": 0.0006772574398695475, "loss": 3.6636, "step": 38015 }, { "epoch": 2.5832314173121347, "grad_norm": 1.7004144191741943, "learning_rate": 0.0006772149748607148, "loss": 3.6292, "step": 38020 }, { "epoch": 2.5835711373827968, "grad_norm": 1.4448211193084717, "learning_rate": 0.000677172509851882, "loss": 3.5091, "step": 38025 }, { "epoch": 2.5839108574534584, "grad_norm": 1.9337836503982544, "learning_rate": 0.0006771300448430493, "loss": 3.4968, "step": 38030 }, { "epoch": 2.58425057752412, "grad_norm": 2.30444073677063, "learning_rate": 0.0006770875798342166, "loss": 3.4927, "step": 38035 }, { "epoch": 2.584590297594782, "grad_norm": 2.035043954849243, "learning_rate": 0.0006770451148253839, "loss": 3.7002, "step": 38040 }, { "epoch": 2.5849300176654437, "grad_norm": 1.7582367658615112, "learning_rate": 0.0006770026498165512, "loss": 3.5452, "step": 38045 }, { "epoch": 
2.5852697377361054, "grad_norm": 1.9826948642730713, "learning_rate": 0.0006769601848077185, "loss": 3.4153, "step": 38050 }, { "epoch": 2.5856094578067674, "grad_norm": 1.9521597623825073, "learning_rate": 0.0006769177197988857, "loss": 3.6578, "step": 38055 }, { "epoch": 2.585949177877429, "grad_norm": 2.01812744140625, "learning_rate": 0.000676875254790053, "loss": 3.5758, "step": 38060 }, { "epoch": 2.5862888979480907, "grad_norm": 2.3802852630615234, "learning_rate": 0.0006768327897812203, "loss": 3.7603, "step": 38065 }, { "epoch": 2.5866286180187528, "grad_norm": 1.7035598754882812, "learning_rate": 0.0006767903247723875, "loss": 3.6349, "step": 38070 }, { "epoch": 2.5869683380894144, "grad_norm": 2.040395975112915, "learning_rate": 0.0006767478597635548, "loss": 3.6457, "step": 38075 }, { "epoch": 2.587308058160076, "grad_norm": 2.1955416202545166, "learning_rate": 0.0006767053947547222, "loss": 3.375, "step": 38080 }, { "epoch": 2.587647778230738, "grad_norm": 1.411986231803894, "learning_rate": 0.0006766629297458894, "loss": 3.5353, "step": 38085 }, { "epoch": 2.5879874983013997, "grad_norm": 1.9406524896621704, "learning_rate": 0.0006766204647370566, "loss": 3.511, "step": 38090 }, { "epoch": 2.5883272183720614, "grad_norm": 1.9824309349060059, "learning_rate": 0.000676577999728224, "loss": 3.5229, "step": 38095 }, { "epoch": 2.5886669384427234, "grad_norm": 1.583524465560913, "learning_rate": 0.0006765355347193912, "loss": 3.4623, "step": 38100 }, { "epoch": 2.589006658513385, "grad_norm": 1.917067050933838, "learning_rate": 0.0006764930697105584, "loss": 3.7757, "step": 38105 }, { "epoch": 2.5893463785840467, "grad_norm": 2.522609233856201, "learning_rate": 0.0006764506047017259, "loss": 3.776, "step": 38110 }, { "epoch": 2.589686098654709, "grad_norm": 2.480823278427124, "learning_rate": 0.0006764081396928931, "loss": 3.4626, "step": 38115 }, { "epoch": 2.5900258187253704, "grad_norm": 2.3530161380767822, "learning_rate": 0.0006763656746840603, 
"loss": 3.3664, "step": 38120 }, { "epoch": 2.590365538796032, "grad_norm": 2.0183756351470947, "learning_rate": 0.0006763232096752276, "loss": 3.6362, "step": 38125 }, { "epoch": 2.590705258866694, "grad_norm": 2.354381561279297, "learning_rate": 0.0006762807446663949, "loss": 3.4712, "step": 38130 }, { "epoch": 2.5910449789373557, "grad_norm": 1.9849616289138794, "learning_rate": 0.0006762382796575621, "loss": 3.5712, "step": 38135 }, { "epoch": 2.5913846990080174, "grad_norm": 2.4174180030822754, "learning_rate": 0.0006761958146487294, "loss": 3.4011, "step": 38140 }, { "epoch": 2.591724419078679, "grad_norm": 2.1378352642059326, "learning_rate": 0.0006761533496398968, "loss": 3.6863, "step": 38145 }, { "epoch": 2.592064139149341, "grad_norm": 2.5333502292633057, "learning_rate": 0.0006761108846310641, "loss": 3.4679, "step": 38150 }, { "epoch": 2.5924038592200027, "grad_norm": 1.7875999212265015, "learning_rate": 0.0006760684196222313, "loss": 3.5273, "step": 38155 }, { "epoch": 2.5927435792906643, "grad_norm": 1.8577066659927368, "learning_rate": 0.0006760259546133985, "loss": 3.5284, "step": 38160 }, { "epoch": 2.5930832993613264, "grad_norm": 1.9427109956741333, "learning_rate": 0.0006759834896045659, "loss": 3.3374, "step": 38165 }, { "epoch": 2.593423019431988, "grad_norm": 2.6696970462799072, "learning_rate": 0.0006759410245957331, "loss": 3.446, "step": 38170 }, { "epoch": 2.5937627395026497, "grad_norm": 1.9224944114685059, "learning_rate": 0.0006758985595869003, "loss": 3.4042, "step": 38175 }, { "epoch": 2.5941024595733113, "grad_norm": 1.906386137008667, "learning_rate": 0.0006758560945780678, "loss": 3.5384, "step": 38180 }, { "epoch": 2.5944421796439734, "grad_norm": 1.8576321601867676, "learning_rate": 0.000675813629569235, "loss": 3.2857, "step": 38185 }, { "epoch": 2.594781899714635, "grad_norm": 1.9989612102508545, "learning_rate": 0.0006757711645604022, "loss": 3.4156, "step": 38190 }, { "epoch": 2.5951216197852967, "grad_norm": 
2.3418092727661133, "learning_rate": 0.0006757286995515696, "loss": 3.7272, "step": 38195 }, { "epoch": 2.5954613398559587, "grad_norm": 2.0404932498931885, "learning_rate": 0.0006756862345427368, "loss": 3.3992, "step": 38200 }, { "epoch": 2.5958010599266204, "grad_norm": 1.9498577117919922, "learning_rate": 0.000675643769533904, "loss": 3.4312, "step": 38205 }, { "epoch": 2.596140779997282, "grad_norm": 1.6188273429870605, "learning_rate": 0.0006756013045250713, "loss": 3.5888, "step": 38210 }, { "epoch": 2.596480500067944, "grad_norm": 2.9003994464874268, "learning_rate": 0.0006755588395162387, "loss": 3.5071, "step": 38215 }, { "epoch": 2.5968202201386057, "grad_norm": 1.7944210767745972, "learning_rate": 0.0006755163745074059, "loss": 3.3534, "step": 38220 }, { "epoch": 2.5971599402092673, "grad_norm": 1.7356724739074707, "learning_rate": 0.0006754739094985732, "loss": 3.4502, "step": 38225 }, { "epoch": 2.5974996602799294, "grad_norm": 1.7592934370040894, "learning_rate": 0.0006754314444897405, "loss": 3.5947, "step": 38230 }, { "epoch": 2.597839380350591, "grad_norm": 1.87758469581604, "learning_rate": 0.0006753889794809077, "loss": 3.6211, "step": 38235 }, { "epoch": 2.5981791004212527, "grad_norm": 1.6165028810501099, "learning_rate": 0.000675346514472075, "loss": 3.4318, "step": 38240 }, { "epoch": 2.5985188204919147, "grad_norm": 2.0050408840179443, "learning_rate": 0.0006753040494632423, "loss": 3.6829, "step": 38245 }, { "epoch": 2.5988585405625764, "grad_norm": 2.02394962310791, "learning_rate": 0.0006752615844544096, "loss": 3.7743, "step": 38250 }, { "epoch": 2.599198260633238, "grad_norm": 2.0662522315979004, "learning_rate": 0.0006752191194455769, "loss": 3.5597, "step": 38255 }, { "epoch": 2.5995379807039, "grad_norm": 1.7259219884872437, "learning_rate": 0.0006751766544367442, "loss": 3.5229, "step": 38260 }, { "epoch": 2.5998777007745617, "grad_norm": 2.167975902557373, "learning_rate": 0.0006751341894279114, "loss": 3.3351, "step": 38265 }, { 
"epoch": 2.6002174208452233, "grad_norm": 2.1497628688812256, "learning_rate": 0.0006750917244190787, "loss": 3.7625, "step": 38270 }, { "epoch": 2.6005571409158854, "grad_norm": 1.4825413227081299, "learning_rate": 0.0006750492594102459, "loss": 3.404, "step": 38275 }, { "epoch": 2.600896860986547, "grad_norm": 2.8730483055114746, "learning_rate": 0.0006750067944014132, "loss": 3.4253, "step": 38280 }, { "epoch": 2.6012365810572087, "grad_norm": 1.7466950416564941, "learning_rate": 0.0006749643293925806, "loss": 3.611, "step": 38285 }, { "epoch": 2.6015763011278707, "grad_norm": 2.369882106781006, "learning_rate": 0.0006749218643837478, "loss": 3.3601, "step": 38290 }, { "epoch": 2.6019160211985324, "grad_norm": 2.1134212017059326, "learning_rate": 0.0006748793993749151, "loss": 3.5367, "step": 38295 }, { "epoch": 2.602255741269194, "grad_norm": 2.683480978012085, "learning_rate": 0.0006748369343660824, "loss": 3.6622, "step": 38300 }, { "epoch": 2.602595461339856, "grad_norm": 2.4852147102355957, "learning_rate": 0.0006747944693572496, "loss": 3.3735, "step": 38305 }, { "epoch": 2.6029351814105177, "grad_norm": 1.664330005645752, "learning_rate": 0.0006747520043484168, "loss": 3.4171, "step": 38310 }, { "epoch": 2.6032749014811793, "grad_norm": 2.4040706157684326, "learning_rate": 0.0006747095393395843, "loss": 3.3846, "step": 38315 }, { "epoch": 2.6036146215518414, "grad_norm": 1.8377605676651, "learning_rate": 0.0006746670743307515, "loss": 3.5798, "step": 38320 }, { "epoch": 2.603954341622503, "grad_norm": 2.237058162689209, "learning_rate": 0.0006746246093219187, "loss": 3.8118, "step": 38325 }, { "epoch": 2.6042940616931647, "grad_norm": 1.5999209880828857, "learning_rate": 0.0006745821443130861, "loss": 3.5809, "step": 38330 }, { "epoch": 2.6046337817638268, "grad_norm": 1.7262240648269653, "learning_rate": 0.0006745396793042533, "loss": 3.5691, "step": 38335 }, { "epoch": 2.6049735018344884, "grad_norm": 2.369396209716797, "learning_rate": 
0.0006744972142954205, "loss": 3.5985, "step": 38340 }, { "epoch": 2.60531322190515, "grad_norm": 1.7398394346237183, "learning_rate": 0.0006744547492865879, "loss": 3.5488, "step": 38345 }, { "epoch": 2.605652941975812, "grad_norm": 2.7169699668884277, "learning_rate": 0.0006744122842777552, "loss": 3.3107, "step": 38350 }, { "epoch": 2.6059926620464737, "grad_norm": 2.2025516033172607, "learning_rate": 0.0006743698192689224, "loss": 3.3866, "step": 38355 }, { "epoch": 2.6063323821171354, "grad_norm": 2.115766763687134, "learning_rate": 0.0006743273542600898, "loss": 3.4258, "step": 38360 }, { "epoch": 2.6066721021877974, "grad_norm": 2.1467788219451904, "learning_rate": 0.000674284889251257, "loss": 3.3134, "step": 38365 }, { "epoch": 2.607011822258459, "grad_norm": 1.5800564289093018, "learning_rate": 0.0006742424242424242, "loss": 3.6281, "step": 38370 }, { "epoch": 2.6073515423291207, "grad_norm": 1.6294622421264648, "learning_rate": 0.0006741999592335915, "loss": 3.7668, "step": 38375 }, { "epoch": 2.6076912623997828, "grad_norm": 1.7742589712142944, "learning_rate": 0.0006741574942247588, "loss": 3.6109, "step": 38380 }, { "epoch": 2.6080309824704444, "grad_norm": 2.054821014404297, "learning_rate": 0.0006741150292159261, "loss": 3.5952, "step": 38385 }, { "epoch": 2.608370702541106, "grad_norm": 1.6055036783218384, "learning_rate": 0.0006740725642070934, "loss": 3.3944, "step": 38390 }, { "epoch": 2.608710422611768, "grad_norm": 1.6356335878372192, "learning_rate": 0.0006740300991982607, "loss": 3.6793, "step": 38395 }, { "epoch": 2.6090501426824297, "grad_norm": 1.4584157466888428, "learning_rate": 0.0006739876341894279, "loss": 3.6402, "step": 38400 }, { "epoch": 2.6093898627530914, "grad_norm": 1.980802059173584, "learning_rate": 0.0006739451691805952, "loss": 3.4653, "step": 38405 }, { "epoch": 2.6097295828237534, "grad_norm": 1.6573907136917114, "learning_rate": 0.0006739027041717624, "loss": 3.5082, "step": 38410 }, { "epoch": 2.610069302894415, 
"grad_norm": 1.8618868589401245, "learning_rate": 0.0006738602391629297, "loss": 3.6094, "step": 38415 }, { "epoch": 2.6104090229650767, "grad_norm": 1.7979873418807983, "learning_rate": 0.0006738177741540971, "loss": 3.3864, "step": 38420 }, { "epoch": 2.610748743035739, "grad_norm": 1.9072132110595703, "learning_rate": 0.0006737753091452643, "loss": 3.475, "step": 38425 }, { "epoch": 2.6110884631064004, "grad_norm": 1.8245922327041626, "learning_rate": 0.0006737328441364316, "loss": 3.2123, "step": 38430 }, { "epoch": 2.611428183177062, "grad_norm": 1.946069359779358, "learning_rate": 0.0006736903791275989, "loss": 3.3355, "step": 38435 }, { "epoch": 2.611767903247724, "grad_norm": 2.3046865463256836, "learning_rate": 0.0006736479141187661, "loss": 3.4755, "step": 38440 }, { "epoch": 2.6121076233183858, "grad_norm": 2.2902884483337402, "learning_rate": 0.0006736054491099334, "loss": 3.2504, "step": 38445 }, { "epoch": 2.6124473433890474, "grad_norm": 1.7423863410949707, "learning_rate": 0.0006735629841011007, "loss": 3.3974, "step": 38450 }, { "epoch": 2.6127870634597095, "grad_norm": 1.8427064418792725, "learning_rate": 0.000673520519092268, "loss": 3.5085, "step": 38455 }, { "epoch": 2.613126783530371, "grad_norm": 1.744056224822998, "learning_rate": 0.0006734780540834352, "loss": 3.6168, "step": 38460 }, { "epoch": 2.6134665036010327, "grad_norm": 1.713815689086914, "learning_rate": 0.0006734355890746026, "loss": 3.5826, "step": 38465 }, { "epoch": 2.613806223671695, "grad_norm": 2.4620094299316406, "learning_rate": 0.0006733931240657698, "loss": 3.4426, "step": 38470 }, { "epoch": 2.6141459437423564, "grad_norm": 1.8954858779907227, "learning_rate": 0.000673350659056937, "loss": 3.3783, "step": 38475 }, { "epoch": 2.614485663813018, "grad_norm": 2.12970232963562, "learning_rate": 0.0006733081940481044, "loss": 3.6264, "step": 38480 }, { "epoch": 2.6148253838836797, "grad_norm": 1.7565521001815796, "learning_rate": 0.0006732657290392716, "loss": 3.4182, 
"step": 38485 }, { "epoch": 2.6151651039543418, "grad_norm": 2.7119626998901367, "learning_rate": 0.000673223264030439, "loss": 3.3016, "step": 38490 }, { "epoch": 2.6155048240250034, "grad_norm": 2.119771718978882, "learning_rate": 0.0006731807990216063, "loss": 3.5515, "step": 38495 }, { "epoch": 2.615844544095665, "grad_norm": 2.6237738132476807, "learning_rate": 0.0006731383340127735, "loss": 3.3365, "step": 38500 }, { "epoch": 2.616184264166327, "grad_norm": 1.8492987155914307, "learning_rate": 0.0006730958690039408, "loss": 3.2114, "step": 38505 }, { "epoch": 2.6165239842369887, "grad_norm": 2.97222900390625, "learning_rate": 0.000673053403995108, "loss": 3.4623, "step": 38510 }, { "epoch": 2.6168637043076504, "grad_norm": 3.2744359970092773, "learning_rate": 0.0006730109389862753, "loss": 3.2946, "step": 38515 }, { "epoch": 2.617203424378312, "grad_norm": 1.993544101715088, "learning_rate": 0.0006729684739774426, "loss": 3.4895, "step": 38520 }, { "epoch": 2.617543144448974, "grad_norm": 1.8445978164672852, "learning_rate": 0.0006729260089686099, "loss": 3.319, "step": 38525 }, { "epoch": 2.6178828645196357, "grad_norm": 2.3482437133789062, "learning_rate": 0.0006728835439597772, "loss": 3.4541, "step": 38530 }, { "epoch": 2.6182225845902973, "grad_norm": 1.3124078512191772, "learning_rate": 0.0006728410789509445, "loss": 3.1802, "step": 38535 }, { "epoch": 2.6185623046609594, "grad_norm": 1.5157750844955444, "learning_rate": 0.0006727986139421117, "loss": 3.5289, "step": 38540 }, { "epoch": 2.618902024731621, "grad_norm": 1.737012505531311, "learning_rate": 0.000672756148933279, "loss": 3.4201, "step": 38545 }, { "epoch": 2.6192417448022827, "grad_norm": 1.4602605104446411, "learning_rate": 0.0006727136839244463, "loss": 3.3894, "step": 38550 }, { "epoch": 2.6195814648729447, "grad_norm": 1.8919283151626587, "learning_rate": 0.0006726712189156135, "loss": 3.5583, "step": 38555 }, { "epoch": 2.6199211849436064, "grad_norm": 1.9239789247512817, 
"learning_rate": 0.0006726287539067808, "loss": 3.4752, "step": 38560 }, { "epoch": 2.620260905014268, "grad_norm": 1.9785480499267578, "learning_rate": 0.0006725862888979482, "loss": 3.538, "step": 38565 }, { "epoch": 2.62060062508493, "grad_norm": 2.121901750564575, "learning_rate": 0.0006725438238891154, "loss": 3.7097, "step": 38570 }, { "epoch": 2.6209403451555917, "grad_norm": 1.8272305727005005, "learning_rate": 0.0006725013588802826, "loss": 3.5161, "step": 38575 }, { "epoch": 2.6212800652262533, "grad_norm": 1.680481195449829, "learning_rate": 0.00067245889387145, "loss": 3.5859, "step": 38580 }, { "epoch": 2.6216197852969154, "grad_norm": 1.7746381759643555, "learning_rate": 0.0006724164288626172, "loss": 3.5973, "step": 38585 }, { "epoch": 2.621959505367577, "grad_norm": 1.8658668994903564, "learning_rate": 0.0006723739638537844, "loss": 3.5414, "step": 38590 }, { "epoch": 2.6222992254382387, "grad_norm": 2.471665143966675, "learning_rate": 0.0006723314988449519, "loss": 3.6049, "step": 38595 }, { "epoch": 2.6226389455089008, "grad_norm": 2.046705722808838, "learning_rate": 0.0006722890338361191, "loss": 3.5458, "step": 38600 }, { "epoch": 2.6229786655795624, "grad_norm": 1.9155621528625488, "learning_rate": 0.0006722465688272863, "loss": 3.7616, "step": 38605 }, { "epoch": 2.623318385650224, "grad_norm": 1.7498188018798828, "learning_rate": 0.0006722041038184536, "loss": 3.5966, "step": 38610 }, { "epoch": 2.623658105720886, "grad_norm": 1.7851029634475708, "learning_rate": 0.0006721616388096209, "loss": 3.3378, "step": 38615 }, { "epoch": 2.6239978257915477, "grad_norm": 2.4257357120513916, "learning_rate": 0.0006721191738007881, "loss": 3.503, "step": 38620 }, { "epoch": 2.6243375458622094, "grad_norm": 1.9422235488891602, "learning_rate": 0.0006720767087919554, "loss": 3.6881, "step": 38625 }, { "epoch": 2.6246772659328714, "grad_norm": 1.6779392957687378, "learning_rate": 0.0006720342437831228, "loss": 3.7475, "step": 38630 }, { "epoch": 
2.625016986003533, "grad_norm": 2.679643154144287, "learning_rate": 0.00067199177877429, "loss": 3.8678, "step": 38635 }, { "epoch": 2.6253567060741947, "grad_norm": 2.267449378967285, "learning_rate": 0.0006719493137654573, "loss": 3.6327, "step": 38640 }, { "epoch": 2.6256964261448568, "grad_norm": 1.7044793367385864, "learning_rate": 0.0006719068487566246, "loss": 3.5715, "step": 38645 }, { "epoch": 2.6260361462155184, "grad_norm": 2.1216602325439453, "learning_rate": 0.0006718643837477918, "loss": 3.6146, "step": 38650 }, { "epoch": 2.62637586628618, "grad_norm": 2.2653450965881348, "learning_rate": 0.0006718219187389591, "loss": 3.2356, "step": 38655 }, { "epoch": 2.626715586356842, "grad_norm": 2.0368216037750244, "learning_rate": 0.0006717794537301263, "loss": 3.3398, "step": 38660 }, { "epoch": 2.6270553064275037, "grad_norm": 1.798949122428894, "learning_rate": 0.0006717369887212937, "loss": 3.2958, "step": 38665 }, { "epoch": 2.6273950264981654, "grad_norm": 2.3422834873199463, "learning_rate": 0.000671694523712461, "loss": 3.5656, "step": 38670 }, { "epoch": 2.6277347465688274, "grad_norm": 2.2114412784576416, "learning_rate": 0.0006716520587036282, "loss": 3.4678, "step": 38675 }, { "epoch": 2.628074466639489, "grad_norm": 2.0994627475738525, "learning_rate": 0.0006716095936947955, "loss": 3.6509, "step": 38680 }, { "epoch": 2.6284141867101507, "grad_norm": 1.6650460958480835, "learning_rate": 0.0006715671286859628, "loss": 3.7343, "step": 38685 }, { "epoch": 2.6287539067808128, "grad_norm": 1.5194439888000488, "learning_rate": 0.00067152466367713, "loss": 3.5916, "step": 38690 }, { "epoch": 2.6290936268514744, "grad_norm": 1.9885005950927734, "learning_rate": 0.0006714821986682972, "loss": 3.4416, "step": 38695 }, { "epoch": 2.629433346922136, "grad_norm": 1.6140577793121338, "learning_rate": 0.0006714397336594647, "loss": 3.3521, "step": 38700 }, { "epoch": 2.629773066992798, "grad_norm": 1.8919057846069336, "learning_rate": 0.0006713972686506319, 
"loss": 3.583, "step": 38705 }, { "epoch": 2.6301127870634597, "grad_norm": 1.4350781440734863, "learning_rate": 0.0006713548036417991, "loss": 3.5999, "step": 38710 }, { "epoch": 2.6304525071341214, "grad_norm": 2.1859095096588135, "learning_rate": 0.0006713123386329665, "loss": 3.5541, "step": 38715 }, { "epoch": 2.6307922272047835, "grad_norm": 2.2796289920806885, "learning_rate": 0.0006712698736241337, "loss": 3.5966, "step": 38720 }, { "epoch": 2.631131947275445, "grad_norm": 1.6242560148239136, "learning_rate": 0.0006712274086153009, "loss": 3.4324, "step": 38725 }, { "epoch": 2.6314716673461067, "grad_norm": 2.419994354248047, "learning_rate": 0.0006711849436064683, "loss": 3.4601, "step": 38730 }, { "epoch": 2.631811387416769, "grad_norm": 2.1890015602111816, "learning_rate": 0.0006711424785976356, "loss": 3.5317, "step": 38735 }, { "epoch": 2.6321511074874304, "grad_norm": 2.2677688598632812, "learning_rate": 0.0006711000135888028, "loss": 3.4743, "step": 38740 }, { "epoch": 2.632490827558092, "grad_norm": 2.080934762954712, "learning_rate": 0.0006710575485799702, "loss": 3.5223, "step": 38745 }, { "epoch": 2.632830547628754, "grad_norm": 1.69635009765625, "learning_rate": 0.0006710150835711374, "loss": 3.5717, "step": 38750 }, { "epoch": 2.6331702676994158, "grad_norm": 2.672288417816162, "learning_rate": 0.0006709726185623046, "loss": 3.3154, "step": 38755 }, { "epoch": 2.6335099877700774, "grad_norm": 1.8519169092178345, "learning_rate": 0.0006709301535534719, "loss": 3.621, "step": 38760 }, { "epoch": 2.6338497078407395, "grad_norm": 1.747889518737793, "learning_rate": 0.0006708876885446392, "loss": 3.3499, "step": 38765 }, { "epoch": 2.634189427911401, "grad_norm": 2.456123113632202, "learning_rate": 0.0006708452235358065, "loss": 3.6289, "step": 38770 }, { "epoch": 2.6345291479820627, "grad_norm": 1.7479467391967773, "learning_rate": 0.0006708027585269738, "loss": 3.4422, "step": 38775 }, { "epoch": 2.634868868052725, "grad_norm": 1.653857946395874, 
"learning_rate": 0.0006707602935181411, "loss": 3.6905, "step": 38780 }, { "epoch": 2.6352085881233864, "grad_norm": 2.335437774658203, "learning_rate": 0.0006707178285093083, "loss": 3.7684, "step": 38785 }, { "epoch": 2.635548308194048, "grad_norm": 2.2767937183380127, "learning_rate": 0.0006706753635004756, "loss": 3.611, "step": 38790 }, { "epoch": 2.63588802826471, "grad_norm": 2.045140504837036, "learning_rate": 0.0006706328984916428, "loss": 3.4751, "step": 38795 }, { "epoch": 2.6362277483353718, "grad_norm": 1.8914036750793457, "learning_rate": 0.0006705904334828101, "loss": 3.5905, "step": 38800 }, { "epoch": 2.6365674684060334, "grad_norm": 2.176997423171997, "learning_rate": 0.0006705479684739775, "loss": 3.3388, "step": 38805 }, { "epoch": 2.6369071884766955, "grad_norm": 2.783702850341797, "learning_rate": 0.0006705055034651447, "loss": 3.471, "step": 38810 }, { "epoch": 2.637246908547357, "grad_norm": 1.9666619300842285, "learning_rate": 0.000670463038456312, "loss": 3.3135, "step": 38815 }, { "epoch": 2.6375866286180187, "grad_norm": 2.144124984741211, "learning_rate": 0.0006704205734474793, "loss": 3.7182, "step": 38820 }, { "epoch": 2.6379263486886804, "grad_norm": 2.01399827003479, "learning_rate": 0.0006703781084386465, "loss": 3.5858, "step": 38825 }, { "epoch": 2.6382660687593424, "grad_norm": 2.1246755123138428, "learning_rate": 0.0006703356434298139, "loss": 3.4887, "step": 38830 }, { "epoch": 2.638605788830004, "grad_norm": 1.7491928339004517, "learning_rate": 0.0006702931784209811, "loss": 3.6451, "step": 38835 }, { "epoch": 2.6389455089006657, "grad_norm": 1.7371337413787842, "learning_rate": 0.0006702507134121484, "loss": 3.3358, "step": 38840 }, { "epoch": 2.639285228971328, "grad_norm": 1.5030293464660645, "learning_rate": 0.0006702082484033158, "loss": 3.4876, "step": 38845 }, { "epoch": 2.6396249490419894, "grad_norm": 2.2424473762512207, "learning_rate": 0.000670165783394483, "loss": 3.4851, "step": 38850 }, { "epoch": 
2.639964669112651, "grad_norm": 1.7694083452224731, "learning_rate": 0.0006701233183856502, "loss": 3.5856, "step": 38855 }, { "epoch": 2.640304389183313, "grad_norm": 1.8488961458206177, "learning_rate": 0.0006700808533768175, "loss": 3.4707, "step": 38860 }, { "epoch": 2.6406441092539747, "grad_norm": 1.8507877588272095, "learning_rate": 0.0006700383883679848, "loss": 3.4373, "step": 38865 }, { "epoch": 2.6409838293246364, "grad_norm": 1.6992113590240479, "learning_rate": 0.000669995923359152, "loss": 3.4058, "step": 38870 }, { "epoch": 2.641323549395298, "grad_norm": 1.9364160299301147, "learning_rate": 0.0006699534583503194, "loss": 3.7552, "step": 38875 }, { "epoch": 2.64166326946596, "grad_norm": 1.415567398071289, "learning_rate": 0.0006699109933414867, "loss": 3.4269, "step": 38880 }, { "epoch": 2.6420029895366217, "grad_norm": 1.8744635581970215, "learning_rate": 0.0006698685283326539, "loss": 3.6925, "step": 38885 }, { "epoch": 2.6423427096072833, "grad_norm": 1.6150176525115967, "learning_rate": 0.0006698260633238212, "loss": 3.5258, "step": 38890 }, { "epoch": 2.6426824296779454, "grad_norm": 1.9863125085830688, "learning_rate": 0.0006697835983149884, "loss": 3.4177, "step": 38895 }, { "epoch": 2.643022149748607, "grad_norm": 1.8692365884780884, "learning_rate": 0.0006697411333061557, "loss": 3.1235, "step": 38900 }, { "epoch": 2.6433618698192687, "grad_norm": 2.0580291748046875, "learning_rate": 0.0006696986682973231, "loss": 3.4311, "step": 38905 }, { "epoch": 2.6437015898899308, "grad_norm": 1.945749282836914, "learning_rate": 0.0006696562032884903, "loss": 3.3611, "step": 38910 }, { "epoch": 2.6440413099605924, "grad_norm": 1.9264148473739624, "learning_rate": 0.0006696137382796576, "loss": 3.4887, "step": 38915 }, { "epoch": 2.644381030031254, "grad_norm": 2.063382863998413, "learning_rate": 0.0006695712732708249, "loss": 3.6605, "step": 38920 }, { "epoch": 2.644720750101916, "grad_norm": 1.7680178880691528, "learning_rate": 0.0006695288082619921, 
"loss": 3.2888, "step": 38925 }, { "epoch": 2.6450604701725777, "grad_norm": 1.7979238033294678, "learning_rate": 0.0006694863432531594, "loss": 3.2522, "step": 38930 }, { "epoch": 2.6454001902432394, "grad_norm": 1.8469446897506714, "learning_rate": 0.0006694438782443267, "loss": 3.3472, "step": 38935 }, { "epoch": 2.6457399103139014, "grad_norm": 1.9754704236984253, "learning_rate": 0.000669401413235494, "loss": 3.3991, "step": 38940 }, { "epoch": 2.646079630384563, "grad_norm": 1.8436063528060913, "learning_rate": 0.0006693589482266613, "loss": 3.5857, "step": 38945 }, { "epoch": 2.6464193504552247, "grad_norm": 2.442485809326172, "learning_rate": 0.0006693164832178286, "loss": 3.5223, "step": 38950 }, { "epoch": 2.6467590705258868, "grad_norm": 1.814894437789917, "learning_rate": 0.0006692740182089958, "loss": 3.3953, "step": 38955 }, { "epoch": 2.6470987905965484, "grad_norm": 1.9554295539855957, "learning_rate": 0.000669231553200163, "loss": 3.2854, "step": 38960 }, { "epoch": 2.64743851066721, "grad_norm": 1.9559659957885742, "learning_rate": 0.0006691890881913304, "loss": 3.4926, "step": 38965 }, { "epoch": 2.647778230737872, "grad_norm": 1.782837152481079, "learning_rate": 0.0006691466231824976, "loss": 3.5221, "step": 38970 }, { "epoch": 2.6481179508085337, "grad_norm": 1.6016349792480469, "learning_rate": 0.0006691041581736649, "loss": 3.5089, "step": 38975 }, { "epoch": 2.6484576708791954, "grad_norm": 2.017868757247925, "learning_rate": 0.0006690616931648323, "loss": 3.7061, "step": 38980 }, { "epoch": 2.6487973909498574, "grad_norm": 1.7210437059402466, "learning_rate": 0.0006690192281559995, "loss": 3.3187, "step": 38985 }, { "epoch": 2.649137111020519, "grad_norm": 2.1240315437316895, "learning_rate": 0.0006689767631471667, "loss": 3.4193, "step": 38990 }, { "epoch": 2.6494768310911807, "grad_norm": 1.9974335432052612, "learning_rate": 0.000668934298138334, "loss": 3.415, "step": 38995 }, { "epoch": 2.649816551161843, "grad_norm": 
1.9849735498428345, "learning_rate": 0.0006688918331295013, "loss": 3.7232, "step": 39000 }, { "epoch": 2.6501562712325044, "grad_norm": 2.369847297668457, "learning_rate": 0.0006688493681206685, "loss": 3.4775, "step": 39005 }, { "epoch": 2.650495991303166, "grad_norm": 2.110848903656006, "learning_rate": 0.0006688069031118359, "loss": 3.6311, "step": 39010 }, { "epoch": 2.650835711373828, "grad_norm": 2.0272274017333984, "learning_rate": 0.0006687644381030032, "loss": 3.4599, "step": 39015 }, { "epoch": 2.6511754314444897, "grad_norm": 2.0492117404937744, "learning_rate": 0.0006687219730941704, "loss": 3.2924, "step": 39020 }, { "epoch": 2.6515151515151514, "grad_norm": 1.7393065690994263, "learning_rate": 0.0006686795080853377, "loss": 3.7481, "step": 39025 }, { "epoch": 2.6518548715858135, "grad_norm": 1.927003264427185, "learning_rate": 0.000668637043076505, "loss": 3.5877, "step": 39030 }, { "epoch": 2.652194591656475, "grad_norm": 2.012798309326172, "learning_rate": 0.0006685945780676722, "loss": 3.5593, "step": 39035 }, { "epoch": 2.6525343117271367, "grad_norm": 2.1301190853118896, "learning_rate": 0.0006685521130588395, "loss": 3.6021, "step": 39040 }, { "epoch": 2.652874031797799, "grad_norm": 2.2571828365325928, "learning_rate": 0.0006685096480500069, "loss": 3.569, "step": 39045 }, { "epoch": 2.6532137518684604, "grad_norm": 2.1799309253692627, "learning_rate": 0.0006684671830411741, "loss": 3.4002, "step": 39050 }, { "epoch": 2.653553471939122, "grad_norm": 2.781949043273926, "learning_rate": 0.0006684247180323414, "loss": 3.2191, "step": 39055 }, { "epoch": 2.653893192009784, "grad_norm": 3.1807219982147217, "learning_rate": 0.0006683822530235086, "loss": 3.6311, "step": 39060 }, { "epoch": 2.6542329120804458, "grad_norm": 1.731387972831726, "learning_rate": 0.0006683397880146759, "loss": 3.2733, "step": 39065 }, { "epoch": 2.6545726321511074, "grad_norm": 2.0977439880371094, "learning_rate": 0.0006682973230058432, "loss": 3.7545, "step": 39070 }, { 
"epoch": 2.6549123522217695, "grad_norm": 2.301081418991089, "learning_rate": 0.0006682548579970104, "loss": 3.3794, "step": 39075 }, { "epoch": 2.655252072292431, "grad_norm": 1.8570789098739624, "learning_rate": 0.0006682123929881778, "loss": 3.5269, "step": 39080 }, { "epoch": 2.6555917923630927, "grad_norm": 1.9388405084609985, "learning_rate": 0.0006681699279793451, "loss": 3.5828, "step": 39085 }, { "epoch": 2.655931512433755, "grad_norm": 1.6125940084457397, "learning_rate": 0.0006681274629705123, "loss": 3.4507, "step": 39090 }, { "epoch": 2.6562712325044164, "grad_norm": 1.9415137767791748, "learning_rate": 0.0006680849979616795, "loss": 3.4864, "step": 39095 }, { "epoch": 2.656610952575078, "grad_norm": 2.4094600677490234, "learning_rate": 0.0006680425329528469, "loss": 3.4417, "step": 39100 }, { "epoch": 2.65695067264574, "grad_norm": 1.7102365493774414, "learning_rate": 0.0006680000679440141, "loss": 3.3483, "step": 39105 }, { "epoch": 2.6572903927164018, "grad_norm": 2.4306936264038086, "learning_rate": 0.0006679576029351813, "loss": 3.2624, "step": 39110 }, { "epoch": 2.6576301127870634, "grad_norm": 2.452016830444336, "learning_rate": 0.0006679151379263488, "loss": 3.3997, "step": 39115 }, { "epoch": 2.6579698328577255, "grad_norm": 1.8387846946716309, "learning_rate": 0.000667872672917516, "loss": 3.734, "step": 39120 }, { "epoch": 2.658309552928387, "grad_norm": 1.7455552816390991, "learning_rate": 0.0006678302079086832, "loss": 3.1737, "step": 39125 }, { "epoch": 2.6586492729990487, "grad_norm": 2.1361873149871826, "learning_rate": 0.0006677877428998506, "loss": 3.3636, "step": 39130 }, { "epoch": 2.658988993069711, "grad_norm": 2.4363415241241455, "learning_rate": 0.0006677452778910178, "loss": 3.5981, "step": 39135 }, { "epoch": 2.6593287131403724, "grad_norm": 1.7474274635314941, "learning_rate": 0.000667702812882185, "loss": 3.5002, "step": 39140 }, { "epoch": 2.659668433211034, "grad_norm": 2.2550110816955566, "learning_rate": 
0.0006676603478733523, "loss": 3.2864, "step": 39145 }, { "epoch": 2.660008153281696, "grad_norm": 1.832486867904663, "learning_rate": 0.0006676178828645197, "loss": 3.2775, "step": 39150 }, { "epoch": 2.660347873352358, "grad_norm": 1.8111233711242676, "learning_rate": 0.0006675754178556869, "loss": 3.8492, "step": 39155 }, { "epoch": 2.6606875934230194, "grad_norm": 2.0248165130615234, "learning_rate": 0.0006675329528468542, "loss": 3.3897, "step": 39160 }, { "epoch": 2.661027313493681, "grad_norm": 2.4274673461914062, "learning_rate": 0.0006674904878380215, "loss": 3.73, "step": 39165 }, { "epoch": 2.661367033564343, "grad_norm": 1.8330546617507935, "learning_rate": 0.0006674480228291888, "loss": 3.4808, "step": 39170 }, { "epoch": 2.6617067536350048, "grad_norm": 1.7536954879760742, "learning_rate": 0.000667405557820356, "loss": 3.2939, "step": 39175 }, { "epoch": 2.6620464737056664, "grad_norm": 1.7902284860610962, "learning_rate": 0.0006673630928115233, "loss": 3.372, "step": 39180 }, { "epoch": 2.6623861937763285, "grad_norm": 1.7849076986312866, "learning_rate": 0.0006673206278026907, "loss": 3.166, "step": 39185 }, { "epoch": 2.66272591384699, "grad_norm": 2.329331874847412, "learning_rate": 0.0006672781627938579, "loss": 3.4138, "step": 39190 }, { "epoch": 2.6630656339176517, "grad_norm": 1.928131341934204, "learning_rate": 0.0006672356977850251, "loss": 3.571, "step": 39195 }, { "epoch": 2.663405353988314, "grad_norm": 2.161935329437256, "learning_rate": 0.0006671932327761925, "loss": 3.5869, "step": 39200 }, { "epoch": 2.6637450740589754, "grad_norm": 2.2009758949279785, "learning_rate": 0.0006671507677673597, "loss": 3.4304, "step": 39205 }, { "epoch": 2.664084794129637, "grad_norm": 2.1535160541534424, "learning_rate": 0.0006671083027585269, "loss": 3.4445, "step": 39210 }, { "epoch": 2.6644245142002987, "grad_norm": 1.985149621963501, "learning_rate": 0.0006670658377496943, "loss": 3.4414, "step": 39215 }, { "epoch": 2.6647642342709608, "grad_norm": 
2.077913761138916, "learning_rate": 0.0006670233727408616, "loss": 3.3071, "step": 39220 }, { "epoch": 2.6651039543416224, "grad_norm": 1.7121585607528687, "learning_rate": 0.0006669809077320288, "loss": 3.6734, "step": 39225 }, { "epoch": 2.665443674412284, "grad_norm": 2.1228785514831543, "learning_rate": 0.0006669384427231962, "loss": 3.4577, "step": 39230 }, { "epoch": 2.665783394482946, "grad_norm": 1.8216447830200195, "learning_rate": 0.0006668959777143634, "loss": 3.4145, "step": 39235 }, { "epoch": 2.6661231145536077, "grad_norm": 1.9189012050628662, "learning_rate": 0.0006668535127055306, "loss": 3.351, "step": 39240 }, { "epoch": 2.6664628346242694, "grad_norm": 1.8116388320922852, "learning_rate": 0.000666811047696698, "loss": 3.4712, "step": 39245 }, { "epoch": 2.6668025546949314, "grad_norm": 1.879854679107666, "learning_rate": 0.0006667685826878652, "loss": 3.6674, "step": 39250 }, { "epoch": 2.667142274765593, "grad_norm": 1.5977572202682495, "learning_rate": 0.0006667261176790325, "loss": 3.2192, "step": 39255 }, { "epoch": 2.6674819948362547, "grad_norm": 2.702073812484741, "learning_rate": 0.0006666836526701998, "loss": 3.82, "step": 39260 }, { "epoch": 2.6678217149069168, "grad_norm": 2.2181224822998047, "learning_rate": 0.0006666411876613671, "loss": 3.3725, "step": 39265 }, { "epoch": 2.6681614349775784, "grad_norm": 1.7401841878890991, "learning_rate": 0.0006665987226525343, "loss": 3.3335, "step": 39270 }, { "epoch": 2.66850115504824, "grad_norm": 1.7278245687484741, "learning_rate": 0.0006665562576437016, "loss": 3.6542, "step": 39275 }, { "epoch": 2.668840875118902, "grad_norm": 2.167839527130127, "learning_rate": 0.0006665137926348689, "loss": 3.6115, "step": 39280 }, { "epoch": 2.6691805951895637, "grad_norm": 1.6934704780578613, "learning_rate": 0.0006664713276260361, "loss": 3.6496, "step": 39285 }, { "epoch": 2.6695203152602254, "grad_norm": 1.7914400100708008, "learning_rate": 0.0006664288626172035, "loss": 3.4678, "step": 39290 }, { 
"epoch": 2.6698600353308874, "grad_norm": 1.9475237131118774, "learning_rate": 0.0006663863976083707, "loss": 3.5697, "step": 39295 }, { "epoch": 2.670199755401549, "grad_norm": 2.0935990810394287, "learning_rate": 0.000666343932599538, "loss": 3.5676, "step": 39300 }, { "epoch": 2.6705394754722107, "grad_norm": 1.9921060800552368, "learning_rate": 0.0006663014675907053, "loss": 3.4074, "step": 39305 }, { "epoch": 2.670879195542873, "grad_norm": 1.7747070789337158, "learning_rate": 0.0006662590025818725, "loss": 3.4664, "step": 39310 }, { "epoch": 2.6712189156135344, "grad_norm": 2.083585023880005, "learning_rate": 0.0006662165375730398, "loss": 3.6619, "step": 39315 }, { "epoch": 2.671558635684196, "grad_norm": 2.2686121463775635, "learning_rate": 0.0006661740725642071, "loss": 3.9662, "step": 39320 }, { "epoch": 2.671898355754858, "grad_norm": 1.5297456979751587, "learning_rate": 0.0006661316075553744, "loss": 3.4457, "step": 39325 }, { "epoch": 2.6722380758255198, "grad_norm": 1.6275157928466797, "learning_rate": 0.0006660891425465417, "loss": 3.5496, "step": 39330 }, { "epoch": 2.6725777958961814, "grad_norm": 1.8022942543029785, "learning_rate": 0.000666046677537709, "loss": 3.429, "step": 39335 }, { "epoch": 2.6729175159668435, "grad_norm": 2.155400276184082, "learning_rate": 0.0006660042125288762, "loss": 3.5876, "step": 39340 }, { "epoch": 2.673257236037505, "grad_norm": 2.1285223960876465, "learning_rate": 0.0006659617475200434, "loss": 3.452, "step": 39345 }, { "epoch": 2.6735969561081667, "grad_norm": 2.130781888961792, "learning_rate": 0.0006659192825112108, "loss": 3.4592, "step": 39350 }, { "epoch": 2.673936676178829, "grad_norm": 1.8411561250686646, "learning_rate": 0.000665876817502378, "loss": 3.7353, "step": 39355 }, { "epoch": 2.6742763962494904, "grad_norm": 1.6159409284591675, "learning_rate": 0.0006658343524935453, "loss": 3.2616, "step": 39360 }, { "epoch": 2.674616116320152, "grad_norm": 1.7357001304626465, "learning_rate": 
0.0006657918874847127, "loss": 3.4016, "step": 39365 }, { "epoch": 2.674955836390814, "grad_norm": 1.9388796091079712, "learning_rate": 0.0006657494224758799, "loss": 3.5148, "step": 39370 }, { "epoch": 2.6752955564614758, "grad_norm": 1.8430733680725098, "learning_rate": 0.0006657069574670471, "loss": 3.6587, "step": 39375 }, { "epoch": 2.6756352765321374, "grad_norm": 1.6911190748214722, "learning_rate": 0.0006656644924582145, "loss": 3.4729, "step": 39380 }, { "epoch": 2.6759749966027995, "grad_norm": 1.6989473104476929, "learning_rate": 0.0006656220274493817, "loss": 3.3771, "step": 39385 }, { "epoch": 2.676314716673461, "grad_norm": 1.650848627090454, "learning_rate": 0.0006655795624405489, "loss": 3.6156, "step": 39390 }, { "epoch": 2.6766544367441227, "grad_norm": 1.9805734157562256, "learning_rate": 0.0006655370974317163, "loss": 3.6369, "step": 39395 }, { "epoch": 2.676994156814785, "grad_norm": 1.6001389026641846, "learning_rate": 0.0006654946324228836, "loss": 3.6447, "step": 39400 }, { "epoch": 2.6773338768854464, "grad_norm": 1.8296531438827515, "learning_rate": 0.0006654521674140508, "loss": 3.4321, "step": 39405 }, { "epoch": 2.677673596956108, "grad_norm": 1.9239553213119507, "learning_rate": 0.0006654097024052181, "loss": 3.301, "step": 39410 }, { "epoch": 2.67801331702677, "grad_norm": 1.4479914903640747, "learning_rate": 0.0006653672373963854, "loss": 3.6858, "step": 39415 }, { "epoch": 2.6783530370974318, "grad_norm": 2.25343656539917, "learning_rate": 0.0006653247723875526, "loss": 3.585, "step": 39420 }, { "epoch": 2.6786927571680934, "grad_norm": 2.1911370754241943, "learning_rate": 0.00066528230737872, "loss": 3.6393, "step": 39425 }, { "epoch": 2.6790324772387555, "grad_norm": 2.165926218032837, "learning_rate": 0.0006652398423698873, "loss": 3.6617, "step": 39430 }, { "epoch": 2.679372197309417, "grad_norm": 2.065922260284424, "learning_rate": 0.0006651973773610545, "loss": 3.4353, "step": 39435 }, { "epoch": 2.6797119173800787, 
"grad_norm": 1.6475296020507812, "learning_rate": 0.0006651549123522218, "loss": 3.4818, "step": 39440 }, { "epoch": 2.680051637450741, "grad_norm": 1.7649284601211548, "learning_rate": 0.000665112447343389, "loss": 3.5698, "step": 39445 }, { "epoch": 2.6803913575214025, "grad_norm": 1.777248501777649, "learning_rate": 0.0006650699823345563, "loss": 3.6059, "step": 39450 }, { "epoch": 2.680731077592064, "grad_norm": 2.2029757499694824, "learning_rate": 0.0006650275173257236, "loss": 3.5651, "step": 39455 }, { "epoch": 2.681070797662726, "grad_norm": 1.8286933898925781, "learning_rate": 0.0006649850523168909, "loss": 3.4671, "step": 39460 }, { "epoch": 2.681410517733388, "grad_norm": 2.07869291305542, "learning_rate": 0.0006649425873080582, "loss": 3.409, "step": 39465 }, { "epoch": 2.6817502378040494, "grad_norm": 1.6141579151153564, "learning_rate": 0.0006649001222992255, "loss": 3.5273, "step": 39470 }, { "epoch": 2.6820899578747115, "grad_norm": 1.6660434007644653, "learning_rate": 0.0006648576572903927, "loss": 3.3782, "step": 39475 }, { "epoch": 2.682429677945373, "grad_norm": 1.5599422454833984, "learning_rate": 0.00066481519228156, "loss": 3.3903, "step": 39480 }, { "epoch": 2.6827693980160348, "grad_norm": 2.361490488052368, "learning_rate": 0.0006647727272727273, "loss": 3.7311, "step": 39485 }, { "epoch": 2.683109118086697, "grad_norm": 1.9799941778182983, "learning_rate": 0.0006647302622638945, "loss": 3.585, "step": 39490 }, { "epoch": 2.6834488381573585, "grad_norm": 2.1718945503234863, "learning_rate": 0.0006646877972550618, "loss": 3.4523, "step": 39495 }, { "epoch": 2.68378855822802, "grad_norm": 1.8354952335357666, "learning_rate": 0.0006646453322462292, "loss": 3.4024, "step": 39500 }, { "epoch": 2.6841282782986817, "grad_norm": 2.2081353664398193, "learning_rate": 0.0006646028672373964, "loss": 3.3802, "step": 39505 }, { "epoch": 2.684467998369344, "grad_norm": 1.576534628868103, "learning_rate": 0.0006645604022285637, "loss": 3.5578, "step": 
39510 }, { "epoch": 2.6848077184400054, "grad_norm": 2.362473726272583, "learning_rate": 0.000664517937219731, "loss": 3.3964, "step": 39515 }, { "epoch": 2.685147438510667, "grad_norm": 2.354809284210205, "learning_rate": 0.0006644754722108982, "loss": 3.3316, "step": 39520 }, { "epoch": 2.685487158581329, "grad_norm": 2.2212882041931152, "learning_rate": 0.0006644330072020655, "loss": 3.3666, "step": 39525 }, { "epoch": 2.6858268786519908, "grad_norm": 2.1464953422546387, "learning_rate": 0.0006643905421932329, "loss": 3.5181, "step": 39530 }, { "epoch": 2.6861665987226524, "grad_norm": 2.02402400970459, "learning_rate": 0.0006643480771844001, "loss": 3.6686, "step": 39535 }, { "epoch": 2.6865063187933145, "grad_norm": 1.9348118305206299, "learning_rate": 0.0006643056121755674, "loss": 3.5302, "step": 39540 }, { "epoch": 2.686846038863976, "grad_norm": 2.6656572818756104, "learning_rate": 0.0006642631471667346, "loss": 3.2627, "step": 39545 }, { "epoch": 2.6871857589346377, "grad_norm": 2.68573260307312, "learning_rate": 0.0006642206821579019, "loss": 3.5102, "step": 39550 }, { "epoch": 2.6875254790052994, "grad_norm": 1.9689741134643555, "learning_rate": 0.0006641782171490692, "loss": 3.4664, "step": 39555 }, { "epoch": 2.6878651990759614, "grad_norm": 2.121636152267456, "learning_rate": 0.0006641357521402364, "loss": 3.599, "step": 39560 }, { "epoch": 2.688204919146623, "grad_norm": 1.6283522844314575, "learning_rate": 0.0006640932871314038, "loss": 3.5848, "step": 39565 }, { "epoch": 2.6885446392172847, "grad_norm": 2.538648843765259, "learning_rate": 0.0006640508221225711, "loss": 3.5656, "step": 39570 }, { "epoch": 2.688884359287947, "grad_norm": 1.7087510824203491, "learning_rate": 0.0006640083571137383, "loss": 3.3968, "step": 39575 }, { "epoch": 2.6892240793586084, "grad_norm": 2.0522778034210205, "learning_rate": 0.0006639658921049055, "loss": 3.412, "step": 39580 }, { "epoch": 2.68956379942927, "grad_norm": 2.1268815994262695, "learning_rate": 
0.0006639234270960729, "loss": 3.7238, "step": 39585 }, { "epoch": 2.689903519499932, "grad_norm": 2.3459784984588623, "learning_rate": 0.0006638809620872401, "loss": 3.5452, "step": 39590 }, { "epoch": 2.6902432395705937, "grad_norm": 2.0278732776641846, "learning_rate": 0.0006638384970784073, "loss": 3.6039, "step": 39595 }, { "epoch": 2.6905829596412554, "grad_norm": 2.2044291496276855, "learning_rate": 0.0006637960320695748, "loss": 3.3315, "step": 39600 }, { "epoch": 2.6909226797119175, "grad_norm": 1.7905645370483398, "learning_rate": 0.000663753567060742, "loss": 3.4189, "step": 39605 }, { "epoch": 2.691262399782579, "grad_norm": 2.1854588985443115, "learning_rate": 0.0006637111020519092, "loss": 3.5598, "step": 39610 }, { "epoch": 2.6916021198532407, "grad_norm": 1.7378909587860107, "learning_rate": 0.0006636686370430766, "loss": 3.6827, "step": 39615 }, { "epoch": 2.691941839923903, "grad_norm": 1.6957194805145264, "learning_rate": 0.0006636261720342438, "loss": 3.4664, "step": 39620 }, { "epoch": 2.6922815599945644, "grad_norm": 2.367537260055542, "learning_rate": 0.000663583707025411, "loss": 3.6169, "step": 39625 }, { "epoch": 2.692621280065226, "grad_norm": 2.004631519317627, "learning_rate": 0.0006635412420165783, "loss": 3.3606, "step": 39630 }, { "epoch": 2.692961000135888, "grad_norm": 2.103217124938965, "learning_rate": 0.0006634987770077457, "loss": 3.4118, "step": 39635 }, { "epoch": 2.6933007202065498, "grad_norm": 2.1497504711151123, "learning_rate": 0.0006634563119989129, "loss": 3.4592, "step": 39640 }, { "epoch": 2.6936404402772114, "grad_norm": 2.03657865524292, "learning_rate": 0.0006634138469900802, "loss": 3.2858, "step": 39645 }, { "epoch": 2.6939801603478735, "grad_norm": 1.8560898303985596, "learning_rate": 0.0006633713819812475, "loss": 3.5028, "step": 39650 }, { "epoch": 2.694319880418535, "grad_norm": 1.9124295711517334, "learning_rate": 0.0006633289169724147, "loss": 3.5162, "step": 39655 }, { "epoch": 2.6946596004891967, 
"grad_norm": 2.2913312911987305, "learning_rate": 0.000663286451963582, "loss": 3.4048, "step": 39660 }, { "epoch": 2.694999320559859, "grad_norm": 1.5327175855636597, "learning_rate": 0.0006632439869547493, "loss": 3.5546, "step": 39665 }, { "epoch": 2.6953390406305204, "grad_norm": 1.7923425436019897, "learning_rate": 0.0006632015219459166, "loss": 3.3268, "step": 39670 }, { "epoch": 2.695678760701182, "grad_norm": 2.016533851623535, "learning_rate": 0.0006631590569370839, "loss": 3.3314, "step": 39675 }, { "epoch": 2.696018480771844, "grad_norm": 1.780828595161438, "learning_rate": 0.0006631165919282512, "loss": 3.4063, "step": 39680 }, { "epoch": 2.6963582008425058, "grad_norm": 2.332409381866455, "learning_rate": 0.0006630741269194184, "loss": 3.4262, "step": 39685 }, { "epoch": 2.6966979209131674, "grad_norm": 2.26332688331604, "learning_rate": 0.0006630316619105857, "loss": 3.5045, "step": 39690 }, { "epoch": 2.6970376409838295, "grad_norm": 1.6983845233917236, "learning_rate": 0.0006629891969017529, "loss": 3.4561, "step": 39695 }, { "epoch": 2.697377361054491, "grad_norm": 1.6372959613800049, "learning_rate": 0.0006629467318929202, "loss": 3.5988, "step": 39700 }, { "epoch": 2.6977170811251527, "grad_norm": 1.5651459693908691, "learning_rate": 0.0006629042668840876, "loss": 3.3959, "step": 39705 }, { "epoch": 2.698056801195815, "grad_norm": 1.9111522436141968, "learning_rate": 0.0006628618018752548, "loss": 3.5304, "step": 39710 }, { "epoch": 2.6983965212664764, "grad_norm": 2.212484121322632, "learning_rate": 0.0006628193368664221, "loss": 3.4438, "step": 39715 }, { "epoch": 2.698736241337138, "grad_norm": 2.0299084186553955, "learning_rate": 0.0006627768718575894, "loss": 3.6918, "step": 39720 }, { "epoch": 2.6990759614078, "grad_norm": 1.6476975679397583, "learning_rate": 0.0006627344068487566, "loss": 3.4845, "step": 39725 }, { "epoch": 2.699415681478462, "grad_norm": 1.484178900718689, "learning_rate": 0.0006626919418399238, "loss": 3.315, "step": 
39730 }, { "epoch": 2.6997554015491234, "grad_norm": 2.4020233154296875, "learning_rate": 0.0006626494768310912, "loss": 3.4863, "step": 39735 }, { "epoch": 2.7000951216197855, "grad_norm": 1.8068889379501343, "learning_rate": 0.0006626070118222585, "loss": 3.341, "step": 39740 }, { "epoch": 2.700434841690447, "grad_norm": 1.8258399963378906, "learning_rate": 0.0006625645468134257, "loss": 3.5437, "step": 39745 }, { "epoch": 2.7007745617611087, "grad_norm": 2.028590202331543, "learning_rate": 0.0006625220818045931, "loss": 3.5481, "step": 39750 }, { "epoch": 2.701114281831771, "grad_norm": 1.4880434274673462, "learning_rate": 0.0006624796167957603, "loss": 3.4859, "step": 39755 }, { "epoch": 2.7014540019024325, "grad_norm": 2.1121561527252197, "learning_rate": 0.0006624371517869275, "loss": 3.5676, "step": 39760 }, { "epoch": 2.701793721973094, "grad_norm": 1.7903757095336914, "learning_rate": 0.0006623946867780949, "loss": 3.5117, "step": 39765 }, { "epoch": 2.702133442043756, "grad_norm": 2.0107343196868896, "learning_rate": 0.0006623522217692621, "loss": 3.4662, "step": 39770 }, { "epoch": 2.702473162114418, "grad_norm": 1.8070259094238281, "learning_rate": 0.0006623097567604294, "loss": 3.3704, "step": 39775 }, { "epoch": 2.7028128821850794, "grad_norm": 2.2967491149902344, "learning_rate": 0.0006622672917515968, "loss": 3.2421, "step": 39780 }, { "epoch": 2.7031526022557415, "grad_norm": 2.1452975273132324, "learning_rate": 0.000662224826742764, "loss": 3.4092, "step": 39785 }, { "epoch": 2.703492322326403, "grad_norm": 2.0922863483428955, "learning_rate": 0.0006621823617339312, "loss": 3.593, "step": 39790 }, { "epoch": 2.7038320423970648, "grad_norm": 2.7233338356018066, "learning_rate": 0.0006621398967250985, "loss": 3.4561, "step": 39795 }, { "epoch": 2.704171762467727, "grad_norm": 2.7892954349517822, "learning_rate": 0.0006620974317162658, "loss": 3.5078, "step": 39800 }, { "epoch": 2.7045114825383885, "grad_norm": 2.3007264137268066, "learning_rate": 
0.000662054966707433, "loss": 3.4598, "step": 39805 }, { "epoch": 2.70485120260905, "grad_norm": 1.827209711074829, "learning_rate": 0.0006620125016986004, "loss": 3.4727, "step": 39810 }, { "epoch": 2.705190922679712, "grad_norm": 1.8741480112075806, "learning_rate": 0.0006619700366897677, "loss": 3.3828, "step": 39815 }, { "epoch": 2.705530642750374, "grad_norm": 1.9127705097198486, "learning_rate": 0.0006619275716809349, "loss": 3.4973, "step": 39820 }, { "epoch": 2.7058703628210354, "grad_norm": 1.831343412399292, "learning_rate": 0.0006618851066721022, "loss": 3.1764, "step": 39825 }, { "epoch": 2.7062100828916975, "grad_norm": 1.9703097343444824, "learning_rate": 0.0006618426416632694, "loss": 3.4512, "step": 39830 }, { "epoch": 2.706549802962359, "grad_norm": 2.0712292194366455, "learning_rate": 0.0006618001766544367, "loss": 3.5841, "step": 39835 }, { "epoch": 2.7068895230330208, "grad_norm": 1.706813931465149, "learning_rate": 0.000661757711645604, "loss": 3.3133, "step": 39840 }, { "epoch": 2.7072292431036824, "grad_norm": 1.4271636009216309, "learning_rate": 0.0006617152466367713, "loss": 3.3783, "step": 39845 }, { "epoch": 2.7075689631743445, "grad_norm": 1.8416014909744263, "learning_rate": 0.0006616727816279387, "loss": 3.5389, "step": 39850 }, { "epoch": 2.707908683245006, "grad_norm": 2.0566210746765137, "learning_rate": 0.0006616303166191059, "loss": 3.4922, "step": 39855 }, { "epoch": 2.7082484033156677, "grad_norm": 2.084573745727539, "learning_rate": 0.0006615878516102731, "loss": 3.4696, "step": 39860 }, { "epoch": 2.70858812338633, "grad_norm": 1.9691557884216309, "learning_rate": 0.0006615453866014405, "loss": 3.553, "step": 39865 }, { "epoch": 2.7089278434569914, "grad_norm": 2.240605592727661, "learning_rate": 0.0006615029215926077, "loss": 3.5972, "step": 39870 }, { "epoch": 2.709267563527653, "grad_norm": 1.8343218564987183, "learning_rate": 0.0006614604565837749, "loss": 3.3491, "step": 39875 }, { "epoch": 2.709607283598315, "grad_norm": 
2.781374454498291, "learning_rate": 0.0006614179915749424, "loss": 3.5229, "step": 39880 }, { "epoch": 2.709947003668977, "grad_norm": 1.6926151514053345, "learning_rate": 0.0006613755265661096, "loss": 3.6202, "step": 39885 }, { "epoch": 2.7102867237396384, "grad_norm": 1.5569254159927368, "learning_rate": 0.0006613330615572768, "loss": 3.5398, "step": 39890 }, { "epoch": 2.7106264438103, "grad_norm": 1.781578779220581, "learning_rate": 0.0006612905965484441, "loss": 3.5745, "step": 39895 }, { "epoch": 2.710966163880962, "grad_norm": 1.9110573530197144, "learning_rate": 0.0006612481315396114, "loss": 3.4274, "step": 39900 }, { "epoch": 2.7113058839516238, "grad_norm": 2.1621553897857666, "learning_rate": 0.0006612056665307786, "loss": 3.5435, "step": 39905 }, { "epoch": 2.7116456040222854, "grad_norm": 1.83211350440979, "learning_rate": 0.0006611632015219459, "loss": 3.4948, "step": 39910 }, { "epoch": 2.7119853240929475, "grad_norm": 2.021904230117798, "learning_rate": 0.0006611207365131133, "loss": 3.4396, "step": 39915 }, { "epoch": 2.712325044163609, "grad_norm": 1.5927156209945679, "learning_rate": 0.0006610782715042805, "loss": 3.5935, "step": 39920 }, { "epoch": 2.7126647642342707, "grad_norm": 1.6614960432052612, "learning_rate": 0.0006610358064954478, "loss": 3.5789, "step": 39925 }, { "epoch": 2.713004484304933, "grad_norm": 2.057523250579834, "learning_rate": 0.000660993341486615, "loss": 3.6655, "step": 39930 }, { "epoch": 2.7133442043755944, "grad_norm": 2.238888740539551, "learning_rate": 0.0006609508764777823, "loss": 3.5463, "step": 39935 }, { "epoch": 2.713683924446256, "grad_norm": 2.0381736755371094, "learning_rate": 0.0006609084114689496, "loss": 3.5049, "step": 39940 }, { "epoch": 2.714023644516918, "grad_norm": 1.7980974912643433, "learning_rate": 0.0006608659464601168, "loss": 3.2667, "step": 39945 }, { "epoch": 2.7143633645875798, "grad_norm": 2.4501655101776123, "learning_rate": 0.0006608234814512842, "loss": 3.303, "step": 39950 }, { 
"epoch": 2.7147030846582414, "grad_norm": 1.8926990032196045, "learning_rate": 0.0006607810164424515, "loss": 3.5286, "step": 39955 }, { "epoch": 2.7150428047289035, "grad_norm": 1.7383252382278442, "learning_rate": 0.0006607385514336187, "loss": 3.5887, "step": 39960 }, { "epoch": 2.715382524799565, "grad_norm": 1.949302315711975, "learning_rate": 0.000660696086424786, "loss": 3.5132, "step": 39965 }, { "epoch": 2.7157222448702267, "grad_norm": 1.7654976844787598, "learning_rate": 0.0006606536214159533, "loss": 3.2781, "step": 39970 }, { "epoch": 2.716061964940889, "grad_norm": 1.8645261526107788, "learning_rate": 0.0006606111564071205, "loss": 3.5813, "step": 39975 }, { "epoch": 2.7164016850115504, "grad_norm": 2.2463066577911377, "learning_rate": 0.0006605686913982877, "loss": 3.4422, "step": 39980 }, { "epoch": 2.716741405082212, "grad_norm": 1.9726874828338623, "learning_rate": 0.0006605262263894552, "loss": 3.3792, "step": 39985 }, { "epoch": 2.717081125152874, "grad_norm": 1.75690758228302, "learning_rate": 0.0006604837613806224, "loss": 3.5247, "step": 39990 }, { "epoch": 2.7174208452235358, "grad_norm": 1.7678804397583008, "learning_rate": 0.0006604412963717896, "loss": 3.5415, "step": 39995 }, { "epoch": 2.7177605652941974, "grad_norm": 1.6862913370132446, "learning_rate": 0.000660398831362957, "loss": 3.5844, "step": 40000 }, { "epoch": 2.7181002853648595, "grad_norm": 2.221518039703369, "learning_rate": 0.0006603563663541242, "loss": 3.5919, "step": 40005 }, { "epoch": 2.718440005435521, "grad_norm": 2.1429169178009033, "learning_rate": 0.0006603139013452914, "loss": 3.4135, "step": 40010 }, { "epoch": 2.7187797255061827, "grad_norm": 1.7880196571350098, "learning_rate": 0.0006602714363364589, "loss": 3.3338, "step": 40015 }, { "epoch": 2.719119445576845, "grad_norm": 2.1877262592315674, "learning_rate": 0.0006602289713276261, "loss": 3.5436, "step": 40020 }, { "epoch": 2.7194591656475064, "grad_norm": 1.4858880043029785, "learning_rate": 
0.0006601865063187933, "loss": 3.6115, "step": 40025 }, { "epoch": 2.719798885718168, "grad_norm": 2.3659253120422363, "learning_rate": 0.0006601440413099606, "loss": 3.5342, "step": 40030 }, { "epoch": 2.72013860578883, "grad_norm": 1.709861159324646, "learning_rate": 0.0006601015763011279, "loss": 3.2947, "step": 40035 }, { "epoch": 2.720478325859492, "grad_norm": 2.261918544769287, "learning_rate": 0.0006600591112922951, "loss": 3.6551, "step": 40040 }, { "epoch": 2.7208180459301534, "grad_norm": 1.8988417387008667, "learning_rate": 0.0006600166462834624, "loss": 3.4447, "step": 40045 }, { "epoch": 2.7211577660008155, "grad_norm": 1.7106353044509888, "learning_rate": 0.0006599741812746298, "loss": 3.3644, "step": 40050 }, { "epoch": 2.721497486071477, "grad_norm": 2.253324270248413, "learning_rate": 0.000659931716265797, "loss": 3.5215, "step": 40055 }, { "epoch": 2.7218372061421388, "grad_norm": 2.669940710067749, "learning_rate": 0.0006598892512569643, "loss": 3.3555, "step": 40060 }, { "epoch": 2.722176926212801, "grad_norm": 1.9949671030044556, "learning_rate": 0.0006598467862481316, "loss": 3.5112, "step": 40065 }, { "epoch": 2.7225166462834625, "grad_norm": 1.8455257415771484, "learning_rate": 0.0006598043212392988, "loss": 3.6323, "step": 40070 }, { "epoch": 2.722856366354124, "grad_norm": 2.1170926094055176, "learning_rate": 0.0006597618562304661, "loss": 3.5978, "step": 40075 }, { "epoch": 2.723196086424786, "grad_norm": 2.5665433406829834, "learning_rate": 0.0006597193912216333, "loss": 3.4175, "step": 40080 }, { "epoch": 2.723535806495448, "grad_norm": 1.952949047088623, "learning_rate": 0.0006596769262128007, "loss": 3.6998, "step": 40085 }, { "epoch": 2.7238755265661094, "grad_norm": 2.495952844619751, "learning_rate": 0.000659634461203968, "loss": 3.632, "step": 40090 }, { "epoch": 2.7242152466367715, "grad_norm": 1.5944526195526123, "learning_rate": 0.0006595919961951352, "loss": 3.5806, "step": 40095 }, { "epoch": 2.724554966707433, "grad_norm": 
2.0230371952056885, "learning_rate": 0.0006595495311863025, "loss": 3.5284, "step": 40100 }, { "epoch": 2.7248946867780948, "grad_norm": 1.928767204284668, "learning_rate": 0.0006595070661774698, "loss": 3.5757, "step": 40105 }, { "epoch": 2.725234406848757, "grad_norm": 2.628840923309326, "learning_rate": 0.000659464601168637, "loss": 3.6693, "step": 40110 }, { "epoch": 2.7255741269194185, "grad_norm": 1.9099262952804565, "learning_rate": 0.0006594221361598042, "loss": 3.5066, "step": 40115 }, { "epoch": 2.72591384699008, "grad_norm": 2.01987886428833, "learning_rate": 0.0006593796711509717, "loss": 3.3637, "step": 40120 }, { "epoch": 2.726253567060742, "grad_norm": 2.5901875495910645, "learning_rate": 0.0006593372061421389, "loss": 3.4149, "step": 40125 }, { "epoch": 2.726593287131404, "grad_norm": 3.660163402557373, "learning_rate": 0.0006592947411333061, "loss": 3.6726, "step": 40130 }, { "epoch": 2.7269330072020654, "grad_norm": 1.9830665588378906, "learning_rate": 0.0006592522761244735, "loss": 3.3942, "step": 40135 }, { "epoch": 2.7272727272727275, "grad_norm": 1.9154908657073975, "learning_rate": 0.0006592098111156407, "loss": 3.3251, "step": 40140 }, { "epoch": 2.727612447343389, "grad_norm": 2.1156132221221924, "learning_rate": 0.0006591673461068079, "loss": 3.4524, "step": 40145 }, { "epoch": 2.7279521674140508, "grad_norm": 2.155216693878174, "learning_rate": 0.0006591248810979753, "loss": 3.5098, "step": 40150 }, { "epoch": 2.728291887484713, "grad_norm": 1.476972222328186, "learning_rate": 0.0006590824160891426, "loss": 3.4411, "step": 40155 }, { "epoch": 2.7286316075553745, "grad_norm": 1.6644856929779053, "learning_rate": 0.0006590399510803098, "loss": 3.449, "step": 40160 }, { "epoch": 2.728971327626036, "grad_norm": 2.274945020675659, "learning_rate": 0.0006589974860714772, "loss": 3.6238, "step": 40165 }, { "epoch": 2.729311047696698, "grad_norm": 2.6398324966430664, "learning_rate": 0.0006589550210626444, "loss": 3.5193, "step": 40170 }, { 
"epoch": 2.72965076776736, "grad_norm": 1.6819229125976562, "learning_rate": 0.0006589125560538116, "loss": 3.5175, "step": 40175 }, { "epoch": 2.7299904878380215, "grad_norm": 2.3946032524108887, "learning_rate": 0.0006588700910449789, "loss": 3.7196, "step": 40180 }, { "epoch": 2.730330207908683, "grad_norm": 1.6468273401260376, "learning_rate": 0.0006588276260361462, "loss": 3.5364, "step": 40185 }, { "epoch": 2.730669927979345, "grad_norm": 1.7972043752670288, "learning_rate": 0.0006587851610273136, "loss": 3.5946, "step": 40190 }, { "epoch": 2.731009648050007, "grad_norm": 1.6913700103759766, "learning_rate": 0.0006587426960184808, "loss": 3.5604, "step": 40195 }, { "epoch": 2.7313493681206684, "grad_norm": 1.975653052330017, "learning_rate": 0.0006587002310096481, "loss": 3.74, "step": 40200 }, { "epoch": 2.7316890881913305, "grad_norm": 2.0430145263671875, "learning_rate": 0.0006586577660008154, "loss": 3.4048, "step": 40205 }, { "epoch": 2.732028808261992, "grad_norm": 1.6372450590133667, "learning_rate": 0.0006586153009919826, "loss": 3.5492, "step": 40210 }, { "epoch": 2.7323685283326538, "grad_norm": 2.1698741912841797, "learning_rate": 0.0006585728359831498, "loss": 3.6703, "step": 40215 }, { "epoch": 2.732708248403316, "grad_norm": 2.0066466331481934, "learning_rate": 0.0006585303709743172, "loss": 3.4294, "step": 40220 }, { "epoch": 2.7330479684739775, "grad_norm": 2.203777313232422, "learning_rate": 0.0006584879059654845, "loss": 3.5058, "step": 40225 }, { "epoch": 2.733387688544639, "grad_norm": 1.9959038496017456, "learning_rate": 0.0006584454409566517, "loss": 3.36, "step": 40230 }, { "epoch": 2.7337274086153007, "grad_norm": 2.4360430240631104, "learning_rate": 0.0006584029759478191, "loss": 3.5395, "step": 40235 }, { "epoch": 2.734067128685963, "grad_norm": 1.8360987901687622, "learning_rate": 0.0006583605109389863, "loss": 3.5502, "step": 40240 }, { "epoch": 2.7344068487566244, "grad_norm": 2.042752504348755, "learning_rate": 
0.0006583180459301535, "loss": 3.4615, "step": 40245 }, { "epoch": 2.734746568827286, "grad_norm": 2.312889814376831, "learning_rate": 0.0006582755809213209, "loss": 3.617, "step": 40250 }, { "epoch": 2.735086288897948, "grad_norm": 2.152071475982666, "learning_rate": 0.0006582331159124881, "loss": 3.7172, "step": 40255 }, { "epoch": 2.7354260089686098, "grad_norm": 1.4226679801940918, "learning_rate": 0.0006581906509036554, "loss": 3.4852, "step": 40260 }, { "epoch": 2.7357657290392714, "grad_norm": 1.9672636985778809, "learning_rate": 0.0006581481858948228, "loss": 3.406, "step": 40265 }, { "epoch": 2.7361054491099335, "grad_norm": 1.69137442111969, "learning_rate": 0.00065810572088599, "loss": 3.5332, "step": 40270 }, { "epoch": 2.736445169180595, "grad_norm": 1.9450730085372925, "learning_rate": 0.0006580632558771572, "loss": 3.2778, "step": 40275 }, { "epoch": 2.7367848892512567, "grad_norm": 2.0837697982788086, "learning_rate": 0.0006580207908683245, "loss": 3.6251, "step": 40280 }, { "epoch": 2.737124609321919, "grad_norm": 2.0163514614105225, "learning_rate": 0.0006579783258594918, "loss": 3.4069, "step": 40285 }, { "epoch": 2.7374643293925804, "grad_norm": 1.9067981243133545, "learning_rate": 0.000657935860850659, "loss": 3.5468, "step": 40290 }, { "epoch": 2.737804049463242, "grad_norm": 1.7955492734909058, "learning_rate": 0.0006578933958418264, "loss": 3.3874, "step": 40295 }, { "epoch": 2.738143769533904, "grad_norm": 2.0632293224334717, "learning_rate": 0.0006578509308329937, "loss": 3.3498, "step": 40300 }, { "epoch": 2.738483489604566, "grad_norm": 2.355713367462158, "learning_rate": 0.0006578084658241609, "loss": 3.4452, "step": 40305 }, { "epoch": 2.7388232096752274, "grad_norm": 1.7413322925567627, "learning_rate": 0.0006577660008153282, "loss": 3.6253, "step": 40310 }, { "epoch": 2.7391629297458895, "grad_norm": 1.9102602005004883, "learning_rate": 0.0006577235358064954, "loss": 3.5918, "step": 40315 }, { "epoch": 2.739502649816551, "grad_norm": 
2.102555990219116, "learning_rate": 0.0006576810707976627, "loss": 3.1852, "step": 40320 }, { "epoch": 2.7398423698872127, "grad_norm": 1.754690170288086, "learning_rate": 0.00065763860578883, "loss": 3.5818, "step": 40325 }, { "epoch": 2.740182089957875, "grad_norm": 2.0684430599212646, "learning_rate": 0.0006575961407799973, "loss": 3.6117, "step": 40330 }, { "epoch": 2.7405218100285365, "grad_norm": 1.6025750637054443, "learning_rate": 0.0006575536757711646, "loss": 3.5375, "step": 40335 }, { "epoch": 2.740861530099198, "grad_norm": 2.0043702125549316, "learning_rate": 0.0006575112107623319, "loss": 3.4521, "step": 40340 }, { "epoch": 2.74120125016986, "grad_norm": 2.1909685134887695, "learning_rate": 0.0006574687457534991, "loss": 3.5626, "step": 40345 }, { "epoch": 2.741540970240522, "grad_norm": 2.5576589107513428, "learning_rate": 0.0006574262807446664, "loss": 3.0704, "step": 40350 }, { "epoch": 2.7418806903111834, "grad_norm": 1.9451247453689575, "learning_rate": 0.0006573838157358337, "loss": 3.3698, "step": 40355 }, { "epoch": 2.7422204103818455, "grad_norm": 1.7577756643295288, "learning_rate": 0.0006573413507270009, "loss": 3.5744, "step": 40360 }, { "epoch": 2.742560130452507, "grad_norm": 2.2025532722473145, "learning_rate": 0.0006572988857181682, "loss": 3.5791, "step": 40365 }, { "epoch": 2.7428998505231688, "grad_norm": 1.7638927698135376, "learning_rate": 0.0006572564207093356, "loss": 3.5978, "step": 40370 }, { "epoch": 2.743239570593831, "grad_norm": 2.4217023849487305, "learning_rate": 0.0006572139557005028, "loss": 3.5361, "step": 40375 }, { "epoch": 2.7435792906644925, "grad_norm": 1.7936484813690186, "learning_rate": 0.00065717149069167, "loss": 3.5876, "step": 40380 }, { "epoch": 2.743919010735154, "grad_norm": 2.1578776836395264, "learning_rate": 0.0006571290256828374, "loss": 3.548, "step": 40385 }, { "epoch": 2.744258730805816, "grad_norm": 1.9011746644973755, "learning_rate": 0.0006570865606740046, "loss": 3.642, "step": 40390 }, { 
"epoch": 2.744598450876478, "grad_norm": 1.860335111618042, "learning_rate": 0.0006570440956651718, "loss": 3.4463, "step": 40395 }, { "epoch": 2.7449381709471394, "grad_norm": 2.0699808597564697, "learning_rate": 0.0006570016306563393, "loss": 3.3102, "step": 40400 }, { "epoch": 2.7452778910178015, "grad_norm": 1.8411219120025635, "learning_rate": 0.0006569591656475065, "loss": 3.5277, "step": 40405 }, { "epoch": 2.745617611088463, "grad_norm": 1.6912727355957031, "learning_rate": 0.0006569167006386737, "loss": 3.4938, "step": 40410 }, { "epoch": 2.7459573311591248, "grad_norm": 1.6790907382965088, "learning_rate": 0.000656874235629841, "loss": 3.4744, "step": 40415 }, { "epoch": 2.746297051229787, "grad_norm": 1.5843346118927002, "learning_rate": 0.0006568317706210083, "loss": 3.5929, "step": 40420 }, { "epoch": 2.7466367713004485, "grad_norm": 1.5829716920852661, "learning_rate": 0.0006567893056121755, "loss": 3.6276, "step": 40425 }, { "epoch": 2.74697649137111, "grad_norm": 1.588818073272705, "learning_rate": 0.0006567468406033428, "loss": 3.2528, "step": 40430 }, { "epoch": 2.747316211441772, "grad_norm": 1.7689847946166992, "learning_rate": 0.0006567043755945102, "loss": 3.4047, "step": 40435 }, { "epoch": 2.747655931512434, "grad_norm": 2.048717975616455, "learning_rate": 0.0006566619105856774, "loss": 3.6467, "step": 40440 }, { "epoch": 2.7479956515830954, "grad_norm": 1.7509238719940186, "learning_rate": 0.0006566194455768447, "loss": 3.6916, "step": 40445 }, { "epoch": 2.7483353716537575, "grad_norm": 2.217118501663208, "learning_rate": 0.000656576980568012, "loss": 3.5489, "step": 40450 }, { "epoch": 2.748675091724419, "grad_norm": 1.5148423910140991, "learning_rate": 0.0006565345155591792, "loss": 3.4423, "step": 40455 }, { "epoch": 2.749014811795081, "grad_norm": 2.3426761627197266, "learning_rate": 0.0006564920505503465, "loss": 3.6859, "step": 40460 }, { "epoch": 2.749354531865743, "grad_norm": 2.1094391345977783, "learning_rate": 
0.0006564495855415137, "loss": 3.527, "step": 40465 }, { "epoch": 2.7496942519364045, "grad_norm": 2.637468099594116, "learning_rate": 0.0006564071205326811, "loss": 3.1283, "step": 40470 }, { "epoch": 2.750033972007066, "grad_norm": 1.7979570627212524, "learning_rate": 0.0006563646555238484, "loss": 3.6091, "step": 40475 }, { "epoch": 2.750373692077728, "grad_norm": 2.033876895904541, "learning_rate": 0.0006563221905150156, "loss": 3.5644, "step": 40480 }, { "epoch": 2.75071341214839, "grad_norm": 1.719833493232727, "learning_rate": 0.0006562797255061829, "loss": 3.5681, "step": 40485 }, { "epoch": 2.7510531322190515, "grad_norm": 1.6910865306854248, "learning_rate": 0.0006562372604973502, "loss": 3.5337, "step": 40490 }, { "epoch": 2.7513928522897135, "grad_norm": 1.7561553716659546, "learning_rate": 0.0006561947954885174, "loss": 3.537, "step": 40495 }, { "epoch": 2.751732572360375, "grad_norm": 1.8711553812026978, "learning_rate": 0.0006561523304796846, "loss": 3.5783, "step": 40500 }, { "epoch": 2.752072292431037, "grad_norm": 1.8557939529418945, "learning_rate": 0.0006561098654708521, "loss": 3.314, "step": 40505 }, { "epoch": 2.752412012501699, "grad_norm": 1.5678855180740356, "learning_rate": 0.0006560674004620193, "loss": 3.5379, "step": 40510 }, { "epoch": 2.7527517325723605, "grad_norm": 1.9937278032302856, "learning_rate": 0.0006560249354531865, "loss": 3.5261, "step": 40515 }, { "epoch": 2.753091452643022, "grad_norm": 2.3831162452697754, "learning_rate": 0.0006559824704443539, "loss": 3.3337, "step": 40520 }, { "epoch": 2.7534311727136838, "grad_norm": 2.250974655151367, "learning_rate": 0.0006559400054355211, "loss": 3.4905, "step": 40525 }, { "epoch": 2.753770892784346, "grad_norm": 1.6495634317398071, "learning_rate": 0.0006558975404266884, "loss": 3.4345, "step": 40530 }, { "epoch": 2.7541106128550075, "grad_norm": 1.8466109037399292, "learning_rate": 0.0006558550754178557, "loss": 3.634, "step": 40535 }, { "epoch": 2.754450332925669, "grad_norm": 
2.0139451026916504, "learning_rate": 0.000655812610409023, "loss": 3.4869, "step": 40540 }, { "epoch": 2.754790052996331, "grad_norm": 1.991456151008606, "learning_rate": 0.0006557701454001903, "loss": 3.502, "step": 40545 }, { "epoch": 2.755129773066993, "grad_norm": 2.1388397216796875, "learning_rate": 0.0006557276803913576, "loss": 3.6761, "step": 40550 }, { "epoch": 2.7554694931376544, "grad_norm": 2.2491257190704346, "learning_rate": 0.0006556852153825248, "loss": 3.6217, "step": 40555 }, { "epoch": 2.7558092132083165, "grad_norm": 1.7051012516021729, "learning_rate": 0.0006556512433754587, "loss": 3.4256, "step": 40560 }, { "epoch": 2.756148933278978, "grad_norm": 1.7996258735656738, "learning_rate": 0.0006556087783666259, "loss": 3.4352, "step": 40565 }, { "epoch": 2.7564886533496398, "grad_norm": 1.5967693328857422, "learning_rate": 0.0006555663133577932, "loss": 3.2118, "step": 40570 }, { "epoch": 2.7568283734203014, "grad_norm": 2.2952442169189453, "learning_rate": 0.0006555238483489605, "loss": 3.4653, "step": 40575 }, { "epoch": 2.7571680934909635, "grad_norm": 2.2270700931549072, "learning_rate": 0.0006554813833401277, "loss": 3.4179, "step": 40580 }, { "epoch": 2.757507813561625, "grad_norm": 2.074157238006592, "learning_rate": 0.000655438918331295, "loss": 3.3712, "step": 40585 }, { "epoch": 2.7578475336322867, "grad_norm": 2.093081474304199, "learning_rate": 0.0006553964533224623, "loss": 3.4273, "step": 40590 }, { "epoch": 2.758187253702949, "grad_norm": 2.3874592781066895, "learning_rate": 0.0006553539883136296, "loss": 3.5997, "step": 40595 }, { "epoch": 2.7585269737736104, "grad_norm": 1.748595952987671, "learning_rate": 0.0006553115233047968, "loss": 3.6324, "step": 40600 }, { "epoch": 2.758866693844272, "grad_norm": 1.559563159942627, "learning_rate": 0.0006552690582959642, "loss": 3.6176, "step": 40605 }, { "epoch": 2.759206413914934, "grad_norm": 2.3246185779571533, "learning_rate": 0.0006552265932871314, "loss": 3.5405, "step": 40610 }, { 
"epoch": 2.759546133985596, "grad_norm": 1.8244537115097046, "learning_rate": 0.0006551841282782986, "loss": 3.144, "step": 40615 }, { "epoch": 2.7598858540562574, "grad_norm": 1.8872584104537964, "learning_rate": 0.000655141663269466, "loss": 3.5922, "step": 40620 }, { "epoch": 2.7602255741269195, "grad_norm": 1.7978981733322144, "learning_rate": 0.0006550991982606332, "loss": 3.4506, "step": 40625 }, { "epoch": 2.760565294197581, "grad_norm": 2.0642945766448975, "learning_rate": 0.0006550567332518005, "loss": 3.4648, "step": 40630 }, { "epoch": 2.7609050142682428, "grad_norm": 1.7835726737976074, "learning_rate": 0.0006550142682429679, "loss": 3.4312, "step": 40635 }, { "epoch": 2.761244734338905, "grad_norm": 1.4047499895095825, "learning_rate": 0.0006549718032341351, "loss": 3.0877, "step": 40640 }, { "epoch": 2.7615844544095665, "grad_norm": 1.9770114421844482, "learning_rate": 0.0006549293382253023, "loss": 3.6468, "step": 40645 }, { "epoch": 2.761924174480228, "grad_norm": 2.0990781784057617, "learning_rate": 0.0006548868732164696, "loss": 3.4904, "step": 40650 }, { "epoch": 2.76226389455089, "grad_norm": 1.5803231000900269, "learning_rate": 0.0006548444082076369, "loss": 3.4695, "step": 40655 }, { "epoch": 2.762603614621552, "grad_norm": 2.1712141036987305, "learning_rate": 0.0006548019431988041, "loss": 3.5404, "step": 40660 }, { "epoch": 2.7629433346922134, "grad_norm": 1.8774493932724, "learning_rate": 0.0006547594781899715, "loss": 3.4641, "step": 40665 }, { "epoch": 2.7632830547628755, "grad_norm": 1.7349170446395874, "learning_rate": 0.0006547170131811388, "loss": 3.4835, "step": 40670 }, { "epoch": 2.763622774833537, "grad_norm": 1.9875199794769287, "learning_rate": 0.000654674548172306, "loss": 3.4175, "step": 40675 }, { "epoch": 2.7639624949041988, "grad_norm": 1.604730248451233, "learning_rate": 0.0006546320831634733, "loss": 3.3268, "step": 40680 }, { "epoch": 2.764302214974861, "grad_norm": 4.8443145751953125, "learning_rate": 
0.0006545896181546406, "loss": 3.6537, "step": 40685 }, { "epoch": 2.7646419350455225, "grad_norm": 2.158318281173706, "learning_rate": 0.0006545471531458078, "loss": 3.6664, "step": 40690 }, { "epoch": 2.764981655116184, "grad_norm": 2.0694456100463867, "learning_rate": 0.0006545046881369751, "loss": 3.4308, "step": 40695 }, { "epoch": 2.765321375186846, "grad_norm": 2.039278268814087, "learning_rate": 0.0006544622231281425, "loss": 3.3765, "step": 40700 }, { "epoch": 2.765661095257508, "grad_norm": 1.6917670965194702, "learning_rate": 0.0006544197581193097, "loss": 3.5274, "step": 40705 }, { "epoch": 2.7660008153281694, "grad_norm": 2.0979695320129395, "learning_rate": 0.000654377293110477, "loss": 3.4691, "step": 40710 }, { "epoch": 2.7663405353988315, "grad_norm": 1.751339316368103, "learning_rate": 0.0006543348281016442, "loss": 3.4705, "step": 40715 }, { "epoch": 2.766680255469493, "grad_norm": 2.2849655151367188, "learning_rate": 0.0006542923630928115, "loss": 3.535, "step": 40720 }, { "epoch": 2.7670199755401548, "grad_norm": 2.245398759841919, "learning_rate": 0.0006542498980839788, "loss": 3.3107, "step": 40725 }, { "epoch": 2.767359695610817, "grad_norm": 2.226125955581665, "learning_rate": 0.000654207433075146, "loss": 3.4255, "step": 40730 }, { "epoch": 2.7676994156814785, "grad_norm": 1.4468274116516113, "learning_rate": 0.0006541649680663135, "loss": 3.5212, "step": 40735 }, { "epoch": 2.76803913575214, "grad_norm": 1.8293375968933105, "learning_rate": 0.0006541225030574807, "loss": 3.4793, "step": 40740 }, { "epoch": 2.768378855822802, "grad_norm": 1.5376189947128296, "learning_rate": 0.0006540800380486479, "loss": 3.4787, "step": 40745 }, { "epoch": 2.768718575893464, "grad_norm": 2.104729175567627, "learning_rate": 0.0006540375730398153, "loss": 3.5727, "step": 40750 }, { "epoch": 2.7690582959641254, "grad_norm": 2.212388515472412, "learning_rate": 0.0006539951080309825, "loss": 3.404, "step": 40755 }, { "epoch": 2.7693980160347875, "grad_norm": 
2.142744541168213, "learning_rate": 0.0006539526430221497, "loss": 3.478, "step": 40760 }, { "epoch": 2.769737736105449, "grad_norm": 1.7796475887298584, "learning_rate": 0.0006539101780133171, "loss": 3.5173, "step": 40765 }, { "epoch": 2.770077456176111, "grad_norm": 1.877986192703247, "learning_rate": 0.0006538677130044844, "loss": 3.5431, "step": 40770 }, { "epoch": 2.770417176246773, "grad_norm": 2.2779250144958496, "learning_rate": 0.0006538252479956516, "loss": 3.7176, "step": 40775 }, { "epoch": 2.7707568963174345, "grad_norm": 2.1196300983428955, "learning_rate": 0.0006537827829868189, "loss": 3.5073, "step": 40780 }, { "epoch": 2.771096616388096, "grad_norm": 2.0954480171203613, "learning_rate": 0.0006537403179779862, "loss": 3.4652, "step": 40785 }, { "epoch": 2.771436336458758, "grad_norm": 1.5193485021591187, "learning_rate": 0.0006536978529691534, "loss": 3.5184, "step": 40790 }, { "epoch": 2.77177605652942, "grad_norm": 1.8255221843719482, "learning_rate": 0.0006536553879603207, "loss": 3.6335, "step": 40795 }, { "epoch": 2.7721157766000815, "grad_norm": 1.805977702140808, "learning_rate": 0.000653612922951488, "loss": 3.3939, "step": 40800 }, { "epoch": 2.7724554966707435, "grad_norm": 2.0888586044311523, "learning_rate": 0.0006535704579426553, "loss": 3.3571, "step": 40805 }, { "epoch": 2.772795216741405, "grad_norm": 1.7741544246673584, "learning_rate": 0.0006535279929338226, "loss": 3.325, "step": 40810 }, { "epoch": 2.773134936812067, "grad_norm": 2.000915765762329, "learning_rate": 0.0006534855279249898, "loss": 3.6961, "step": 40815 }, { "epoch": 2.773474656882729, "grad_norm": 1.6384013891220093, "learning_rate": 0.0006534430629161571, "loss": 3.6378, "step": 40820 }, { "epoch": 2.7738143769533905, "grad_norm": 1.9900325536727905, "learning_rate": 0.0006534005979073244, "loss": 3.3994, "step": 40825 }, { "epoch": 2.774154097024052, "grad_norm": 2.6277196407318115, "learning_rate": 0.0006533581328984916, "loss": 3.3962, "step": 40830 }, { 
"epoch": 2.774493817094714, "grad_norm": 1.868296504020691, "learning_rate": 0.000653315667889659, "loss": 3.6147, "step": 40835 }, { "epoch": 2.774833537165376, "grad_norm": 1.6855266094207764, "learning_rate": 0.0006532732028808263, "loss": 3.6738, "step": 40840 }, { "epoch": 2.7751732572360375, "grad_norm": 1.8970952033996582, "learning_rate": 0.0006532307378719935, "loss": 3.6748, "step": 40845 }, { "epoch": 2.7755129773066995, "grad_norm": 2.0346362590789795, "learning_rate": 0.0006531882728631607, "loss": 3.3207, "step": 40850 }, { "epoch": 2.775852697377361, "grad_norm": 2.323904275894165, "learning_rate": 0.0006531458078543281, "loss": 3.3771, "step": 40855 }, { "epoch": 2.776192417448023, "grad_norm": 1.8224214315414429, "learning_rate": 0.0006531033428454953, "loss": 3.4405, "step": 40860 }, { "epoch": 2.7765321375186844, "grad_norm": 1.9839128255844116, "learning_rate": 0.0006530608778366625, "loss": 3.7259, "step": 40865 }, { "epoch": 2.7768718575893465, "grad_norm": 1.614876389503479, "learning_rate": 0.00065301841282783, "loss": 3.457, "step": 40870 }, { "epoch": 2.777211577660008, "grad_norm": 2.4341228008270264, "learning_rate": 0.0006529759478189972, "loss": 3.5018, "step": 40875 }, { "epoch": 2.7775512977306698, "grad_norm": 2.1174027919769287, "learning_rate": 0.0006529334828101644, "loss": 3.5298, "step": 40880 }, { "epoch": 2.777891017801332, "grad_norm": 2.650953531265259, "learning_rate": 0.0006528910178013318, "loss": 3.3615, "step": 40885 }, { "epoch": 2.7782307378719935, "grad_norm": 2.59495210647583, "learning_rate": 0.000652848552792499, "loss": 3.5406, "step": 40890 }, { "epoch": 2.778570457942655, "grad_norm": 1.8840477466583252, "learning_rate": 0.0006528060877836662, "loss": 3.6781, "step": 40895 }, { "epoch": 2.778910178013317, "grad_norm": 1.9357606172561646, "learning_rate": 0.0006527636227748335, "loss": 3.1914, "step": 40900 }, { "epoch": 2.779249898083979, "grad_norm": 1.8969565629959106, "learning_rate": 0.0006527211577660009, 
"loss": 3.6045, "step": 40905 }, { "epoch": 2.7795896181546405, "grad_norm": 1.6777567863464355, "learning_rate": 0.0006526786927571681, "loss": 3.4747, "step": 40910 }, { "epoch": 2.779929338225302, "grad_norm": 2.441063404083252, "learning_rate": 0.0006526362277483354, "loss": 3.5133, "step": 40915 }, { "epoch": 2.780269058295964, "grad_norm": 2.473067283630371, "learning_rate": 0.0006525937627395027, "loss": 3.2064, "step": 40920 }, { "epoch": 2.780608778366626, "grad_norm": 1.9255053997039795, "learning_rate": 0.0006525512977306699, "loss": 3.7212, "step": 40925 }, { "epoch": 2.7809484984372874, "grad_norm": 1.7716295719146729, "learning_rate": 0.0006525088327218372, "loss": 3.5101, "step": 40930 }, { "epoch": 2.7812882185079495, "grad_norm": 1.663835048675537, "learning_rate": 0.0006524663677130045, "loss": 3.5163, "step": 40935 }, { "epoch": 2.781627938578611, "grad_norm": 1.4619674682617188, "learning_rate": 0.0006524239027041718, "loss": 3.6009, "step": 40940 }, { "epoch": 2.7819676586492728, "grad_norm": 1.7396286725997925, "learning_rate": 0.0006523814376953391, "loss": 3.3644, "step": 40945 }, { "epoch": 2.782307378719935, "grad_norm": 2.7073047161102295, "learning_rate": 0.0006523389726865063, "loss": 3.6205, "step": 40950 }, { "epoch": 2.7826470987905965, "grad_norm": 1.9076470136642456, "learning_rate": 0.0006522965076776736, "loss": 3.5217, "step": 40955 }, { "epoch": 2.782986818861258, "grad_norm": 2.0810608863830566, "learning_rate": 0.0006522540426688409, "loss": 3.5883, "step": 40960 }, { "epoch": 2.78332653893192, "grad_norm": 2.817373752593994, "learning_rate": 0.0006522115776600081, "loss": 3.5029, "step": 40965 }, { "epoch": 2.783666259002582, "grad_norm": 1.5872732400894165, "learning_rate": 0.0006521691126511754, "loss": 3.361, "step": 40970 }, { "epoch": 2.7840059790732434, "grad_norm": 1.6193054914474487, "learning_rate": 0.0006521266476423428, "loss": 3.4565, "step": 40975 }, { "epoch": 2.7843456991439055, "grad_norm": 2.307054281234741, 
"learning_rate": 0.00065208418263351, "loss": 3.2567, "step": 40980 }, { "epoch": 2.784685419214567, "grad_norm": 1.7031259536743164, "learning_rate": 0.0006520417176246773, "loss": 3.5003, "step": 40985 }, { "epoch": 2.7850251392852288, "grad_norm": 1.9746779203414917, "learning_rate": 0.0006519992526158446, "loss": 3.4323, "step": 40990 }, { "epoch": 2.785364859355891, "grad_norm": 2.0525624752044678, "learning_rate": 0.0006519567876070118, "loss": 3.6085, "step": 40995 }, { "epoch": 2.7857045794265525, "grad_norm": 2.140350580215454, "learning_rate": 0.000651914322598179, "loss": 3.3003, "step": 41000 }, { "epoch": 2.786044299497214, "grad_norm": 1.640519142150879, "learning_rate": 0.0006518718575893464, "loss": 3.7514, "step": 41005 }, { "epoch": 2.786384019567876, "grad_norm": 1.849300742149353, "learning_rate": 0.0006518293925805137, "loss": 3.5772, "step": 41010 }, { "epoch": 2.786723739638538, "grad_norm": 1.6387447118759155, "learning_rate": 0.0006517869275716809, "loss": 3.5646, "step": 41015 }, { "epoch": 2.7870634597091994, "grad_norm": 2.199744939804077, "learning_rate": 0.0006517444625628483, "loss": 3.5113, "step": 41020 }, { "epoch": 2.7874031797798615, "grad_norm": 2.0539722442626953, "learning_rate": 0.0006517019975540155, "loss": 3.6557, "step": 41025 }, { "epoch": 2.787742899850523, "grad_norm": 2.6692357063293457, "learning_rate": 0.0006516595325451827, "loss": 3.3247, "step": 41030 }, { "epoch": 2.788082619921185, "grad_norm": 1.891115665435791, "learning_rate": 0.00065161706753635, "loss": 3.5852, "step": 41035 }, { "epoch": 2.788422339991847, "grad_norm": 1.83669114112854, "learning_rate": 0.0006515746025275173, "loss": 3.4906, "step": 41040 }, { "epoch": 2.7887620600625085, "grad_norm": 1.7146339416503906, "learning_rate": 0.0006515321375186846, "loss": 3.1255, "step": 41045 }, { "epoch": 2.78910178013317, "grad_norm": 2.1055030822753906, "learning_rate": 0.000651489672509852, "loss": 3.2473, "step": 41050 }, { "epoch": 2.789441500203832, 
"grad_norm": 1.4264822006225586, "learning_rate": 0.0006514472075010192, "loss": 3.5766, "step": 41055 }, { "epoch": 2.789781220274494, "grad_norm": 1.606100082397461, "learning_rate": 0.0006514047424921864, "loss": 3.4704, "step": 41060 }, { "epoch": 2.7901209403451555, "grad_norm": 1.5794703960418701, "learning_rate": 0.0006513622774833537, "loss": 3.4196, "step": 41065 }, { "epoch": 2.7904606604158175, "grad_norm": 1.604437232017517, "learning_rate": 0.000651319812474521, "loss": 3.5437, "step": 41070 }, { "epoch": 2.790800380486479, "grad_norm": 2.251843214035034, "learning_rate": 0.0006512773474656883, "loss": 3.3868, "step": 41075 }, { "epoch": 2.791140100557141, "grad_norm": 1.8958760499954224, "learning_rate": 0.0006512348824568556, "loss": 3.4254, "step": 41080 }, { "epoch": 2.791479820627803, "grad_norm": 2.42134165763855, "learning_rate": 0.0006511924174480229, "loss": 3.4458, "step": 41085 }, { "epoch": 2.7918195406984645, "grad_norm": 1.8175679445266724, "learning_rate": 0.0006511499524391902, "loss": 3.3671, "step": 41090 }, { "epoch": 2.792159260769126, "grad_norm": 1.861329436302185, "learning_rate": 0.0006511074874303574, "loss": 3.5771, "step": 41095 }, { "epoch": 2.792498980839788, "grad_norm": 2.0671703815460205, "learning_rate": 0.0006510650224215246, "loss": 3.3761, "step": 41100 }, { "epoch": 2.79283870091045, "grad_norm": 1.6182039976119995, "learning_rate": 0.000651022557412692, "loss": 3.5675, "step": 41105 }, { "epoch": 2.7931784209811115, "grad_norm": 1.642660140991211, "learning_rate": 0.0006509800924038592, "loss": 3.5406, "step": 41110 }, { "epoch": 2.7935181410517735, "grad_norm": 1.8303589820861816, "learning_rate": 0.0006509376273950265, "loss": 3.6869, "step": 41115 }, { "epoch": 2.793857861122435, "grad_norm": 1.686037302017212, "learning_rate": 0.0006508951623861939, "loss": 3.4803, "step": 41120 }, { "epoch": 2.794197581193097, "grad_norm": 1.550033688545227, "learning_rate": 0.0006508526973773611, "loss": 3.589, "step": 41125 
}, { "epoch": 2.794537301263759, "grad_norm": 1.8224786520004272, "learning_rate": 0.0006508102323685283, "loss": 3.3054, "step": 41130 }, { "epoch": 2.7948770213344205, "grad_norm": 1.6485037803649902, "learning_rate": 0.0006507677673596957, "loss": 3.3894, "step": 41135 }, { "epoch": 2.795216741405082, "grad_norm": 2.0739099979400635, "learning_rate": 0.0006507253023508629, "loss": 3.3176, "step": 41140 }, { "epoch": 2.795556461475744, "grad_norm": 3.0713603496551514, "learning_rate": 0.0006506828373420301, "loss": 3.3512, "step": 41145 }, { "epoch": 2.795896181546406, "grad_norm": 1.8901045322418213, "learning_rate": 0.0006506403723331975, "loss": 3.531, "step": 41150 }, { "epoch": 2.7962359016170675, "grad_norm": 2.158926010131836, "learning_rate": 0.0006505979073243648, "loss": 3.3402, "step": 41155 }, { "epoch": 2.7965756216877296, "grad_norm": 1.686616063117981, "learning_rate": 0.000650555442315532, "loss": 3.4202, "step": 41160 }, { "epoch": 2.796915341758391, "grad_norm": 2.159701347351074, "learning_rate": 0.0006505129773066993, "loss": 3.5271, "step": 41165 }, { "epoch": 2.797255061829053, "grad_norm": 1.5736467838287354, "learning_rate": 0.0006504705122978666, "loss": 3.4737, "step": 41170 }, { "epoch": 2.797594781899715, "grad_norm": 1.854995608329773, "learning_rate": 0.0006504280472890338, "loss": 3.492, "step": 41175 }, { "epoch": 2.7979345019703765, "grad_norm": 2.029691457748413, "learning_rate": 0.0006503855822802011, "loss": 3.5989, "step": 41180 }, { "epoch": 2.798274222041038, "grad_norm": 1.994094967842102, "learning_rate": 0.0006503431172713685, "loss": 3.3956, "step": 41185 }, { "epoch": 2.7986139421117002, "grad_norm": 1.7112997770309448, "learning_rate": 0.0006503006522625357, "loss": 3.5748, "step": 41190 }, { "epoch": 2.798953662182362, "grad_norm": 1.5187112092971802, "learning_rate": 0.000650258187253703, "loss": 3.7558, "step": 41195 }, { "epoch": 2.7992933822530235, "grad_norm": 2.060880661010742, "learning_rate": 
0.0006502157222448702, "loss": 3.4348, "step": 41200 }, { "epoch": 2.799633102323685, "grad_norm": 1.6231788396835327, "learning_rate": 0.0006501732572360375, "loss": 3.4873, "step": 41205 }, { "epoch": 2.799972822394347, "grad_norm": 1.6644299030303955, "learning_rate": 0.0006501307922272048, "loss": 3.715, "step": 41210 }, { "epoch": 2.800312542465009, "grad_norm": 1.9556574821472168, "learning_rate": 0.000650088327218372, "loss": 3.2593, "step": 41215 }, { "epoch": 2.8006522625356705, "grad_norm": 2.130115509033203, "learning_rate": 0.0006500458622095394, "loss": 3.3417, "step": 41220 }, { "epoch": 2.8009919826063325, "grad_norm": 1.690883755683899, "learning_rate": 0.0006500033972007067, "loss": 3.4196, "step": 41225 }, { "epoch": 2.801331702676994, "grad_norm": 1.8908201456069946, "learning_rate": 0.0006499609321918739, "loss": 3.3638, "step": 41230 }, { "epoch": 2.801671422747656, "grad_norm": 2.9995532035827637, "learning_rate": 0.0006499184671830411, "loss": 3.3714, "step": 41235 }, { "epoch": 2.802011142818318, "grad_norm": 2.168915033340454, "learning_rate": 0.0006498760021742085, "loss": 3.6156, "step": 41240 }, { "epoch": 2.8023508628889795, "grad_norm": 2.0820019245147705, "learning_rate": 0.0006498335371653757, "loss": 3.4194, "step": 41245 }, { "epoch": 2.802690582959641, "grad_norm": 1.6439716815948486, "learning_rate": 0.0006497910721565429, "loss": 3.3632, "step": 41250 }, { "epoch": 2.8030303030303028, "grad_norm": 1.9481099843978882, "learning_rate": 0.0006497486071477104, "loss": 3.4033, "step": 41255 }, { "epoch": 2.803370023100965, "grad_norm": 2.0526294708251953, "learning_rate": 0.0006497061421388776, "loss": 3.9688, "step": 41260 }, { "epoch": 2.8037097431716265, "grad_norm": 2.0776803493499756, "learning_rate": 0.0006496636771300448, "loss": 3.5576, "step": 41265 }, { "epoch": 2.804049463242288, "grad_norm": 1.573593258857727, "learning_rate": 0.0006496212121212122, "loss": 3.3734, "step": 41270 }, { "epoch": 2.80438918331295, 
"grad_norm": 1.9031211137771606, "learning_rate": 0.0006495787471123794, "loss": 3.3358, "step": 41275 }, { "epoch": 2.804728903383612, "grad_norm": 1.851203441619873, "learning_rate": 0.0006495362821035466, "loss": 3.4243, "step": 41280 }, { "epoch": 2.8050686234542734, "grad_norm": 1.713723063468933, "learning_rate": 0.000649493817094714, "loss": 3.358, "step": 41285 }, { "epoch": 2.8054083435249355, "grad_norm": 2.2001829147338867, "learning_rate": 0.0006494513520858813, "loss": 3.595, "step": 41290 }, { "epoch": 2.805748063595597, "grad_norm": 2.311052083969116, "learning_rate": 0.0006494088870770485, "loss": 3.2124, "step": 41295 }, { "epoch": 2.8060877836662588, "grad_norm": 1.7250406742095947, "learning_rate": 0.0006493664220682158, "loss": 3.4981, "step": 41300 }, { "epoch": 2.806427503736921, "grad_norm": 1.7264238595962524, "learning_rate": 0.0006493239570593831, "loss": 3.5417, "step": 41305 }, { "epoch": 2.8067672238075825, "grad_norm": 1.6583131551742554, "learning_rate": 0.0006492814920505503, "loss": 3.3515, "step": 41310 }, { "epoch": 2.807106943878244, "grad_norm": 2.6767466068267822, "learning_rate": 0.0006492390270417176, "loss": 3.3499, "step": 41315 }, { "epoch": 2.807446663948906, "grad_norm": 2.256401538848877, "learning_rate": 0.0006491965620328849, "loss": 3.4564, "step": 41320 }, { "epoch": 2.807786384019568, "grad_norm": 1.637000322341919, "learning_rate": 0.0006491540970240522, "loss": 3.5753, "step": 41325 }, { "epoch": 2.8081261040902294, "grad_norm": 2.0478155612945557, "learning_rate": 0.0006491116320152195, "loss": 3.4194, "step": 41330 }, { "epoch": 2.8084658241608915, "grad_norm": 2.0795111656188965, "learning_rate": 0.0006490691670063867, "loss": 3.3973, "step": 41335 }, { "epoch": 2.808805544231553, "grad_norm": 1.9081299304962158, "learning_rate": 0.000649026701997554, "loss": 3.4301, "step": 41340 }, { "epoch": 2.809145264302215, "grad_norm": 1.868143916130066, "learning_rate": 0.0006489842369887213, "loss": 3.5445, "step": 
41345 }, { "epoch": 2.809484984372877, "grad_norm": 2.101992607116699, "learning_rate": 0.0006489417719798885, "loss": 3.3332, "step": 41350 }, { "epoch": 2.8098247044435385, "grad_norm": 1.5970321893692017, "learning_rate": 0.0006488993069710558, "loss": 3.5707, "step": 41355 }, { "epoch": 2.8101644245142, "grad_norm": 5.365260601043701, "learning_rate": 0.0006488568419622232, "loss": 3.4092, "step": 41360 }, { "epoch": 2.810504144584862, "grad_norm": 2.519824266433716, "learning_rate": 0.0006488143769533904, "loss": 3.6214, "step": 41365 }, { "epoch": 2.810843864655524, "grad_norm": 2.0792715549468994, "learning_rate": 0.0006487719119445577, "loss": 3.4447, "step": 41370 }, { "epoch": 2.8111835847261855, "grad_norm": 2.101949453353882, "learning_rate": 0.000648729446935725, "loss": 3.5077, "step": 41375 }, { "epoch": 2.8115233047968475, "grad_norm": 1.6869597434997559, "learning_rate": 0.0006486869819268922, "loss": 3.6201, "step": 41380 }, { "epoch": 2.811863024867509, "grad_norm": 1.9649124145507812, "learning_rate": 0.0006486445169180594, "loss": 3.6195, "step": 41385 }, { "epoch": 2.812202744938171, "grad_norm": 1.684089183807373, "learning_rate": 0.0006486020519092269, "loss": 3.5584, "step": 41390 }, { "epoch": 2.812542465008833, "grad_norm": 1.7921468019485474, "learning_rate": 0.0006485595869003941, "loss": 3.583, "step": 41395 }, { "epoch": 2.8128821850794945, "grad_norm": 1.631636619567871, "learning_rate": 0.0006485171218915613, "loss": 3.6009, "step": 41400 }, { "epoch": 2.813221905150156, "grad_norm": 1.9161016941070557, "learning_rate": 0.0006484746568827287, "loss": 3.5628, "step": 41405 }, { "epoch": 2.813561625220818, "grad_norm": 1.5622248649597168, "learning_rate": 0.0006484321918738959, "loss": 3.5325, "step": 41410 }, { "epoch": 2.81390134529148, "grad_norm": 2.042198657989502, "learning_rate": 0.0006483897268650632, "loss": 3.5902, "step": 41415 }, { "epoch": 2.8142410653621415, "grad_norm": 2.700460433959961, "learning_rate": 
0.0006483472618562305, "loss": 3.8047, "step": 41420 }, { "epoch": 2.8145807854328035, "grad_norm": 1.770040512084961, "learning_rate": 0.0006483047968473978, "loss": 3.5036, "step": 41425 }, { "epoch": 2.814920505503465, "grad_norm": 2.4902312755584717, "learning_rate": 0.0006482623318385651, "loss": 3.4068, "step": 41430 }, { "epoch": 2.815260225574127, "grad_norm": 1.683815598487854, "learning_rate": 0.0006482198668297324, "loss": 3.3606, "step": 41435 }, { "epoch": 2.815599945644789, "grad_norm": 2.4274344444274902, "learning_rate": 0.0006481774018208996, "loss": 3.4296, "step": 41440 }, { "epoch": 2.8159396657154505, "grad_norm": 2.3099474906921387, "learning_rate": 0.0006481349368120669, "loss": 3.5696, "step": 41445 }, { "epoch": 2.816279385786112, "grad_norm": 1.885784387588501, "learning_rate": 0.0006480924718032341, "loss": 3.8426, "step": 41450 }, { "epoch": 2.816619105856774, "grad_norm": 1.8503258228302002, "learning_rate": 0.0006480500067944014, "loss": 3.5136, "step": 41455 }, { "epoch": 2.816958825927436, "grad_norm": 1.8262149095535278, "learning_rate": 0.0006480075417855688, "loss": 3.617, "step": 41460 }, { "epoch": 2.8172985459980975, "grad_norm": 1.9781805276870728, "learning_rate": 0.000647965076776736, "loss": 3.465, "step": 41465 }, { "epoch": 2.8176382660687596, "grad_norm": 1.8227177858352661, "learning_rate": 0.0006479226117679033, "loss": 3.4897, "step": 41470 }, { "epoch": 2.817977986139421, "grad_norm": 1.5891506671905518, "learning_rate": 0.0006478801467590706, "loss": 3.6409, "step": 41475 }, { "epoch": 2.818317706210083, "grad_norm": 1.5482256412506104, "learning_rate": 0.0006478376817502378, "loss": 3.5118, "step": 41480 }, { "epoch": 2.818657426280745, "grad_norm": 2.067552089691162, "learning_rate": 0.000647795216741405, "loss": 3.6735, "step": 41485 }, { "epoch": 2.8189971463514065, "grad_norm": 2.065706253051758, "learning_rate": 0.0006477527517325724, "loss": 3.4738, "step": 41490 }, { "epoch": 2.819336866422068, "grad_norm": 
1.76511549949646, "learning_rate": 0.0006477102867237397, "loss": 3.5867, "step": 41495 }, { "epoch": 2.8196765864927302, "grad_norm": 1.9152251482009888, "learning_rate": 0.0006476678217149069, "loss": 3.3701, "step": 41500 }, { "epoch": 2.820016306563392, "grad_norm": 1.8570533990859985, "learning_rate": 0.0006476253567060743, "loss": 3.6217, "step": 41505 }, { "epoch": 2.8203560266340535, "grad_norm": 1.841868281364441, "learning_rate": 0.0006475828916972415, "loss": 3.5287, "step": 41510 }, { "epoch": 2.8206957467047156, "grad_norm": 2.1927318572998047, "learning_rate": 0.0006475404266884087, "loss": 3.4409, "step": 41515 }, { "epoch": 2.821035466775377, "grad_norm": 2.1206295490264893, "learning_rate": 0.0006474979616795761, "loss": 3.5638, "step": 41520 }, { "epoch": 2.821375186846039, "grad_norm": 1.5704929828643799, "learning_rate": 0.0006474554966707433, "loss": 3.4536, "step": 41525 }, { "epoch": 2.821714906916701, "grad_norm": 1.591810703277588, "learning_rate": 0.0006474130316619106, "loss": 3.5419, "step": 41530 }, { "epoch": 2.8220546269873625, "grad_norm": 1.4780553579330444, "learning_rate": 0.000647370566653078, "loss": 3.5869, "step": 41535 }, { "epoch": 2.822394347058024, "grad_norm": 2.014535427093506, "learning_rate": 0.0006473281016442452, "loss": 3.4122, "step": 41540 }, { "epoch": 2.822734067128686, "grad_norm": 1.3791530132293701, "learning_rate": 0.0006472856366354124, "loss": 3.2945, "step": 41545 }, { "epoch": 2.823073787199348, "grad_norm": 1.621890664100647, "learning_rate": 0.0006472431716265797, "loss": 3.5001, "step": 41550 }, { "epoch": 2.8234135072700095, "grad_norm": 2.2024025917053223, "learning_rate": 0.000647200706617747, "loss": 3.691, "step": 41555 }, { "epoch": 2.823753227340671, "grad_norm": 1.64906644821167, "learning_rate": 0.0006471582416089142, "loss": 3.5453, "step": 41560 }, { "epoch": 2.824092947411333, "grad_norm": 2.063220500946045, "learning_rate": 0.0006471157766000816, "loss": 3.7949, "step": 41565 }, { 
"epoch": 2.824432667481995, "grad_norm": 1.9204055070877075, "learning_rate": 0.0006470733115912489, "loss": 3.3741, "step": 41570 }, { "epoch": 2.8247723875526565, "grad_norm": 3.6511924266815186, "learning_rate": 0.0006470308465824161, "loss": 3.6579, "step": 41575 }, { "epoch": 2.8251121076233185, "grad_norm": 1.4410384893417358, "learning_rate": 0.0006469883815735834, "loss": 3.4793, "step": 41580 }, { "epoch": 2.82545182769398, "grad_norm": 2.472730875015259, "learning_rate": 0.0006469459165647506, "loss": 3.5428, "step": 41585 }, { "epoch": 2.825791547764642, "grad_norm": 1.921528697013855, "learning_rate": 0.0006469034515559179, "loss": 3.7944, "step": 41590 }, { "epoch": 2.8261312678353034, "grad_norm": 1.8030387163162231, "learning_rate": 0.0006468609865470852, "loss": 3.4677, "step": 41595 }, { "epoch": 2.8264709879059655, "grad_norm": 1.7136526107788086, "learning_rate": 0.0006468185215382525, "loss": 3.6538, "step": 41600 }, { "epoch": 2.826810707976627, "grad_norm": 1.6893507242202759, "learning_rate": 0.0006467760565294198, "loss": 3.4837, "step": 41605 }, { "epoch": 2.8271504280472888, "grad_norm": 1.637447714805603, "learning_rate": 0.0006467335915205871, "loss": 3.443, "step": 41610 }, { "epoch": 2.827490148117951, "grad_norm": 1.8697246313095093, "learning_rate": 0.0006466911265117543, "loss": 3.5009, "step": 41615 }, { "epoch": 2.8278298681886125, "grad_norm": 1.8208725452423096, "learning_rate": 0.0006466486615029216, "loss": 3.4522, "step": 41620 }, { "epoch": 2.828169588259274, "grad_norm": 2.4236226081848145, "learning_rate": 0.0006466061964940889, "loss": 3.3486, "step": 41625 }, { "epoch": 2.828509308329936, "grad_norm": 1.4520903825759888, "learning_rate": 0.0006465637314852561, "loss": 3.6007, "step": 41630 }, { "epoch": 2.828849028400598, "grad_norm": 1.4922889471054077, "learning_rate": 0.0006465212664764234, "loss": 3.4202, "step": 41635 }, { "epoch": 2.8291887484712595, "grad_norm": 1.5390301942825317, "learning_rate": 
0.0006464788014675908, "loss": 3.4909, "step": 41640 }, { "epoch": 2.8295284685419215, "grad_norm": 1.8683454990386963, "learning_rate": 0.000646436336458758, "loss": 3.593, "step": 41645 }, { "epoch": 2.829868188612583, "grad_norm": 2.0483720302581787, "learning_rate": 0.0006463938714499252, "loss": 3.7536, "step": 41650 }, { "epoch": 2.830207908683245, "grad_norm": 2.408720016479492, "learning_rate": 0.0006463514064410926, "loss": 3.5282, "step": 41655 }, { "epoch": 2.830547628753907, "grad_norm": 2.312236785888672, "learning_rate": 0.0006463089414322598, "loss": 3.4151, "step": 41660 }, { "epoch": 2.8308873488245685, "grad_norm": 1.9969357252120972, "learning_rate": 0.000646266476423427, "loss": 3.2746, "step": 41665 }, { "epoch": 2.83122706889523, "grad_norm": 2.247830629348755, "learning_rate": 0.0006462240114145945, "loss": 3.6098, "step": 41670 }, { "epoch": 2.831566788965892, "grad_norm": 1.883616328239441, "learning_rate": 0.0006461815464057617, "loss": 3.3783, "step": 41675 }, { "epoch": 2.831906509036554, "grad_norm": 2.0653884410858154, "learning_rate": 0.0006461390813969289, "loss": 3.5138, "step": 41680 }, { "epoch": 2.8322462291072155, "grad_norm": 1.9216803312301636, "learning_rate": 0.0006460966163880962, "loss": 3.5242, "step": 41685 }, { "epoch": 2.8325859491778775, "grad_norm": 1.7138605117797852, "learning_rate": 0.0006460541513792635, "loss": 3.6513, "step": 41690 }, { "epoch": 2.832925669248539, "grad_norm": 1.9152332544326782, "learning_rate": 0.0006460116863704307, "loss": 3.6222, "step": 41695 }, { "epoch": 2.833265389319201, "grad_norm": 2.365680456161499, "learning_rate": 0.000645969221361598, "loss": 3.4917, "step": 41700 }, { "epoch": 2.833605109389863, "grad_norm": 1.791487455368042, "learning_rate": 0.0006459267563527654, "loss": 3.6103, "step": 41705 }, { "epoch": 2.8339448294605245, "grad_norm": 2.047379493713379, "learning_rate": 0.0006458842913439326, "loss": 3.6756, "step": 41710 }, { "epoch": 2.834284549531186, "grad_norm": 
2.076319694519043, "learning_rate": 0.0006458418263350999, "loss": 3.4108, "step": 41715 }, { "epoch": 2.834624269601848, "grad_norm": 1.4668360948562622, "learning_rate": 0.0006457993613262672, "loss": 3.4505, "step": 41720 }, { "epoch": 2.83496398967251, "grad_norm": 1.5604170560836792, "learning_rate": 0.0006457568963174344, "loss": 3.5559, "step": 41725 }, { "epoch": 2.8353037097431715, "grad_norm": 2.102180004119873, "learning_rate": 0.0006457144313086017, "loss": 3.7027, "step": 41730 }, { "epoch": 2.8356434298138335, "grad_norm": 1.976547360420227, "learning_rate": 0.0006456719662997689, "loss": 3.3956, "step": 41735 }, { "epoch": 2.835983149884495, "grad_norm": 1.6797493696212769, "learning_rate": 0.0006456295012909363, "loss": 3.5127, "step": 41740 }, { "epoch": 2.836322869955157, "grad_norm": 2.059701919555664, "learning_rate": 0.0006455870362821036, "loss": 3.7408, "step": 41745 }, { "epoch": 2.836662590025819, "grad_norm": 1.774099349975586, "learning_rate": 0.0006455445712732708, "loss": 3.499, "step": 41750 }, { "epoch": 2.8370023100964805, "grad_norm": 1.757793664932251, "learning_rate": 0.0006455021062644382, "loss": 3.3339, "step": 41755 }, { "epoch": 2.837342030167142, "grad_norm": 2.2763547897338867, "learning_rate": 0.0006454596412556054, "loss": 3.7932, "step": 41760 }, { "epoch": 2.8376817502378042, "grad_norm": 1.8749035596847534, "learning_rate": 0.0006454171762467726, "loss": 3.4593, "step": 41765 }, { "epoch": 2.838021470308466, "grad_norm": 2.4571003913879395, "learning_rate": 0.00064537471123794, "loss": 3.6925, "step": 41770 }, { "epoch": 2.8383611903791275, "grad_norm": 1.783488154411316, "learning_rate": 0.0006453322462291073, "loss": 3.5659, "step": 41775 }, { "epoch": 2.8387009104497896, "grad_norm": 1.6393001079559326, "learning_rate": 0.0006452897812202745, "loss": 3.5189, "step": 41780 }, { "epoch": 2.839040630520451, "grad_norm": 1.6737697124481201, "learning_rate": 0.0006452473162114418, "loss": 3.5296, "step": 41785 }, { 
"epoch": 2.839380350591113, "grad_norm": 1.615766167640686, "learning_rate": 0.0006452048512026091, "loss": 3.4078, "step": 41790 }, { "epoch": 2.839720070661775, "grad_norm": 2.1345698833465576, "learning_rate": 0.0006451623861937763, "loss": 3.5708, "step": 41795 }, { "epoch": 2.8400597907324365, "grad_norm": 2.0727922916412354, "learning_rate": 0.0006451199211849436, "loss": 3.3078, "step": 41800 }, { "epoch": 2.840399510803098, "grad_norm": 1.9670339822769165, "learning_rate": 0.0006450774561761109, "loss": 3.7236, "step": 41805 }, { "epoch": 2.8407392308737602, "grad_norm": 2.1159892082214355, "learning_rate": 0.0006450349911672782, "loss": 3.6556, "step": 41810 }, { "epoch": 2.841078950944422, "grad_norm": 1.5114432573318481, "learning_rate": 0.0006449925261584455, "loss": 3.7651, "step": 41815 }, { "epoch": 2.8414186710150835, "grad_norm": 2.153996229171753, "learning_rate": 0.0006449500611496128, "loss": 3.2266, "step": 41820 }, { "epoch": 2.8417583910857456, "grad_norm": 2.2687716484069824, "learning_rate": 0.00064490759614078, "loss": 3.4488, "step": 41825 }, { "epoch": 2.842098111156407, "grad_norm": 2.388965606689453, "learning_rate": 0.0006448651311319473, "loss": 3.3745, "step": 41830 }, { "epoch": 2.842437831227069, "grad_norm": 1.7215145826339722, "learning_rate": 0.0006448226661231145, "loss": 3.3754, "step": 41835 }, { "epoch": 2.842777551297731, "grad_norm": 1.6824043989181519, "learning_rate": 0.0006447802011142818, "loss": 3.6091, "step": 41840 }, { "epoch": 2.8431172713683925, "grad_norm": 2.0740432739257812, "learning_rate": 0.0006447377361054492, "loss": 3.5401, "step": 41845 }, { "epoch": 2.843456991439054, "grad_norm": 2.4374077320098877, "learning_rate": 0.0006446952710966164, "loss": 3.4139, "step": 41850 }, { "epoch": 2.8437967115097162, "grad_norm": 1.9294673204421997, "learning_rate": 0.0006446528060877837, "loss": 3.5359, "step": 41855 }, { "epoch": 2.844136431580378, "grad_norm": 2.1295156478881836, "learning_rate": 
0.000644610341078951, "loss": 3.178, "step": 41860 }, { "epoch": 2.8444761516510395, "grad_norm": 1.9342243671417236, "learning_rate": 0.0006445678760701182, "loss": 3.2832, "step": 41865 }, { "epoch": 2.8448158717217016, "grad_norm": 1.9557281732559204, "learning_rate": 0.0006445254110612854, "loss": 3.3622, "step": 41870 }, { "epoch": 2.845155591792363, "grad_norm": 1.8989999294281006, "learning_rate": 0.0006444829460524528, "loss": 3.3127, "step": 41875 }, { "epoch": 2.845495311863025, "grad_norm": 2.0265941619873047, "learning_rate": 0.0006444404810436201, "loss": 3.2986, "step": 41880 }, { "epoch": 2.8458350319336865, "grad_norm": 1.8858153820037842, "learning_rate": 0.0006443980160347873, "loss": 3.475, "step": 41885 }, { "epoch": 2.8461747520043486, "grad_norm": 1.9481295347213745, "learning_rate": 0.0006443555510259547, "loss": 3.6649, "step": 41890 }, { "epoch": 2.84651447207501, "grad_norm": 2.6994383335113525, "learning_rate": 0.0006443130860171219, "loss": 3.7779, "step": 41895 }, { "epoch": 2.846854192145672, "grad_norm": 1.9913084506988525, "learning_rate": 0.0006442706210082891, "loss": 3.5569, "step": 41900 }, { "epoch": 2.847193912216334, "grad_norm": 1.6653653383255005, "learning_rate": 0.0006442281559994565, "loss": 3.4503, "step": 41905 }, { "epoch": 2.8475336322869955, "grad_norm": 1.7347403764724731, "learning_rate": 0.0006441856909906237, "loss": 3.4075, "step": 41910 }, { "epoch": 2.847873352357657, "grad_norm": 1.7171454429626465, "learning_rate": 0.000644143225981791, "loss": 3.6508, "step": 41915 }, { "epoch": 2.8482130724283192, "grad_norm": 1.975342869758606, "learning_rate": 0.0006441007609729584, "loss": 3.812, "step": 41920 }, { "epoch": 2.848552792498981, "grad_norm": 1.8589273691177368, "learning_rate": 0.0006440582959641256, "loss": 3.564, "step": 41925 }, { "epoch": 2.8488925125696425, "grad_norm": 1.7178536653518677, "learning_rate": 0.0006440158309552928, "loss": 3.441, "step": 41930 }, { "epoch": 2.849232232640304, 
"grad_norm": 2.1510531902313232, "learning_rate": 0.0006439733659464601, "loss": 3.4613, "step": 41935 }, { "epoch": 2.849571952710966, "grad_norm": 2.3068909645080566, "learning_rate": 0.0006439309009376274, "loss": 3.4236, "step": 41940 }, { "epoch": 2.849911672781628, "grad_norm": 1.8113166093826294, "learning_rate": 0.0006438884359287946, "loss": 3.3751, "step": 41945 }, { "epoch": 2.8502513928522895, "grad_norm": 2.402524948120117, "learning_rate": 0.000643845970919962, "loss": 3.3147, "step": 41950 }, { "epoch": 2.8505911129229515, "grad_norm": 1.7418752908706665, "learning_rate": 0.0006438035059111293, "loss": 3.5352, "step": 41955 }, { "epoch": 2.850930832993613, "grad_norm": 1.7218248844146729, "learning_rate": 0.0006437610409022965, "loss": 3.5628, "step": 41960 }, { "epoch": 2.851270553064275, "grad_norm": 2.0915029048919678, "learning_rate": 0.0006437185758934638, "loss": 3.3537, "step": 41965 }, { "epoch": 2.851610273134937, "grad_norm": 1.3449023962020874, "learning_rate": 0.000643676110884631, "loss": 3.5053, "step": 41970 }, { "epoch": 2.8519499932055985, "grad_norm": 2.0068199634552, "learning_rate": 0.0006436336458757983, "loss": 3.6342, "step": 41975 }, { "epoch": 2.85228971327626, "grad_norm": 1.6937928199768066, "learning_rate": 0.0006435911808669657, "loss": 3.2362, "step": 41980 }, { "epoch": 2.852629433346922, "grad_norm": 2.3767707347869873, "learning_rate": 0.0006435487158581329, "loss": 3.6802, "step": 41985 }, { "epoch": 2.852969153417584, "grad_norm": 2.882430076599121, "learning_rate": 0.0006435062508493002, "loss": 3.4387, "step": 41990 }, { "epoch": 2.8533088734882455, "grad_norm": 2.235783338546753, "learning_rate": 0.0006434637858404675, "loss": 3.5678, "step": 41995 }, { "epoch": 2.8536485935589075, "grad_norm": 1.417557954788208, "learning_rate": 0.0006434213208316347, "loss": 3.7537, "step": 42000 }, { "epoch": 2.853988313629569, "grad_norm": 1.5188748836517334, "learning_rate": 0.000643378855822802, "loss": 3.6032, "step": 
42005 }, { "epoch": 2.854328033700231, "grad_norm": 2.2573587894439697, "learning_rate": 0.0006433363908139693, "loss": 3.2726, "step": 42010 }, { "epoch": 2.854667753770893, "grad_norm": 1.9798966646194458, "learning_rate": 0.0006432939258051366, "loss": 3.4715, "step": 42015 }, { "epoch": 2.8550074738415545, "grad_norm": 1.852818489074707, "learning_rate": 0.0006432514607963038, "loss": 3.6807, "step": 42020 }, { "epoch": 2.855347193912216, "grad_norm": 2.0249907970428467, "learning_rate": 0.0006432089957874712, "loss": 3.7649, "step": 42025 }, { "epoch": 2.855686913982878, "grad_norm": 1.4653910398483276, "learning_rate": 0.0006431665307786384, "loss": 3.4263, "step": 42030 }, { "epoch": 2.85602663405354, "grad_norm": 1.8472623825073242, "learning_rate": 0.0006431240657698056, "loss": 3.0862, "step": 42035 }, { "epoch": 2.8563663541242015, "grad_norm": 2.312018871307373, "learning_rate": 0.000643081600760973, "loss": 3.5216, "step": 42040 }, { "epoch": 2.8567060741948636, "grad_norm": 1.875494122505188, "learning_rate": 0.0006430391357521402, "loss": 3.6888, "step": 42045 }, { "epoch": 2.857045794265525, "grad_norm": 1.9612303972244263, "learning_rate": 0.0006429966707433075, "loss": 3.4712, "step": 42050 }, { "epoch": 2.857385514336187, "grad_norm": 2.3038504123687744, "learning_rate": 0.0006429542057344749, "loss": 3.6233, "step": 42055 }, { "epoch": 2.857725234406849, "grad_norm": 1.9194096326828003, "learning_rate": 0.0006429117407256421, "loss": 3.4956, "step": 42060 }, { "epoch": 2.8580649544775105, "grad_norm": 1.464130163192749, "learning_rate": 0.0006428692757168093, "loss": 3.4533, "step": 42065 }, { "epoch": 2.858404674548172, "grad_norm": 2.0113463401794434, "learning_rate": 0.0006428268107079766, "loss": 3.4887, "step": 42070 }, { "epoch": 2.8587443946188342, "grad_norm": 2.60548996925354, "learning_rate": 0.0006427843456991439, "loss": 3.4179, "step": 42075 }, { "epoch": 2.859084114689496, "grad_norm": 2.04736590385437, "learning_rate": 
0.0006427418806903111, "loss": 3.4344, "step": 42080 }, { "epoch": 2.8594238347601575, "grad_norm": 1.6085153818130493, "learning_rate": 0.0006426994156814785, "loss": 3.3434, "step": 42085 }, { "epoch": 2.8597635548308196, "grad_norm": 1.577038288116455, "learning_rate": 0.0006426569506726458, "loss": 3.5531, "step": 42090 }, { "epoch": 2.860103274901481, "grad_norm": 1.9399431943893433, "learning_rate": 0.0006426144856638131, "loss": 3.5097, "step": 42095 }, { "epoch": 2.860442994972143, "grad_norm": 1.92392098903656, "learning_rate": 0.0006425720206549803, "loss": 3.6202, "step": 42100 }, { "epoch": 2.860782715042805, "grad_norm": 1.5260034799575806, "learning_rate": 0.0006425295556461476, "loss": 3.2118, "step": 42105 }, { "epoch": 2.8611224351134665, "grad_norm": 1.6515998840332031, "learning_rate": 0.0006424870906373149, "loss": 3.6975, "step": 42110 }, { "epoch": 2.861462155184128, "grad_norm": 2.008115768432617, "learning_rate": 0.0006424446256284821, "loss": 3.4613, "step": 42115 }, { "epoch": 2.8618018752547902, "grad_norm": 1.8442844152450562, "learning_rate": 0.0006424021606196495, "loss": 3.5577, "step": 42120 }, { "epoch": 2.862141595325452, "grad_norm": 2.0443594455718994, "learning_rate": 0.0006423596956108168, "loss": 3.5395, "step": 42125 }, { "epoch": 2.8624813153961135, "grad_norm": 1.9130750894546509, "learning_rate": 0.000642317230601984, "loss": 3.5773, "step": 42130 }, { "epoch": 2.8628210354667756, "grad_norm": 2.6161701679229736, "learning_rate": 0.0006422747655931512, "loss": 3.5283, "step": 42135 }, { "epoch": 2.863160755537437, "grad_norm": 1.8729429244995117, "learning_rate": 0.0006422323005843186, "loss": 3.1404, "step": 42140 }, { "epoch": 2.863500475608099, "grad_norm": 2.0007033348083496, "learning_rate": 0.0006421898355754858, "loss": 3.3824, "step": 42145 }, { "epoch": 2.863840195678761, "grad_norm": 2.071702003479004, "learning_rate": 0.000642147370566653, "loss": 3.4453, "step": 42150 }, { "epoch": 2.8641799157494225, 
"grad_norm": 1.956206202507019, "learning_rate": 0.0006421049055578205, "loss": 3.5971, "step": 42155 }, { "epoch": 2.864519635820084, "grad_norm": 1.7348774671554565, "learning_rate": 0.0006420624405489877, "loss": 3.3384, "step": 42160 }, { "epoch": 2.8648593558907463, "grad_norm": 1.7004646062850952, "learning_rate": 0.0006420199755401549, "loss": 3.2031, "step": 42165 }, { "epoch": 2.865199075961408, "grad_norm": 2.745138645172119, "learning_rate": 0.0006419775105313223, "loss": 3.5605, "step": 42170 }, { "epoch": 2.8655387960320695, "grad_norm": 1.4017711877822876, "learning_rate": 0.0006419350455224895, "loss": 3.4152, "step": 42175 }, { "epoch": 2.8658785161027316, "grad_norm": 1.9767954349517822, "learning_rate": 0.0006418925805136567, "loss": 3.7196, "step": 42180 }, { "epoch": 2.866218236173393, "grad_norm": 1.935296654701233, "learning_rate": 0.000641850115504824, "loss": 3.2628, "step": 42185 }, { "epoch": 2.866557956244055, "grad_norm": 1.352652907371521, "learning_rate": 0.0006418076504959914, "loss": 3.4925, "step": 42190 }, { "epoch": 2.866897676314717, "grad_norm": 1.9775512218475342, "learning_rate": 0.0006417651854871586, "loss": 3.4788, "step": 42195 }, { "epoch": 2.8672373963853786, "grad_norm": 1.9056004285812378, "learning_rate": 0.0006417227204783259, "loss": 3.5166, "step": 42200 }, { "epoch": 2.86757711645604, "grad_norm": 1.9097093343734741, "learning_rate": 0.0006416802554694932, "loss": 3.4837, "step": 42205 }, { "epoch": 2.8679168365267023, "grad_norm": 1.7765498161315918, "learning_rate": 0.0006416377904606604, "loss": 3.4743, "step": 42210 }, { "epoch": 2.868256556597364, "grad_norm": 1.3119146823883057, "learning_rate": 0.0006415953254518277, "loss": 3.4678, "step": 42215 }, { "epoch": 2.8685962766680255, "grad_norm": 2.2119863033294678, "learning_rate": 0.0006415528604429949, "loss": 3.3058, "step": 42220 }, { "epoch": 2.868935996738687, "grad_norm": 1.9303736686706543, "learning_rate": 0.0006415103954341623, "loss": 3.6606, 
"step": 42225 }, { "epoch": 2.8692757168093492, "grad_norm": 2.5014708042144775, "learning_rate": 0.0006414679304253296, "loss": 3.4721, "step": 42230 }, { "epoch": 2.869615436880011, "grad_norm": 1.8673462867736816, "learning_rate": 0.0006414254654164968, "loss": 3.5994, "step": 42235 }, { "epoch": 2.8699551569506725, "grad_norm": 1.8215525150299072, "learning_rate": 0.0006413830004076641, "loss": 3.6253, "step": 42240 }, { "epoch": 2.8702948770213346, "grad_norm": 2.181645154953003, "learning_rate": 0.0006413405353988314, "loss": 3.5998, "step": 42245 }, { "epoch": 2.870634597091996, "grad_norm": 1.7606931924819946, "learning_rate": 0.0006412980703899986, "loss": 3.3792, "step": 42250 }, { "epoch": 2.870974317162658, "grad_norm": 2.2802507877349854, "learning_rate": 0.0006412556053811658, "loss": 3.5491, "step": 42255 }, { "epoch": 2.87131403723332, "grad_norm": 1.880257248878479, "learning_rate": 0.0006412131403723333, "loss": 3.4248, "step": 42260 }, { "epoch": 2.8716537573039815, "grad_norm": 1.8415809869766235, "learning_rate": 0.0006411706753635005, "loss": 3.3901, "step": 42265 }, { "epoch": 2.871993477374643, "grad_norm": 2.3032758235931396, "learning_rate": 0.0006411282103546677, "loss": 3.3996, "step": 42270 }, { "epoch": 2.872333197445305, "grad_norm": 2.026512384414673, "learning_rate": 0.0006410857453458351, "loss": 3.6512, "step": 42275 }, { "epoch": 2.872672917515967, "grad_norm": 1.653264045715332, "learning_rate": 0.0006410432803370023, "loss": 3.4797, "step": 42280 }, { "epoch": 2.8730126375866285, "grad_norm": 2.190706491470337, "learning_rate": 0.0006410008153281695, "loss": 3.4268, "step": 42285 }, { "epoch": 2.87335235765729, "grad_norm": 2.0117952823638916, "learning_rate": 0.0006409583503193369, "loss": 3.3847, "step": 42290 }, { "epoch": 2.873692077727952, "grad_norm": 2.054633140563965, "learning_rate": 0.0006409158853105042, "loss": 3.4949, "step": 42295 }, { "epoch": 2.874031797798614, "grad_norm": 1.8777281045913696, "learning_rate": 
0.0006408734203016714, "loss": 3.6638, "step": 42300 }, { "epoch": 2.8743715178692755, "grad_norm": 2.3204643726348877, "learning_rate": 0.0006408309552928388, "loss": 3.5381, "step": 42305 }, { "epoch": 2.8747112379399375, "grad_norm": 2.0334091186523438, "learning_rate": 0.000640788490284006, "loss": 3.4213, "step": 42310 }, { "epoch": 2.875050958010599, "grad_norm": 1.8720529079437256, "learning_rate": 0.0006407460252751732, "loss": 3.5824, "step": 42315 }, { "epoch": 2.875390678081261, "grad_norm": 2.087693691253662, "learning_rate": 0.0006407035602663405, "loss": 3.5712, "step": 42320 }, { "epoch": 2.875730398151923, "grad_norm": 1.6856954097747803, "learning_rate": 0.0006406610952575078, "loss": 3.4413, "step": 42325 }, { "epoch": 2.8760701182225845, "grad_norm": 1.836244821548462, "learning_rate": 0.0006406186302486751, "loss": 3.5149, "step": 42330 }, { "epoch": 2.876409838293246, "grad_norm": 2.008533477783203, "learning_rate": 0.0006405761652398424, "loss": 3.3856, "step": 42335 }, { "epoch": 2.876749558363908, "grad_norm": 1.7287490367889404, "learning_rate": 0.0006405337002310097, "loss": 3.4876, "step": 42340 }, { "epoch": 2.87708927843457, "grad_norm": 2.2036612033843994, "learning_rate": 0.0006404912352221769, "loss": 3.4319, "step": 42345 }, { "epoch": 2.8774289985052315, "grad_norm": 1.643738031387329, "learning_rate": 0.0006404487702133442, "loss": 3.57, "step": 42350 }, { "epoch": 2.8777687185758936, "grad_norm": 1.8999145030975342, "learning_rate": 0.0006404063052045115, "loss": 3.5437, "step": 42355 }, { "epoch": 2.878108438646555, "grad_norm": 1.6562211513519287, "learning_rate": 0.0006403638401956787, "loss": 3.4864, "step": 42360 }, { "epoch": 2.878448158717217, "grad_norm": 1.6906723976135254, "learning_rate": 0.0006403213751868461, "loss": 3.3727, "step": 42365 }, { "epoch": 2.878787878787879, "grad_norm": 2.0468292236328125, "learning_rate": 0.0006402789101780133, "loss": 3.7506, "step": 42370 }, { "epoch": 2.8791275988585405, 
"grad_norm": 1.9838110208511353, "learning_rate": 0.0006402364451691806, "loss": 3.7492, "step": 42375 }, { "epoch": 2.879467318929202, "grad_norm": 2.0596091747283936, "learning_rate": 0.0006401939801603479, "loss": 3.5162, "step": 42380 }, { "epoch": 2.8798070389998642, "grad_norm": 1.5934040546417236, "learning_rate": 0.0006401515151515151, "loss": 3.325, "step": 42385 }, { "epoch": 2.880146759070526, "grad_norm": 1.6136964559555054, "learning_rate": 0.0006401090501426824, "loss": 3.2984, "step": 42390 }, { "epoch": 2.8804864791411875, "grad_norm": 1.920366883277893, "learning_rate": 0.0006400665851338497, "loss": 3.5553, "step": 42395 }, { "epoch": 2.8808261992118496, "grad_norm": 2.2171525955200195, "learning_rate": 0.000640024120125017, "loss": 3.7193, "step": 42400 }, { "epoch": 2.881165919282511, "grad_norm": 1.7849780321121216, "learning_rate": 0.0006399816551161843, "loss": 3.3196, "step": 42405 }, { "epoch": 2.881505639353173, "grad_norm": 1.880568027496338, "learning_rate": 0.0006399391901073516, "loss": 3.5002, "step": 42410 }, { "epoch": 2.881845359423835, "grad_norm": 1.7558997869491577, "learning_rate": 0.0006398967250985188, "loss": 3.5418, "step": 42415 }, { "epoch": 2.8821850794944965, "grad_norm": 2.3737854957580566, "learning_rate": 0.000639854260089686, "loss": 3.756, "step": 42420 }, { "epoch": 2.882524799565158, "grad_norm": 2.0921835899353027, "learning_rate": 0.0006398117950808534, "loss": 3.195, "step": 42425 }, { "epoch": 2.8828645196358202, "grad_norm": 2.25004506111145, "learning_rate": 0.0006397693300720206, "loss": 3.5616, "step": 42430 }, { "epoch": 2.883204239706482, "grad_norm": 1.555390477180481, "learning_rate": 0.000639726865063188, "loss": 3.5097, "step": 42435 }, { "epoch": 2.8835439597771435, "grad_norm": 2.0606019496917725, "learning_rate": 0.0006396844000543553, "loss": 3.4351, "step": 42440 }, { "epoch": 2.8838836798478056, "grad_norm": 1.926924228668213, "learning_rate": 0.0006396419350455225, "loss": 3.8115, "step": 
42445 }, { "epoch": 2.884223399918467, "grad_norm": 2.374962091445923, "learning_rate": 0.0006395994700366898, "loss": 3.3311, "step": 42450 }, { "epoch": 2.884563119989129, "grad_norm": 1.727543592453003, "learning_rate": 0.000639557005027857, "loss": 3.5198, "step": 42455 }, { "epoch": 2.884902840059791, "grad_norm": 1.5493378639221191, "learning_rate": 0.0006395145400190243, "loss": 3.5576, "step": 42460 }, { "epoch": 2.8852425601304525, "grad_norm": 1.5011394023895264, "learning_rate": 0.0006394720750101916, "loss": 3.5185, "step": 42465 }, { "epoch": 2.885582280201114, "grad_norm": 2.316208600997925, "learning_rate": 0.000639429610001359, "loss": 3.3929, "step": 42470 }, { "epoch": 2.8859220002717763, "grad_norm": 2.1858537197113037, "learning_rate": 0.0006393871449925262, "loss": 3.6783, "step": 42475 }, { "epoch": 2.886261720342438, "grad_norm": 1.7295188903808594, "learning_rate": 0.0006393446799836935, "loss": 3.5857, "step": 42480 }, { "epoch": 2.8866014404130995, "grad_norm": 1.7190134525299072, "learning_rate": 0.0006393022149748607, "loss": 3.5119, "step": 42485 }, { "epoch": 2.8869411604837616, "grad_norm": 1.4943928718566895, "learning_rate": 0.000639259749966028, "loss": 3.4953, "step": 42490 }, { "epoch": 2.8872808805544232, "grad_norm": 1.7899631261825562, "learning_rate": 0.0006392172849571953, "loss": 3.4796, "step": 42495 }, { "epoch": 2.887620600625085, "grad_norm": 2.0253055095672607, "learning_rate": 0.0006391748199483625, "loss": 3.4794, "step": 42500 }, { "epoch": 2.887960320695747, "grad_norm": 1.8491411209106445, "learning_rate": 0.0006391323549395299, "loss": 3.4882, "step": 42505 }, { "epoch": 2.8883000407664086, "grad_norm": 2.79502010345459, "learning_rate": 0.0006390898899306972, "loss": 3.5587, "step": 42510 }, { "epoch": 2.88863976083707, "grad_norm": 1.9704211950302124, "learning_rate": 0.0006390474249218644, "loss": 3.2441, "step": 42515 }, { "epoch": 2.8889794809077323, "grad_norm": 2.2068188190460205, "learning_rate": 
0.0006390049599130316, "loss": 3.7171, "step": 42520 }, { "epoch": 2.889319200978394, "grad_norm": 2.2570149898529053, "learning_rate": 0.000638962494904199, "loss": 3.4107, "step": 42525 }, { "epoch": 2.8896589210490555, "grad_norm": 1.9078435897827148, "learning_rate": 0.0006389200298953662, "loss": 3.6011, "step": 42530 }, { "epoch": 2.8899986411197176, "grad_norm": 1.9522265195846558, "learning_rate": 0.0006388775648865334, "loss": 3.3322, "step": 42535 }, { "epoch": 2.8903383611903792, "grad_norm": 2.7048823833465576, "learning_rate": 0.0006388350998777009, "loss": 3.592, "step": 42540 }, { "epoch": 2.890678081261041, "grad_norm": 1.8889758586883545, "learning_rate": 0.0006387926348688681, "loss": 3.3606, "step": 42545 }, { "epoch": 2.891017801331703, "grad_norm": 2.3193349838256836, "learning_rate": 0.0006387501698600353, "loss": 3.3865, "step": 42550 }, { "epoch": 2.8913575214023646, "grad_norm": 2.3540730476379395, "learning_rate": 0.0006387077048512027, "loss": 3.3827, "step": 42555 }, { "epoch": 2.891697241473026, "grad_norm": 1.6938257217407227, "learning_rate": 0.0006386652398423699, "loss": 3.5747, "step": 42560 }, { "epoch": 2.8920369615436883, "grad_norm": 2.0559725761413574, "learning_rate": 0.0006386227748335371, "loss": 3.6878, "step": 42565 }, { "epoch": 2.89237668161435, "grad_norm": 1.861515998840332, "learning_rate": 0.0006385803098247045, "loss": 3.5317, "step": 42570 }, { "epoch": 2.8927164016850115, "grad_norm": 1.56510591506958, "learning_rate": 0.0006385378448158718, "loss": 3.365, "step": 42575 }, { "epoch": 2.893056121755673, "grad_norm": 1.7176567316055298, "learning_rate": 0.000638495379807039, "loss": 3.3045, "step": 42580 }, { "epoch": 2.8933958418263352, "grad_norm": 1.6540051698684692, "learning_rate": 0.0006384529147982063, "loss": 3.5451, "step": 42585 }, { "epoch": 2.893735561896997, "grad_norm": 2.230523109436035, "learning_rate": 0.0006384104497893736, "loss": 3.503, "step": 42590 }, { "epoch": 2.8940752819676585, 
"grad_norm": 2.302227258682251, "learning_rate": 0.0006383679847805408, "loss": 3.6147, "step": 42595 }, { "epoch": 2.8944150020383206, "grad_norm": 2.2799293994903564, "learning_rate": 0.0006383255197717081, "loss": 3.5679, "step": 42600 }, { "epoch": 2.894754722108982, "grad_norm": 1.7834604978561401, "learning_rate": 0.0006382830547628755, "loss": 3.2528, "step": 42605 }, { "epoch": 2.895094442179644, "grad_norm": 1.8250093460083008, "learning_rate": 0.0006382405897540427, "loss": 3.4056, "step": 42610 }, { "epoch": 2.8954341622503055, "grad_norm": 1.9771254062652588, "learning_rate": 0.00063819812474521, "loss": 3.4033, "step": 42615 }, { "epoch": 2.8957738823209676, "grad_norm": 1.5599617958068848, "learning_rate": 0.0006381556597363772, "loss": 3.4931, "step": 42620 }, { "epoch": 2.896113602391629, "grad_norm": 2.7404868602752686, "learning_rate": 0.0006381131947275445, "loss": 3.4126, "step": 42625 }, { "epoch": 2.896453322462291, "grad_norm": 2.10709547996521, "learning_rate": 0.0006380707297187118, "loss": 3.502, "step": 42630 }, { "epoch": 2.896793042532953, "grad_norm": 1.9443385601043701, "learning_rate": 0.000638028264709879, "loss": 3.5603, "step": 42635 }, { "epoch": 2.8971327626036145, "grad_norm": 2.1095688343048096, "learning_rate": 0.0006379857997010464, "loss": 3.4661, "step": 42640 }, { "epoch": 2.897472482674276, "grad_norm": 1.7896218299865723, "learning_rate": 0.0006379433346922137, "loss": 3.7346, "step": 42645 }, { "epoch": 2.8978122027449382, "grad_norm": 1.2922190427780151, "learning_rate": 0.0006379008696833809, "loss": 3.4123, "step": 42650 }, { "epoch": 2.8981519228156, "grad_norm": 1.6692099571228027, "learning_rate": 0.0006378584046745481, "loss": 3.4271, "step": 42655 }, { "epoch": 2.8984916428862615, "grad_norm": 2.0441768169403076, "learning_rate": 0.0006378159396657155, "loss": 3.5594, "step": 42660 }, { "epoch": 2.8988313629569236, "grad_norm": 1.6662861108779907, "learning_rate": 0.0006377734746568827, "loss": 3.6792, "step": 
42665 }, { "epoch": 2.899171083027585, "grad_norm": 2.0849125385284424, "learning_rate": 0.0006377310096480499, "loss": 3.3776, "step": 42670 }, { "epoch": 2.899510803098247, "grad_norm": 1.8424155712127686, "learning_rate": 0.0006376885446392174, "loss": 3.4998, "step": 42675 }, { "epoch": 2.899850523168909, "grad_norm": 1.9000440835952759, "learning_rate": 0.0006376460796303846, "loss": 3.4268, "step": 42680 }, { "epoch": 2.9001902432395705, "grad_norm": 2.104867696762085, "learning_rate": 0.0006376036146215518, "loss": 3.4929, "step": 42685 }, { "epoch": 2.900529963310232, "grad_norm": 2.2040069103240967, "learning_rate": 0.0006375611496127192, "loss": 3.4544, "step": 42690 }, { "epoch": 2.9008696833808942, "grad_norm": 1.84516441822052, "learning_rate": 0.0006375186846038864, "loss": 3.5689, "step": 42695 }, { "epoch": 2.901209403451556, "grad_norm": 1.5858368873596191, "learning_rate": 0.0006374762195950536, "loss": 3.5151, "step": 42700 }, { "epoch": 2.9015491235222175, "grad_norm": 2.2389137744903564, "learning_rate": 0.000637433754586221, "loss": 3.7831, "step": 42705 }, { "epoch": 2.9018888435928796, "grad_norm": 2.1036155223846436, "learning_rate": 0.0006373912895773883, "loss": 3.4891, "step": 42710 }, { "epoch": 2.902228563663541, "grad_norm": 1.97053861618042, "learning_rate": 0.0006373488245685555, "loss": 3.4978, "step": 42715 }, { "epoch": 2.902568283734203, "grad_norm": 1.8893131017684937, "learning_rate": 0.0006373063595597228, "loss": 3.477, "step": 42720 }, { "epoch": 2.902908003804865, "grad_norm": 2.081777572631836, "learning_rate": 0.0006372638945508901, "loss": 3.6258, "step": 42725 }, { "epoch": 2.9032477238755265, "grad_norm": 1.9981895685195923, "learning_rate": 0.0006372214295420573, "loss": 3.5887, "step": 42730 }, { "epoch": 2.903587443946188, "grad_norm": 1.5604450702667236, "learning_rate": 0.0006371789645332246, "loss": 3.4417, "step": 42735 }, { "epoch": 2.9039271640168502, "grad_norm": 2.4502370357513428, "learning_rate": 
0.0006371364995243919, "loss": 3.5998, "step": 42740 }, { "epoch": 2.904266884087512, "grad_norm": 1.9697996377944946, "learning_rate": 0.0006370940345155592, "loss": 3.4381, "step": 42745 }, { "epoch": 2.9046066041581735, "grad_norm": 1.7462444305419922, "learning_rate": 0.0006370515695067265, "loss": 3.4007, "step": 42750 }, { "epoch": 2.9049463242288356, "grad_norm": 1.6497973203659058, "learning_rate": 0.0006370091044978937, "loss": 3.4683, "step": 42755 }, { "epoch": 2.905286044299497, "grad_norm": 1.8979334831237793, "learning_rate": 0.000636966639489061, "loss": 3.4475, "step": 42760 }, { "epoch": 2.905625764370159, "grad_norm": 1.9562320709228516, "learning_rate": 0.0006369241744802283, "loss": 3.4376, "step": 42765 }, { "epoch": 2.905965484440821, "grad_norm": 1.5392330884933472, "learning_rate": 0.0006368817094713955, "loss": 3.5259, "step": 42770 }, { "epoch": 2.9063052045114826, "grad_norm": 1.6227035522460938, "learning_rate": 0.0006368392444625629, "loss": 3.6767, "step": 42775 }, { "epoch": 2.906644924582144, "grad_norm": 2.153754949569702, "learning_rate": 0.0006367967794537302, "loss": 3.5213, "step": 42780 }, { "epoch": 2.9069846446528063, "grad_norm": 2.1757185459136963, "learning_rate": 0.0006367543144448974, "loss": 3.221, "step": 42785 }, { "epoch": 2.907324364723468, "grad_norm": 2.113992691040039, "learning_rate": 0.0006367118494360648, "loss": 3.584, "step": 42790 }, { "epoch": 2.9076640847941295, "grad_norm": 1.9096157550811768, "learning_rate": 0.000636669384427232, "loss": 3.3415, "step": 42795 }, { "epoch": 2.9080038048647916, "grad_norm": 1.659761667251587, "learning_rate": 0.0006366269194183992, "loss": 3.556, "step": 42800 }, { "epoch": 2.9083435249354532, "grad_norm": 2.893517017364502, "learning_rate": 0.0006365844544095666, "loss": 3.4164, "step": 42805 }, { "epoch": 2.908683245006115, "grad_norm": 1.7603976726531982, "learning_rate": 0.0006365419894007338, "loss": 3.6903, "step": 42810 }, { "epoch": 2.909022965076777, 
"grad_norm": 2.1390533447265625, "learning_rate": 0.0006364995243919011, "loss": 3.5954, "step": 42815 }, { "epoch": 2.9093626851474386, "grad_norm": 1.7865296602249146, "learning_rate": 0.0006364570593830684, "loss": 3.5101, "step": 42820 }, { "epoch": 2.9097024052181, "grad_norm": 1.7318636178970337, "learning_rate": 0.0006364145943742357, "loss": 3.5028, "step": 42825 }, { "epoch": 2.9100421252887623, "grad_norm": 1.6163958311080933, "learning_rate": 0.0006363721293654029, "loss": 3.3644, "step": 42830 }, { "epoch": 2.910381845359424, "grad_norm": 1.9096941947937012, "learning_rate": 0.0006363296643565702, "loss": 3.6651, "step": 42835 }, { "epoch": 2.9107215654300855, "grad_norm": 1.7151683568954468, "learning_rate": 0.0006362871993477375, "loss": 3.5969, "step": 42840 }, { "epoch": 2.9110612855007476, "grad_norm": 1.4854774475097656, "learning_rate": 0.0006362447343389047, "loss": 3.365, "step": 42845 }, { "epoch": 2.9114010055714092, "grad_norm": 1.768612265586853, "learning_rate": 0.0006362022693300721, "loss": 3.4596, "step": 42850 }, { "epoch": 2.911740725642071, "grad_norm": 2.348733901977539, "learning_rate": 0.0006361598043212394, "loss": 3.4038, "step": 42855 }, { "epoch": 2.912080445712733, "grad_norm": 1.8103344440460205, "learning_rate": 0.0006361173393124066, "loss": 3.5597, "step": 42860 }, { "epoch": 2.9124201657833946, "grad_norm": 1.9774258136749268, "learning_rate": 0.0006360748743035739, "loss": 3.4963, "step": 42865 }, { "epoch": 2.912759885854056, "grad_norm": 1.7694578170776367, "learning_rate": 0.0006360324092947411, "loss": 3.6134, "step": 42870 }, { "epoch": 2.9130996059247183, "grad_norm": 1.7592442035675049, "learning_rate": 0.0006359899442859084, "loss": 3.5411, "step": 42875 }, { "epoch": 2.91343932599538, "grad_norm": 1.4635244607925415, "learning_rate": 0.0006359474792770757, "loss": 3.3821, "step": 42880 }, { "epoch": 2.9137790460660415, "grad_norm": 1.471614122390747, "learning_rate": 0.000635905014268243, "loss": 3.7218, 
"step": 42885 }, { "epoch": 2.9141187661367036, "grad_norm": 1.6952468156814575, "learning_rate": 0.0006358625492594103, "loss": 3.3668, "step": 42890 }, { "epoch": 2.9144584862073653, "grad_norm": 1.732400894165039, "learning_rate": 0.0006358200842505776, "loss": 3.5942, "step": 42895 }, { "epoch": 2.914798206278027, "grad_norm": 1.9801380634307861, "learning_rate": 0.0006357776192417448, "loss": 3.3399, "step": 42900 }, { "epoch": 2.915137926348689, "grad_norm": 1.5618321895599365, "learning_rate": 0.000635735154232912, "loss": 3.5669, "step": 42905 }, { "epoch": 2.9154776464193506, "grad_norm": 2.2075421810150146, "learning_rate": 0.0006356926892240794, "loss": 3.4677, "step": 42910 }, { "epoch": 2.915817366490012, "grad_norm": 1.522576928138733, "learning_rate": 0.0006356502242152466, "loss": 3.5479, "step": 42915 }, { "epoch": 2.916157086560674, "grad_norm": 2.412626028060913, "learning_rate": 0.0006356077592064139, "loss": 3.4842, "step": 42920 }, { "epoch": 2.916496806631336, "grad_norm": 1.9081199169158936, "learning_rate": 0.0006355652941975813, "loss": 3.5891, "step": 42925 }, { "epoch": 2.9168365267019976, "grad_norm": 2.1723787784576416, "learning_rate": 0.0006355228291887485, "loss": 3.4195, "step": 42930 }, { "epoch": 2.917176246772659, "grad_norm": 2.1077797412872314, "learning_rate": 0.0006354803641799157, "loss": 3.6431, "step": 42935 }, { "epoch": 2.9175159668433213, "grad_norm": 1.3726375102996826, "learning_rate": 0.0006354378991710831, "loss": 3.7259, "step": 42940 }, { "epoch": 2.917855686913983, "grad_norm": 1.4928292036056519, "learning_rate": 0.0006353954341622503, "loss": 3.6142, "step": 42945 }, { "epoch": 2.9181954069846445, "grad_norm": 1.7864465713500977, "learning_rate": 0.0006353529691534175, "loss": 3.436, "step": 42950 }, { "epoch": 2.918535127055306, "grad_norm": 2.3931522369384766, "learning_rate": 0.000635310504144585, "loss": 3.4188, "step": 42955 }, { "epoch": 2.9188748471259682, "grad_norm": 1.9389615058898926, 
"learning_rate": 0.0006352680391357522, "loss": 3.5223, "step": 42960 }, { "epoch": 2.91921456719663, "grad_norm": 1.5859524011611938, "learning_rate": 0.0006352255741269194, "loss": 3.5185, "step": 42965 }, { "epoch": 2.9195542872672915, "grad_norm": 2.4792768955230713, "learning_rate": 0.0006351831091180867, "loss": 3.6416, "step": 42970 }, { "epoch": 2.9198940073379536, "grad_norm": 1.5029126405715942, "learning_rate": 0.000635140644109254, "loss": 3.3675, "step": 42975 }, { "epoch": 2.920233727408615, "grad_norm": 1.7072577476501465, "learning_rate": 0.0006350981791004212, "loss": 3.6106, "step": 42980 }, { "epoch": 2.920573447479277, "grad_norm": 2.331123113632202, "learning_rate": 0.0006350557140915885, "loss": 3.2531, "step": 42985 }, { "epoch": 2.920913167549939, "grad_norm": 2.3215081691741943, "learning_rate": 0.0006350132490827559, "loss": 3.4869, "step": 42990 }, { "epoch": 2.9212528876206005, "grad_norm": 1.6206471920013428, "learning_rate": 0.0006349707840739231, "loss": 3.4803, "step": 42995 }, { "epoch": 2.921592607691262, "grad_norm": 1.7624471187591553, "learning_rate": 0.0006349283190650904, "loss": 3.3777, "step": 43000 }, { "epoch": 2.9219323277619242, "grad_norm": 1.7774004936218262, "learning_rate": 0.0006348858540562576, "loss": 3.69, "step": 43005 }, { "epoch": 2.922272047832586, "grad_norm": 1.4223815202713013, "learning_rate": 0.0006348518820491916, "loss": 3.4804, "step": 43010 }, { "epoch": 2.9226117679032475, "grad_norm": 2.3111608028411865, "learning_rate": 0.0006348094170403588, "loss": 3.1874, "step": 43015 }, { "epoch": 2.9229514879739096, "grad_norm": 2.0465235710144043, "learning_rate": 0.000634766952031526, "loss": 3.6229, "step": 43020 }, { "epoch": 2.923291208044571, "grad_norm": 1.796665906906128, "learning_rate": 0.0006347244870226934, "loss": 3.6959, "step": 43025 }, { "epoch": 2.923630928115233, "grad_norm": 2.0631237030029297, "learning_rate": 0.0006346820220138606, "loss": 3.5374, "step": 43030 }, { "epoch": 
2.923970648185895, "grad_norm": 1.451206922531128, "learning_rate": 0.0006346395570050278, "loss": 3.4761, "step": 43035 }, { "epoch": 2.9243103682565565, "grad_norm": 1.6207683086395264, "learning_rate": 0.0006345970919961951, "loss": 3.5337, "step": 43040 }, { "epoch": 2.924650088327218, "grad_norm": 1.665557861328125, "learning_rate": 0.0006345546269873625, "loss": 3.5583, "step": 43045 }, { "epoch": 2.9249898083978803, "grad_norm": 1.7275182008743286, "learning_rate": 0.0006345121619785297, "loss": 3.6254, "step": 43050 }, { "epoch": 2.925329528468542, "grad_norm": 1.9025490283966064, "learning_rate": 0.000634469696969697, "loss": 3.4102, "step": 43055 }, { "epoch": 2.9256692485392035, "grad_norm": 1.9308338165283203, "learning_rate": 0.0006344272319608643, "loss": 3.4604, "step": 43060 }, { "epoch": 2.9260089686098656, "grad_norm": 1.9466991424560547, "learning_rate": 0.0006343847669520315, "loss": 3.5784, "step": 43065 }, { "epoch": 2.926348688680527, "grad_norm": 1.8370192050933838, "learning_rate": 0.0006343423019431988, "loss": 3.4343, "step": 43070 }, { "epoch": 2.926688408751189, "grad_norm": 1.8844462633132935, "learning_rate": 0.0006342998369343661, "loss": 3.5605, "step": 43075 }, { "epoch": 2.927028128821851, "grad_norm": 2.742198944091797, "learning_rate": 0.0006342573719255334, "loss": 3.6417, "step": 43080 }, { "epoch": 2.9273678488925126, "grad_norm": 2.1176376342773438, "learning_rate": 0.0006342149069167007, "loss": 3.5612, "step": 43085 }, { "epoch": 2.927707568963174, "grad_norm": 1.7151541709899902, "learning_rate": 0.000634172441907868, "loss": 3.2649, "step": 43090 }, { "epoch": 2.9280472890338363, "grad_norm": 1.5873744487762451, "learning_rate": 0.0006341299768990352, "loss": 3.2373, "step": 43095 }, { "epoch": 2.928387009104498, "grad_norm": 1.8108792304992676, "learning_rate": 0.0006340875118902025, "loss": 3.3867, "step": 43100 }, { "epoch": 2.9287267291751595, "grad_norm": 2.174034357070923, "learning_rate": 0.0006340450468813697, 
"loss": 3.4889, "step": 43105 }, { "epoch": 2.9290664492458216, "grad_norm": 1.814001202583313, "learning_rate": 0.000634002581872537, "loss": 3.4547, "step": 43110 }, { "epoch": 2.9294061693164832, "grad_norm": 1.5935173034667969, "learning_rate": 0.0006339601168637044, "loss": 3.6032, "step": 43115 }, { "epoch": 2.929745889387145, "grad_norm": 2.21931791305542, "learning_rate": 0.0006339176518548716, "loss": 3.2573, "step": 43120 }, { "epoch": 2.930085609457807, "grad_norm": 1.4850651025772095, "learning_rate": 0.0006338751868460389, "loss": 3.5392, "step": 43125 }, { "epoch": 2.9304253295284686, "grad_norm": 2.0473732948303223, "learning_rate": 0.0006338327218372062, "loss": 3.5786, "step": 43130 }, { "epoch": 2.93076504959913, "grad_norm": 2.1491024494171143, "learning_rate": 0.0006337902568283734, "loss": 3.2368, "step": 43135 }, { "epoch": 2.9311047696697923, "grad_norm": 2.114506483078003, "learning_rate": 0.0006337477918195406, "loss": 3.4494, "step": 43140 }, { "epoch": 2.931444489740454, "grad_norm": 1.3558933734893799, "learning_rate": 0.000633705326810708, "loss": 3.6441, "step": 43145 }, { "epoch": 2.9317842098111155, "grad_norm": 1.7628638744354248, "learning_rate": 0.0006336628618018753, "loss": 3.8142, "step": 43150 }, { "epoch": 2.9321239298817776, "grad_norm": 2.085980176925659, "learning_rate": 0.0006336203967930425, "loss": 3.6028, "step": 43155 }, { "epoch": 2.9324636499524392, "grad_norm": 1.750032663345337, "learning_rate": 0.0006335779317842099, "loss": 3.5192, "step": 43160 }, { "epoch": 2.932803370023101, "grad_norm": 1.7963166236877441, "learning_rate": 0.0006335354667753771, "loss": 3.6129, "step": 43165 }, { "epoch": 2.933143090093763, "grad_norm": 2.00886607170105, "learning_rate": 0.0006334930017665443, "loss": 3.2178, "step": 43170 }, { "epoch": 2.9334828101644246, "grad_norm": 2.169008731842041, "learning_rate": 0.0006334505367577117, "loss": 3.438, "step": 43175 }, { "epoch": 2.933822530235086, "grad_norm": 2.2410151958465576, 
"learning_rate": 0.0006334080717488789, "loss": 3.6417, "step": 43180 }, { "epoch": 2.9341622503057483, "grad_norm": 1.441442847251892, "learning_rate": 0.0006333656067400462, "loss": 3.3245, "step": 43185 }, { "epoch": 2.93450197037641, "grad_norm": 1.9235481023788452, "learning_rate": 0.0006333231417312136, "loss": 3.4896, "step": 43190 }, { "epoch": 2.9348416904470715, "grad_norm": 2.381692409515381, "learning_rate": 0.0006332806767223808, "loss": 3.5634, "step": 43195 }, { "epoch": 2.9351814105177336, "grad_norm": 2.407341957092285, "learning_rate": 0.000633238211713548, "loss": 3.5473, "step": 43200 }, { "epoch": 2.9355211305883953, "grad_norm": 2.0445001125335693, "learning_rate": 0.0006331957467047153, "loss": 3.6825, "step": 43205 }, { "epoch": 2.935860850659057, "grad_norm": 1.5647633075714111, "learning_rate": 0.0006331532816958826, "loss": 3.596, "step": 43210 }, { "epoch": 2.936200570729719, "grad_norm": 1.7588504552841187, "learning_rate": 0.0006331108166870498, "loss": 3.4603, "step": 43215 }, { "epoch": 2.9365402908003806, "grad_norm": 1.6928274631500244, "learning_rate": 0.0006330683516782172, "loss": 3.4738, "step": 43220 }, { "epoch": 2.9368800108710422, "grad_norm": 1.8990612030029297, "learning_rate": 0.0006330258866693845, "loss": 3.4613, "step": 43225 }, { "epoch": 2.9372197309417043, "grad_norm": 1.9904530048370361, "learning_rate": 0.0006329834216605517, "loss": 3.485, "step": 43230 }, { "epoch": 2.937559451012366, "grad_norm": 1.9827226400375366, "learning_rate": 0.000632940956651719, "loss": 3.4044, "step": 43235 }, { "epoch": 2.9378991710830276, "grad_norm": 2.4457507133483887, "learning_rate": 0.0006328984916428862, "loss": 3.4637, "step": 43240 }, { "epoch": 2.9382388911536896, "grad_norm": 1.6880288124084473, "learning_rate": 0.0006328560266340535, "loss": 3.364, "step": 43245 }, { "epoch": 2.9385786112243513, "grad_norm": 1.7020286321640015, "learning_rate": 0.0006328135616252208, "loss": 3.4197, "step": 43250 }, { "epoch": 
2.938918331295013, "grad_norm": 1.657723307609558, "learning_rate": 0.0006327710966163881, "loss": 3.6692, "step": 43255 }, { "epoch": 2.9392580513656745, "grad_norm": 2.1257381439208984, "learning_rate": 0.0006327286316075554, "loss": 3.4322, "step": 43260 }, { "epoch": 2.9395977714363366, "grad_norm": 2.2746424674987793, "learning_rate": 0.0006326861665987227, "loss": 3.4158, "step": 43265 }, { "epoch": 2.9399374915069982, "grad_norm": 1.6260111331939697, "learning_rate": 0.0006326437015898899, "loss": 3.316, "step": 43270 }, { "epoch": 2.94027721157766, "grad_norm": 1.4347296953201294, "learning_rate": 0.0006326012365810572, "loss": 3.384, "step": 43275 }, { "epoch": 2.940616931648322, "grad_norm": 2.390371084213257, "learning_rate": 0.0006325587715722245, "loss": 3.4787, "step": 43280 }, { "epoch": 2.9409566517189836, "grad_norm": 1.9835481643676758, "learning_rate": 0.0006325163065633917, "loss": 3.5878, "step": 43285 }, { "epoch": 2.941296371789645, "grad_norm": 1.628227949142456, "learning_rate": 0.000632473841554559, "loss": 3.4395, "step": 43290 }, { "epoch": 2.941636091860307, "grad_norm": 1.5029449462890625, "learning_rate": 0.0006324313765457264, "loss": 3.585, "step": 43295 }, { "epoch": 2.941975811930969, "grad_norm": 2.1430602073669434, "learning_rate": 0.0006323889115368936, "loss": 3.6511, "step": 43300 }, { "epoch": 2.9423155320016305, "grad_norm": 1.7164522409439087, "learning_rate": 0.0006323464465280608, "loss": 3.2583, "step": 43305 }, { "epoch": 2.942655252072292, "grad_norm": 1.753530740737915, "learning_rate": 0.0006323039815192282, "loss": 3.4273, "step": 43310 }, { "epoch": 2.9429949721429542, "grad_norm": 2.184676170349121, "learning_rate": 0.0006322615165103954, "loss": 3.4378, "step": 43315 }, { "epoch": 2.943334692213616, "grad_norm": 2.209334135055542, "learning_rate": 0.0006322190515015628, "loss": 3.7168, "step": 43320 }, { "epoch": 2.9436744122842775, "grad_norm": 2.5600087642669678, "learning_rate": 0.0006321765864927301, "loss": 
3.403, "step": 43325 }, { "epoch": 2.9440141323549396, "grad_norm": 2.095860481262207, "learning_rate": 0.0006321341214838973, "loss": 3.4067, "step": 43330 }, { "epoch": 2.944353852425601, "grad_norm": 1.5807260274887085, "learning_rate": 0.0006320916564750646, "loss": 3.3812, "step": 43335 }, { "epoch": 2.944693572496263, "grad_norm": 1.423992395401001, "learning_rate": 0.0006320491914662318, "loss": 3.6736, "step": 43340 }, { "epoch": 2.945033292566925, "grad_norm": 2.222257614135742, "learning_rate": 0.0006320067264573991, "loss": 3.5802, "step": 43345 }, { "epoch": 2.9453730126375866, "grad_norm": 1.9155770540237427, "learning_rate": 0.0006319642614485664, "loss": 3.5703, "step": 43350 }, { "epoch": 2.945712732708248, "grad_norm": 1.9488401412963867, "learning_rate": 0.0006319217964397337, "loss": 3.6749, "step": 43355 }, { "epoch": 2.9460524527789103, "grad_norm": 1.7166746854782104, "learning_rate": 0.000631879331430901, "loss": 3.4405, "step": 43360 }, { "epoch": 2.946392172849572, "grad_norm": 1.6986221075057983, "learning_rate": 0.0006318368664220683, "loss": 3.3488, "step": 43365 }, { "epoch": 2.9467318929202335, "grad_norm": 2.6322975158691406, "learning_rate": 0.0006317944014132355, "loss": 3.6595, "step": 43370 }, { "epoch": 2.9470716129908956, "grad_norm": 1.6523849964141846, "learning_rate": 0.0006317519364044028, "loss": 3.7135, "step": 43375 }, { "epoch": 2.9474113330615572, "grad_norm": 1.8185304403305054, "learning_rate": 0.0006317094713955701, "loss": 3.4059, "step": 43380 }, { "epoch": 2.947751053132219, "grad_norm": 1.7089166641235352, "learning_rate": 0.0006316670063867373, "loss": 3.3603, "step": 43385 }, { "epoch": 2.948090773202881, "grad_norm": 1.718779444694519, "learning_rate": 0.0006316245413779046, "loss": 3.5611, "step": 43390 }, { "epoch": 2.9484304932735426, "grad_norm": 1.811293601989746, "learning_rate": 0.000631582076369072, "loss": 3.4527, "step": 43395 }, { "epoch": 2.948770213344204, "grad_norm": 1.8509095907211304, 
"learning_rate": 0.0006315396113602392, "loss": 3.5795, "step": 43400 }, { "epoch": 2.9491099334148663, "grad_norm": 1.630957841873169, "learning_rate": 0.0006314971463514064, "loss": 3.3746, "step": 43405 }, { "epoch": 2.949449653485528, "grad_norm": 2.4717962741851807, "learning_rate": 0.0006314546813425738, "loss": 3.3696, "step": 43410 }, { "epoch": 2.9497893735561895, "grad_norm": 1.9456508159637451, "learning_rate": 0.000631412216333741, "loss": 3.3324, "step": 43415 }, { "epoch": 2.9501290936268516, "grad_norm": 2.13915753364563, "learning_rate": 0.0006313697513249082, "loss": 3.4249, "step": 43420 }, { "epoch": 2.9504688136975132, "grad_norm": 1.7646197080612183, "learning_rate": 0.0006313272863160757, "loss": 3.6138, "step": 43425 }, { "epoch": 2.950808533768175, "grad_norm": 2.476766347885132, "learning_rate": 0.0006312848213072429, "loss": 3.7201, "step": 43430 }, { "epoch": 2.951148253838837, "grad_norm": 2.2545411586761475, "learning_rate": 0.0006312423562984101, "loss": 3.789, "step": 43435 }, { "epoch": 2.9514879739094986, "grad_norm": 1.8447474241256714, "learning_rate": 0.0006311998912895774, "loss": 3.5653, "step": 43440 }, { "epoch": 2.95182769398016, "grad_norm": 1.8144023418426514, "learning_rate": 0.0006311574262807447, "loss": 3.6846, "step": 43445 }, { "epoch": 2.9521674140508223, "grad_norm": 2.1302788257598877, "learning_rate": 0.0006311149612719119, "loss": 3.2878, "step": 43450 }, { "epoch": 2.952507134121484, "grad_norm": 2.145468235015869, "learning_rate": 0.0006310724962630792, "loss": 3.4197, "step": 43455 }, { "epoch": 2.9528468541921455, "grad_norm": 1.9021437168121338, "learning_rate": 0.0006310300312542466, "loss": 3.4747, "step": 43460 }, { "epoch": 2.9531865742628076, "grad_norm": 1.7694041728973389, "learning_rate": 0.0006309875662454138, "loss": 3.4119, "step": 43465 }, { "epoch": 2.9535262943334692, "grad_norm": 2.0293309688568115, "learning_rate": 0.0006309451012365811, "loss": 3.6571, "step": 43470 }, { "epoch": 
2.953866014404131, "grad_norm": 2.217607259750366, "learning_rate": 0.0006309026362277484, "loss": 3.4188, "step": 43475 }, { "epoch": 2.954205734474793, "grad_norm": 2.1096227169036865, "learning_rate": 0.0006308601712189156, "loss": 3.5476, "step": 43480 }, { "epoch": 2.9545454545454546, "grad_norm": 1.4276113510131836, "learning_rate": 0.0006308177062100829, "loss": 3.5719, "step": 43485 }, { "epoch": 2.954885174616116, "grad_norm": 1.7909080982208252, "learning_rate": 0.0006307752412012501, "loss": 3.6948, "step": 43490 }, { "epoch": 2.9552248946867783, "grad_norm": 1.5143693685531616, "learning_rate": 0.0006307327761924175, "loss": 3.3083, "step": 43495 }, { "epoch": 2.95556461475744, "grad_norm": 2.1223785877227783, "learning_rate": 0.0006306903111835848, "loss": 3.3595, "step": 43500 }, { "epoch": 2.9559043348281016, "grad_norm": 2.0281894207000732, "learning_rate": 0.000630647846174752, "loss": 3.2813, "step": 43505 }, { "epoch": 2.9562440548987636, "grad_norm": 1.9526005983352661, "learning_rate": 0.0006306053811659193, "loss": 3.6543, "step": 43510 }, { "epoch": 2.9565837749694253, "grad_norm": 2.4472646713256836, "learning_rate": 0.0006305629161570866, "loss": 3.1716, "step": 43515 }, { "epoch": 2.956923495040087, "grad_norm": 1.4706674814224243, "learning_rate": 0.0006305204511482538, "loss": 3.3184, "step": 43520 }, { "epoch": 2.957263215110749, "grad_norm": 2.2161543369293213, "learning_rate": 0.000630477986139421, "loss": 3.4477, "step": 43525 }, { "epoch": 2.9576029351814106, "grad_norm": 1.797940969467163, "learning_rate": 0.0006304355211305885, "loss": 3.5408, "step": 43530 }, { "epoch": 2.9579426552520722, "grad_norm": 1.9837146997451782, "learning_rate": 0.0006303930561217557, "loss": 3.2769, "step": 43535 }, { "epoch": 2.9582823753227343, "grad_norm": 1.9147837162017822, "learning_rate": 0.0006303505911129229, "loss": 3.3174, "step": 43540 }, { "epoch": 2.958622095393396, "grad_norm": 1.7778844833374023, "learning_rate": 0.0006303081261040903, 
"loss": 3.5229, "step": 43545 }, { "epoch": 2.9589618154640576, "grad_norm": 1.828647255897522, "learning_rate": 0.0006302656610952575, "loss": 3.7162, "step": 43550 }, { "epoch": 2.9593015355347196, "grad_norm": 1.955010175704956, "learning_rate": 0.0006302231960864247, "loss": 3.1883, "step": 43555 }, { "epoch": 2.9596412556053813, "grad_norm": 2.003575563430786, "learning_rate": 0.0006301807310775921, "loss": 3.5659, "step": 43560 }, { "epoch": 2.959980975676043, "grad_norm": 1.8651357889175415, "learning_rate": 0.0006301382660687594, "loss": 3.1603, "step": 43565 }, { "epoch": 2.960320695746705, "grad_norm": 2.6925289630889893, "learning_rate": 0.0006300958010599266, "loss": 3.3274, "step": 43570 }, { "epoch": 2.9606604158173666, "grad_norm": 2.0416741371154785, "learning_rate": 0.000630053336051094, "loss": 3.5958, "step": 43575 }, { "epoch": 2.9610001358880282, "grad_norm": 2.008946180343628, "learning_rate": 0.0006300108710422612, "loss": 3.5652, "step": 43580 }, { "epoch": 2.9613398559586903, "grad_norm": 2.139085054397583, "learning_rate": 0.0006299684060334284, "loss": 3.4373, "step": 43585 }, { "epoch": 2.961679576029352, "grad_norm": 1.4715005159378052, "learning_rate": 0.0006299259410245957, "loss": 3.7255, "step": 43590 }, { "epoch": 2.9620192961000136, "grad_norm": 1.8561373949050903, "learning_rate": 0.000629883476015763, "loss": 3.4603, "step": 43595 }, { "epoch": 2.962359016170675, "grad_norm": 1.798680067062378, "learning_rate": 0.0006298410110069303, "loss": 3.6276, "step": 43600 }, { "epoch": 2.9626987362413373, "grad_norm": 1.427308440208435, "learning_rate": 0.0006297985459980976, "loss": 3.5526, "step": 43605 }, { "epoch": 2.963038456311999, "grad_norm": 1.9909290075302124, "learning_rate": 0.0006297560809892649, "loss": 3.4301, "step": 43610 }, { "epoch": 2.9633781763826605, "grad_norm": 1.8252475261688232, "learning_rate": 0.0006297136159804321, "loss": 3.3972, "step": 43615 }, { "epoch": 2.9637178964533226, "grad_norm": 
1.8197247982025146, "learning_rate": 0.0006296711509715994, "loss": 3.5264, "step": 43620 }, { "epoch": 2.9640576165239843, "grad_norm": 1.7559473514556885, "learning_rate": 0.0006296286859627666, "loss": 3.5259, "step": 43625 }, { "epoch": 2.964397336594646, "grad_norm": 2.3912456035614014, "learning_rate": 0.0006295862209539339, "loss": 3.3151, "step": 43630 }, { "epoch": 2.9647370566653075, "grad_norm": 1.937391757965088, "learning_rate": 0.0006295437559451013, "loss": 3.5254, "step": 43635 }, { "epoch": 2.9650767767359696, "grad_norm": 2.2991745471954346, "learning_rate": 0.0006295012909362685, "loss": 3.5603, "step": 43640 }, { "epoch": 2.965416496806631, "grad_norm": 1.6176708936691284, "learning_rate": 0.0006294588259274358, "loss": 3.5284, "step": 43645 }, { "epoch": 2.965756216877293, "grad_norm": 2.3289384841918945, "learning_rate": 0.0006294163609186031, "loss": 3.5124, "step": 43650 }, { "epoch": 2.966095936947955, "grad_norm": 1.5848668813705444, "learning_rate": 0.0006293738959097703, "loss": 3.5973, "step": 43655 }, { "epoch": 2.9664356570186166, "grad_norm": 1.765097975730896, "learning_rate": 0.0006293314309009377, "loss": 3.6528, "step": 43660 }, { "epoch": 2.966775377089278, "grad_norm": 1.758548617362976, "learning_rate": 0.0006292889658921049, "loss": 3.4795, "step": 43665 }, { "epoch": 2.9671150971599403, "grad_norm": 1.7782336473464966, "learning_rate": 0.0006292465008832722, "loss": 3.6089, "step": 43670 }, { "epoch": 2.967454817230602, "grad_norm": 1.5580532550811768, "learning_rate": 0.0006292040358744396, "loss": 3.3891, "step": 43675 }, { "epoch": 2.9677945373012635, "grad_norm": 1.8711988925933838, "learning_rate": 0.0006291615708656068, "loss": 3.5487, "step": 43680 }, { "epoch": 2.9681342573719256, "grad_norm": 1.5906553268432617, "learning_rate": 0.000629119105856774, "loss": 3.5692, "step": 43685 }, { "epoch": 2.9684739774425872, "grad_norm": 1.7604053020477295, "learning_rate": 0.0006290766408479413, "loss": 3.4049, "step": 43690 
}, { "epoch": 2.968813697513249, "grad_norm": 2.060274124145508, "learning_rate": 0.0006290341758391086, "loss": 3.6369, "step": 43695 }, { "epoch": 2.969153417583911, "grad_norm": 2.3989126682281494, "learning_rate": 0.0006289917108302758, "loss": 3.5014, "step": 43700 }, { "epoch": 2.9694931376545726, "grad_norm": 2.280729293823242, "learning_rate": 0.0006289492458214432, "loss": 3.5487, "step": 43705 }, { "epoch": 2.969832857725234, "grad_norm": 1.6330872774124146, "learning_rate": 0.0006289067808126105, "loss": 3.4921, "step": 43710 }, { "epoch": 2.9701725777958963, "grad_norm": 2.082425355911255, "learning_rate": 0.0006288643158037777, "loss": 3.3556, "step": 43715 }, { "epoch": 2.970512297866558, "grad_norm": 1.642863154411316, "learning_rate": 0.000628821850794945, "loss": 3.3251, "step": 43720 }, { "epoch": 2.9708520179372195, "grad_norm": 2.0614466667175293, "learning_rate": 0.0006287793857861122, "loss": 3.4085, "step": 43725 }, { "epoch": 2.9711917380078816, "grad_norm": 2.293222665786743, "learning_rate": 0.0006287369207772795, "loss": 3.6118, "step": 43730 }, { "epoch": 2.9715314580785432, "grad_norm": 1.739133358001709, "learning_rate": 0.0006286944557684468, "loss": 3.4587, "step": 43735 }, { "epoch": 2.971871178149205, "grad_norm": 2.0297369956970215, "learning_rate": 0.0006286519907596141, "loss": 3.4712, "step": 43740 }, { "epoch": 2.972210898219867, "grad_norm": 1.7662674188613892, "learning_rate": 0.0006286095257507814, "loss": 3.6076, "step": 43745 }, { "epoch": 2.9725506182905286, "grad_norm": 1.5138972997665405, "learning_rate": 0.0006285670607419487, "loss": 3.5127, "step": 43750 }, { "epoch": 2.97289033836119, "grad_norm": 2.0984785556793213, "learning_rate": 0.0006285245957331159, "loss": 3.4834, "step": 43755 }, { "epoch": 2.9732300584318523, "grad_norm": 1.9907646179199219, "learning_rate": 0.0006284821307242832, "loss": 3.7428, "step": 43760 }, { "epoch": 2.973569778502514, "grad_norm": 2.699019193649292, "learning_rate": 
0.0006284396657154505, "loss": 3.4203, "step": 43765 }, { "epoch": 2.9739094985731755, "grad_norm": 1.4431170225143433, "learning_rate": 0.0006284056937083843, "loss": 3.5014, "step": 43770 }, { "epoch": 2.9742492186438376, "grad_norm": 2.4071178436279297, "learning_rate": 0.0006283632286995515, "loss": 3.2442, "step": 43775 }, { "epoch": 2.9745889387144993, "grad_norm": 1.7228349447250366, "learning_rate": 0.0006283207636907189, "loss": 3.5821, "step": 43780 }, { "epoch": 2.974928658785161, "grad_norm": 1.5858893394470215, "learning_rate": 0.0006282782986818861, "loss": 3.4853, "step": 43785 }, { "epoch": 2.975268378855823, "grad_norm": 1.6000983715057373, "learning_rate": 0.0006282358336730533, "loss": 3.5403, "step": 43790 }, { "epoch": 2.9756080989264846, "grad_norm": 2.3167994022369385, "learning_rate": 0.0006281933686642208, "loss": 3.5408, "step": 43795 }, { "epoch": 2.975947818997146, "grad_norm": 1.7460438013076782, "learning_rate": 0.000628150903655388, "loss": 3.4376, "step": 43800 }, { "epoch": 2.9762875390678083, "grad_norm": 1.6907615661621094, "learning_rate": 0.0006281084386465552, "loss": 3.529, "step": 43805 }, { "epoch": 2.97662725913847, "grad_norm": 2.303821086883545, "learning_rate": 0.0006280659736377226, "loss": 3.4476, "step": 43810 }, { "epoch": 2.9769669792091316, "grad_norm": 2.0386555194854736, "learning_rate": 0.0006280235086288898, "loss": 3.6392, "step": 43815 }, { "epoch": 2.9773066992797936, "grad_norm": 1.9932165145874023, "learning_rate": 0.000627981043620057, "loss": 3.6746, "step": 43820 }, { "epoch": 2.9776464193504553, "grad_norm": 1.5160177946090698, "learning_rate": 0.0006279385786112243, "loss": 3.424, "step": 43825 }, { "epoch": 2.977986139421117, "grad_norm": 1.8673654794692993, "learning_rate": 0.0006278961136023917, "loss": 3.4397, "step": 43830 }, { "epoch": 2.978325859491779, "grad_norm": 2.1917643547058105, "learning_rate": 0.0006278536485935589, "loss": 3.0897, "step": 43835 }, { "epoch": 2.9786655795624406, 
"grad_norm": 1.732232689857483, "learning_rate": 0.0006278111835847262, "loss": 3.5583, "step": 43840 }, { "epoch": 2.9790052996331022, "grad_norm": 1.6145479679107666, "learning_rate": 0.0006277687185758935, "loss": 3.5048, "step": 43845 }, { "epoch": 2.9793450197037643, "grad_norm": 1.8816438913345337, "learning_rate": 0.0006277262535670607, "loss": 3.5649, "step": 43850 }, { "epoch": 2.979684739774426, "grad_norm": 1.557904839515686, "learning_rate": 0.000627683788558228, "loss": 3.3944, "step": 43855 }, { "epoch": 2.9800244598450876, "grad_norm": 2.266875982284546, "learning_rate": 0.0006276413235493952, "loss": 3.5676, "step": 43860 }, { "epoch": 2.9803641799157496, "grad_norm": 1.8709533214569092, "learning_rate": 0.0006275988585405627, "loss": 3.4018, "step": 43865 }, { "epoch": 2.9807038999864113, "grad_norm": 1.7189655303955078, "learning_rate": 0.0006275563935317299, "loss": 3.5125, "step": 43870 }, { "epoch": 2.981043620057073, "grad_norm": 2.0460281372070312, "learning_rate": 0.0006275139285228971, "loss": 3.3794, "step": 43875 }, { "epoch": 2.981383340127735, "grad_norm": 1.8888295888900757, "learning_rate": 0.0006274714635140645, "loss": 3.7162, "step": 43880 }, { "epoch": 2.9817230601983966, "grad_norm": 2.2997512817382812, "learning_rate": 0.0006274289985052317, "loss": 3.3391, "step": 43885 }, { "epoch": 2.9820627802690582, "grad_norm": 2.20277738571167, "learning_rate": 0.0006273865334963989, "loss": 3.2729, "step": 43890 }, { "epoch": 2.9824025003397203, "grad_norm": 2.0427303314208984, "learning_rate": 0.0006273440684875663, "loss": 3.9077, "step": 43895 }, { "epoch": 2.982742220410382, "grad_norm": 2.0670430660247803, "learning_rate": 0.0006273016034787336, "loss": 3.4152, "step": 43900 }, { "epoch": 2.9830819404810436, "grad_norm": 2.5140812397003174, "learning_rate": 0.0006272591384699008, "loss": 3.5471, "step": 43905 }, { "epoch": 2.9834216605517057, "grad_norm": 2.1457087993621826, "learning_rate": 0.0006272166734610682, "loss": 3.5297, 
"step": 43910 }, { "epoch": 2.9837613806223673, "grad_norm": 1.7957100868225098, "learning_rate": 0.0006271742084522354, "loss": 3.5633, "step": 43915 }, { "epoch": 2.984101100693029, "grad_norm": 1.852510690689087, "learning_rate": 0.0006271317434434026, "loss": 3.3741, "step": 43920 }, { "epoch": 2.984440820763691, "grad_norm": 1.8435059785842896, "learning_rate": 0.0006270892784345699, "loss": 3.546, "step": 43925 }, { "epoch": 2.9847805408343526, "grad_norm": 2.6307122707366943, "learning_rate": 0.0006270468134257372, "loss": 3.3827, "step": 43930 }, { "epoch": 2.9851202609050143, "grad_norm": 2.753084182739258, "learning_rate": 0.0006270043484169045, "loss": 3.2249, "step": 43935 }, { "epoch": 2.985459980975676, "grad_norm": 1.9890670776367188, "learning_rate": 0.0006269618834080718, "loss": 3.4511, "step": 43940 }, { "epoch": 2.985799701046338, "grad_norm": 1.9497747421264648, "learning_rate": 0.0006269194183992391, "loss": 3.3456, "step": 43945 }, { "epoch": 2.9861394211169996, "grad_norm": 1.8452454805374146, "learning_rate": 0.0006268769533904063, "loss": 3.5469, "step": 43950 }, { "epoch": 2.9864791411876612, "grad_norm": 2.1016526222229004, "learning_rate": 0.0006268344883815736, "loss": 3.5488, "step": 43955 }, { "epoch": 2.9868188612583233, "grad_norm": 2.8184359073638916, "learning_rate": 0.0006267920233727408, "loss": 3.758, "step": 43960 }, { "epoch": 2.987158581328985, "grad_norm": 1.8053702116012573, "learning_rate": 0.0006267495583639081, "loss": 3.5597, "step": 43965 }, { "epoch": 2.9874983013996466, "grad_norm": 1.9352554082870483, "learning_rate": 0.0006267070933550755, "loss": 3.6958, "step": 43970 }, { "epoch": 2.987838021470308, "grad_norm": 1.580631971359253, "learning_rate": 0.0006266646283462427, "loss": 3.5135, "step": 43975 }, { "epoch": 2.9881777415409703, "grad_norm": 1.7541743516921997, "learning_rate": 0.00062662216333741, "loss": 3.4635, "step": 43980 }, { "epoch": 2.988517461611632, "grad_norm": 1.6589406728744507, 
"learning_rate": 0.0006265796983285773, "loss": 3.2838, "step": 43985 }, { "epoch": 2.9888571816822935, "grad_norm": 3.022352933883667, "learning_rate": 0.0006265372333197445, "loss": 3.6805, "step": 43990 }, { "epoch": 2.9891969017529556, "grad_norm": 1.9676512479782104, "learning_rate": 0.0006264947683109118, "loss": 3.67, "step": 43995 }, { "epoch": 2.9895366218236172, "grad_norm": 1.9327844381332397, "learning_rate": 0.0006264523033020791, "loss": 3.4157, "step": 44000 }, { "epoch": 2.989876341894279, "grad_norm": 2.0276992321014404, "learning_rate": 0.0006264098382932464, "loss": 3.5284, "step": 44005 }, { "epoch": 2.990216061964941, "grad_norm": 2.045403242111206, "learning_rate": 0.0006263673732844136, "loss": 3.3019, "step": 44010 }, { "epoch": 2.9905557820356026, "grad_norm": 1.9860916137695312, "learning_rate": 0.000626324908275581, "loss": 3.6522, "step": 44015 }, { "epoch": 2.990895502106264, "grad_norm": 2.026737928390503, "learning_rate": 0.0006262824432667482, "loss": 3.4716, "step": 44020 }, { "epoch": 2.9912352221769263, "grad_norm": 1.9374607801437378, "learning_rate": 0.0006262399782579154, "loss": 3.3979, "step": 44025 }, { "epoch": 2.991574942247588, "grad_norm": 2.0307443141937256, "learning_rate": 0.0006261975132490828, "loss": 3.9792, "step": 44030 }, { "epoch": 2.9919146623182495, "grad_norm": 1.866872787475586, "learning_rate": 0.00062615504824025, "loss": 3.4755, "step": 44035 }, { "epoch": 2.9922543823889116, "grad_norm": 1.6907920837402344, "learning_rate": 0.0006261125832314173, "loss": 3.4544, "step": 44040 }, { "epoch": 2.9925941024595732, "grad_norm": 1.5181210041046143, "learning_rate": 0.0006260701182225847, "loss": 3.5669, "step": 44045 }, { "epoch": 2.992933822530235, "grad_norm": 1.8280613422393799, "learning_rate": 0.0006260276532137519, "loss": 3.6713, "step": 44050 }, { "epoch": 2.993273542600897, "grad_norm": 1.9892979860305786, "learning_rate": 0.0006259851882049191, "loss": 3.6431, "step": 44055 }, { "epoch": 
2.9936132626715586, "grad_norm": 1.9869734048843384, "learning_rate": 0.0006259427231960864, "loss": 3.5967, "step": 44060 }, { "epoch": 2.99395298274222, "grad_norm": 1.9414771795272827, "learning_rate": 0.0006259002581872537, "loss": 3.3741, "step": 44065 }, { "epoch": 2.9942927028128823, "grad_norm": 1.9498153924942017, "learning_rate": 0.0006258577931784209, "loss": 3.5322, "step": 44070 }, { "epoch": 2.994632422883544, "grad_norm": 2.042484998703003, "learning_rate": 0.0006258153281695883, "loss": 3.4808, "step": 44075 }, { "epoch": 2.9949721429542056, "grad_norm": 2.422210454940796, "learning_rate": 0.0006257728631607556, "loss": 3.4185, "step": 44080 }, { "epoch": 2.9953118630248676, "grad_norm": 2.0787994861602783, "learning_rate": 0.0006257303981519228, "loss": 3.6195, "step": 44085 }, { "epoch": 2.9956515830955293, "grad_norm": 1.9739718437194824, "learning_rate": 0.0006256879331430901, "loss": 3.437, "step": 44090 }, { "epoch": 2.995991303166191, "grad_norm": 1.7001831531524658, "learning_rate": 0.0006256454681342574, "loss": 3.6393, "step": 44095 }, { "epoch": 2.996331023236853, "grad_norm": 2.1355226039886475, "learning_rate": 0.0006256030031254246, "loss": 3.6619, "step": 44100 }, { "epoch": 2.9966707433075146, "grad_norm": 1.9812486171722412, "learning_rate": 0.000625560538116592, "loss": 3.6764, "step": 44105 }, { "epoch": 2.9970104633781762, "grad_norm": 1.9974082708358765, "learning_rate": 0.0006255180731077593, "loss": 3.5077, "step": 44110 }, { "epoch": 2.9973501834488383, "grad_norm": 2.188871383666992, "learning_rate": 0.0006254756080989265, "loss": 3.3251, "step": 44115 }, { "epoch": 2.9976899035195, "grad_norm": 1.8055528402328491, "learning_rate": 0.0006254331430900938, "loss": 3.5979, "step": 44120 }, { "epoch": 2.9980296235901616, "grad_norm": 1.5603268146514893, "learning_rate": 0.000625390678081261, "loss": 3.6113, "step": 44125 }, { "epoch": 2.9983693436608236, "grad_norm": 2.001499891281128, "learning_rate": 0.0006253482130724283, 
"loss": 3.599, "step": 44130 }, { "epoch": 2.9987090637314853, "grad_norm": 1.7288753986358643, "learning_rate": 0.0006253057480635956, "loss": 3.5523, "step": 44135 }, { "epoch": 2.999048783802147, "grad_norm": 1.746629238128662, "learning_rate": 0.0006252632830547629, "loss": 3.2303, "step": 44140 }, { "epoch": 2.999388503872809, "grad_norm": 1.7690781354904175, "learning_rate": 0.0006252208180459302, "loss": 3.4563, "step": 44145 }, { "epoch": 2.9997282239434706, "grad_norm": 1.9247727394104004, "learning_rate": 0.0006251783530370975, "loss": 3.4667, "step": 44150 }, { "epoch": 3.0, "eval_bertscore": { "f1": 0.8281256647832704, "precision": 0.826831142013301, "recall": 0.8307247131540165 }, "eval_bleu_4": 0.005605263346903342, "eval_exact_match": 0.0, "eval_loss": 3.4653944969177246, "eval_meteor": 0.0954928157740334, "eval_rouge": { "rouge1": 0.12110804067537709, "rouge2": 0.011479067196380784, "rougeL": 0.10353287161802166, "rougeLsum": 0.10350439500746311 }, "eval_runtime": 1647.4593, "eval_samples_per_second": 6.264, "eval_steps_per_second": 0.783, "step": 44154 }, { "epoch": 3.0000679440141322, "grad_norm": 1.690854549407959, "learning_rate": 0.0006251358880282647, "loss": 3.3691, "step": 44155 }, { "epoch": 3.0004076640847943, "grad_norm": 1.8767343759536743, "learning_rate": 0.0006250934230194319, "loss": 3.5003, "step": 44160 }, { "epoch": 3.000747384155456, "grad_norm": 2.2703018188476562, "learning_rate": 0.0006250509580105993, "loss": 3.5542, "step": 44165 }, { "epoch": 3.0010871042261176, "grad_norm": 2.0047614574432373, "learning_rate": 0.0006250084930017665, "loss": 3.298, "step": 44170 }, { "epoch": 3.0014268242967796, "grad_norm": 2.1421546936035156, "learning_rate": 0.0006249660279929338, "loss": 3.4169, "step": 44175 }, { "epoch": 3.0017665443674413, "grad_norm": 2.0308597087860107, "learning_rate": 0.0006249235629841012, "loss": 3.4596, "step": 44180 }, { "epoch": 3.002106264438103, "grad_norm": 2.1784589290618896, "learning_rate": 
0.0006248810979752684, "loss": 3.2861, "step": 44185 }, { "epoch": 3.002445984508765, "grad_norm": 1.5986171960830688, "learning_rate": 0.0006248386329664356, "loss": 3.5499, "step": 44190 }, { "epoch": 3.0027857045794266, "grad_norm": 1.964514970779419, "learning_rate": 0.000624796167957603, "loss": 3.3249, "step": 44195 }, { "epoch": 3.0031254246500882, "grad_norm": 1.5980480909347534, "learning_rate": 0.0006247537029487702, "loss": 3.2687, "step": 44200 }, { "epoch": 3.0034651447207503, "grad_norm": 2.2704241275787354, "learning_rate": 0.0006247112379399374, "loss": 3.5167, "step": 44205 }, { "epoch": 3.003804864791412, "grad_norm": 1.78876531124115, "learning_rate": 0.0006246687729311049, "loss": 3.5165, "step": 44210 }, { "epoch": 3.0041445848620736, "grad_norm": 1.7355620861053467, "learning_rate": 0.0006246263079222721, "loss": 3.3001, "step": 44215 }, { "epoch": 3.004484304932735, "grad_norm": 2.234586477279663, "learning_rate": 0.0006245838429134394, "loss": 3.4533, "step": 44220 }, { "epoch": 3.0048240250033973, "grad_norm": 2.0414528846740723, "learning_rate": 0.0006245413779046066, "loss": 3.5426, "step": 44225 }, { "epoch": 3.005163745074059, "grad_norm": 1.9860442876815796, "learning_rate": 0.0006244989128957739, "loss": 3.3252, "step": 44230 }, { "epoch": 3.0055034651447206, "grad_norm": 2.177370548248291, "learning_rate": 0.0006244564478869412, "loss": 3.2949, "step": 44235 }, { "epoch": 3.0058431852153826, "grad_norm": 1.602779746055603, "learning_rate": 0.0006244139828781084, "loss": 3.4589, "step": 44240 }, { "epoch": 3.0061829052860443, "grad_norm": 2.2380003929138184, "learning_rate": 0.0006243715178692758, "loss": 3.505, "step": 44245 }, { "epoch": 3.006522625356706, "grad_norm": 2.4031357765197754, "learning_rate": 0.0006243290528604431, "loss": 3.4081, "step": 44250 }, { "epoch": 3.006862345427368, "grad_norm": 2.0145621299743652, "learning_rate": 0.0006242865878516103, "loss": 3.3606, "step": 44255 }, { "epoch": 3.0072020654980296, 
"grad_norm": 2.0770862102508545, "learning_rate": 0.0006242441228427775, "loss": 3.7033, "step": 44260 }, { "epoch": 3.0075417855686912, "grad_norm": 1.8920314311981201, "learning_rate": 0.0006242016578339449, "loss": 3.4133, "step": 44265 }, { "epoch": 3.0078815056393533, "grad_norm": 1.9884583950042725, "learning_rate": 0.0006241591928251121, "loss": 3.14, "step": 44270 }, { "epoch": 3.008221225710015, "grad_norm": 2.0589537620544434, "learning_rate": 0.0006241167278162793, "loss": 3.2812, "step": 44275 }, { "epoch": 3.0085609457806766, "grad_norm": 1.7166658639907837, "learning_rate": 0.0006240742628074468, "loss": 3.4732, "step": 44280 }, { "epoch": 3.0089006658513386, "grad_norm": 2.2981972694396973, "learning_rate": 0.000624031797798614, "loss": 3.3058, "step": 44285 }, { "epoch": 3.0092403859220003, "grad_norm": 2.148878574371338, "learning_rate": 0.0006239893327897812, "loss": 3.4204, "step": 44290 }, { "epoch": 3.009580105992662, "grad_norm": 1.6311357021331787, "learning_rate": 0.0006239468677809486, "loss": 3.464, "step": 44295 }, { "epoch": 3.009919826063324, "grad_norm": 1.616284728050232, "learning_rate": 0.0006239044027721158, "loss": 3.3891, "step": 44300 }, { "epoch": 3.0102595461339856, "grad_norm": 1.7524454593658447, "learning_rate": 0.000623861937763283, "loss": 3.3248, "step": 44305 }, { "epoch": 3.0105992662046472, "grad_norm": 1.9122337102890015, "learning_rate": 0.0006238194727544503, "loss": 3.5886, "step": 44310 }, { "epoch": 3.0109389862753093, "grad_norm": 2.404172658920288, "learning_rate": 0.0006237770077456177, "loss": 3.6064, "step": 44315 }, { "epoch": 3.011278706345971, "grad_norm": 2.0561459064483643, "learning_rate": 0.0006237345427367849, "loss": 3.3392, "step": 44320 }, { "epoch": 3.0116184264166326, "grad_norm": 2.094421863555908, "learning_rate": 0.0006236920777279522, "loss": 3.44, "step": 44325 }, { "epoch": 3.0119581464872947, "grad_norm": 1.7619820833206177, "learning_rate": 0.0006236496127191195, "loss": 3.6587, "step": 
44330 }, { "epoch": 3.0122978665579563, "grad_norm": 1.9656175374984741, "learning_rate": 0.0006236071477102867, "loss": 3.4575, "step": 44335 }, { "epoch": 3.012637586628618, "grad_norm": 1.9288911819458008, "learning_rate": 0.000623564682701454, "loss": 3.3068, "step": 44340 }, { "epoch": 3.01297730669928, "grad_norm": 2.483086109161377, "learning_rate": 0.0006235222176926213, "loss": 3.5522, "step": 44345 }, { "epoch": 3.0133170267699416, "grad_norm": 1.6443318128585815, "learning_rate": 0.0006234797526837886, "loss": 3.4075, "step": 44350 }, { "epoch": 3.0136567468406033, "grad_norm": 1.9797946214675903, "learning_rate": 0.0006234372876749559, "loss": 3.5097, "step": 44355 }, { "epoch": 3.0139964669112653, "grad_norm": 1.5453038215637207, "learning_rate": 0.0006233948226661231, "loss": 3.4462, "step": 44360 }, { "epoch": 3.014336186981927, "grad_norm": 1.7534035444259644, "learning_rate": 0.0006233523576572904, "loss": 3.3717, "step": 44365 }, { "epoch": 3.0146759070525886, "grad_norm": 1.8229186534881592, "learning_rate": 0.0006233098926484577, "loss": 3.3577, "step": 44370 }, { "epoch": 3.01501562712325, "grad_norm": 1.817114233970642, "learning_rate": 0.0006232674276396249, "loss": 3.6262, "step": 44375 }, { "epoch": 3.0153553471939123, "grad_norm": 1.9929851293563843, "learning_rate": 0.0006232249626307922, "loss": 3.2577, "step": 44380 }, { "epoch": 3.015695067264574, "grad_norm": 1.8562302589416504, "learning_rate": 0.0006231824976219596, "loss": 3.5625, "step": 44385 }, { "epoch": 3.0160347873352356, "grad_norm": 2.107234477996826, "learning_rate": 0.0006231400326131268, "loss": 3.4348, "step": 44390 }, { "epoch": 3.0163745074058976, "grad_norm": 1.5762012004852295, "learning_rate": 0.000623097567604294, "loss": 3.2497, "step": 44395 }, { "epoch": 3.0167142274765593, "grad_norm": 2.003066301345825, "learning_rate": 0.0006230551025954614, "loss": 3.6278, "step": 44400 }, { "epoch": 3.017053947547221, "grad_norm": 1.8887076377868652, "learning_rate": 
0.0006230126375866286, "loss": 3.3006, "step": 44405 }, { "epoch": 3.017393667617883, "grad_norm": 2.1795413494110107, "learning_rate": 0.0006229701725777958, "loss": 3.314, "step": 44410 }, { "epoch": 3.0177333876885446, "grad_norm": 2.099198341369629, "learning_rate": 0.0006229277075689632, "loss": 3.7448, "step": 44415 }, { "epoch": 3.0180731077592062, "grad_norm": 2.256209373474121, "learning_rate": 0.0006228852425601305, "loss": 3.6102, "step": 44420 }, { "epoch": 3.0184128278298683, "grad_norm": 2.0169320106506348, "learning_rate": 0.0006228427775512977, "loss": 3.6229, "step": 44425 }, { "epoch": 3.01875254790053, "grad_norm": 1.5148504972457886, "learning_rate": 0.0006228003125424651, "loss": 3.5313, "step": 44430 }, { "epoch": 3.0190922679711916, "grad_norm": 1.768136978149414, "learning_rate": 0.0006227578475336323, "loss": 3.2345, "step": 44435 }, { "epoch": 3.0194319880418536, "grad_norm": 1.849257469177246, "learning_rate": 0.0006227153825247995, "loss": 3.7613, "step": 44440 }, { "epoch": 3.0197717081125153, "grad_norm": 2.0596764087677, "learning_rate": 0.0006226729175159669, "loss": 3.4874, "step": 44445 }, { "epoch": 3.020111428183177, "grad_norm": 2.1659560203552246, "learning_rate": 0.0006226304525071341, "loss": 3.3522, "step": 44450 }, { "epoch": 3.020451148253839, "grad_norm": 2.2467005252838135, "learning_rate": 0.0006225879874983014, "loss": 3.2985, "step": 44455 }, { "epoch": 3.0207908683245006, "grad_norm": 1.9328978061676025, "learning_rate": 0.0006225455224894687, "loss": 3.3949, "step": 44460 }, { "epoch": 3.0211305883951622, "grad_norm": 2.3691813945770264, "learning_rate": 0.000622503057480636, "loss": 3.5796, "step": 44465 }, { "epoch": 3.0214703084658243, "grad_norm": 2.2091269493103027, "learning_rate": 0.0006224605924718032, "loss": 3.3446, "step": 44470 }, { "epoch": 3.021810028536486, "grad_norm": 1.961007833480835, "learning_rate": 0.0006224181274629705, "loss": 3.4458, "step": 44475 }, { "epoch": 3.0221497486071476, 
"grad_norm": 2.118194341659546, "learning_rate": 0.0006223756624541378, "loss": 3.6021, "step": 44480 }, { "epoch": 3.0224894686778097, "grad_norm": 2.137531280517578, "learning_rate": 0.000622333197445305, "loss": 3.652, "step": 44485 }, { "epoch": 3.0228291887484713, "grad_norm": 1.5313873291015625, "learning_rate": 0.0006222907324364724, "loss": 3.1546, "step": 44490 }, { "epoch": 3.023168908819133, "grad_norm": 1.9204070568084717, "learning_rate": 0.0006222482674276397, "loss": 3.4133, "step": 44495 }, { "epoch": 3.023508628889795, "grad_norm": 1.7128576040267944, "learning_rate": 0.0006222058024188069, "loss": 3.4562, "step": 44500 }, { "epoch": 3.0238483489604566, "grad_norm": 2.2562978267669678, "learning_rate": 0.0006221633374099742, "loss": 3.331, "step": 44505 }, { "epoch": 3.0241880690311183, "grad_norm": 2.0489156246185303, "learning_rate": 0.0006221208724011414, "loss": 3.4573, "step": 44510 }, { "epoch": 3.0245277891017803, "grad_norm": 2.080387830734253, "learning_rate": 0.0006220784073923087, "loss": 3.51, "step": 44515 }, { "epoch": 3.024867509172442, "grad_norm": 2.0814476013183594, "learning_rate": 0.000622035942383476, "loss": 3.4118, "step": 44520 }, { "epoch": 3.0252072292431036, "grad_norm": 2.0692968368530273, "learning_rate": 0.0006219934773746433, "loss": 3.3498, "step": 44525 }, { "epoch": 3.0255469493137657, "grad_norm": 1.9154837131500244, "learning_rate": 0.0006219510123658106, "loss": 3.5912, "step": 44530 }, { "epoch": 3.0258866693844273, "grad_norm": 1.9792492389678955, "learning_rate": 0.0006219085473569779, "loss": 3.3835, "step": 44535 }, { "epoch": 3.026226389455089, "grad_norm": 1.7366917133331299, "learning_rate": 0.0006218660823481451, "loss": 3.9053, "step": 44540 }, { "epoch": 3.026566109525751, "grad_norm": 2.0303728580474854, "learning_rate": 0.0006218236173393123, "loss": 3.161, "step": 44545 }, { "epoch": 3.0269058295964126, "grad_norm": 2.473932981491089, "learning_rate": 0.0006217811523304797, "loss": 3.4964, "step": 
44550 }, { "epoch": 3.0272455496670743, "grad_norm": 1.5148627758026123, "learning_rate": 0.0006217386873216469, "loss": 3.4736, "step": 44555 }, { "epoch": 3.027585269737736, "grad_norm": 2.1131985187530518, "learning_rate": 0.0006216962223128143, "loss": 3.3593, "step": 44560 }, { "epoch": 3.027924989808398, "grad_norm": 1.7628505229949951, "learning_rate": 0.0006216537573039816, "loss": 3.4735, "step": 44565 }, { "epoch": 3.0282647098790596, "grad_norm": 1.5177279710769653, "learning_rate": 0.0006216112922951488, "loss": 3.3929, "step": 44570 }, { "epoch": 3.0286044299497212, "grad_norm": 1.8453569412231445, "learning_rate": 0.0006215688272863161, "loss": 3.4604, "step": 44575 }, { "epoch": 3.0289441500203833, "grad_norm": 2.2352118492126465, "learning_rate": 0.0006215263622774834, "loss": 3.2896, "step": 44580 }, { "epoch": 3.029283870091045, "grad_norm": 2.04201078414917, "learning_rate": 0.0006214838972686506, "loss": 3.4601, "step": 44585 }, { "epoch": 3.0296235901617066, "grad_norm": 1.834359049797058, "learning_rate": 0.0006214414322598179, "loss": 3.4613, "step": 44590 }, { "epoch": 3.0299633102323686, "grad_norm": 2.258631467819214, "learning_rate": 0.0006213989672509853, "loss": 3.3486, "step": 44595 }, { "epoch": 3.0303030303030303, "grad_norm": 1.8523845672607422, "learning_rate": 0.0006213565022421525, "loss": 3.4416, "step": 44600 }, { "epoch": 3.030642750373692, "grad_norm": 2.312774658203125, "learning_rate": 0.0006213140372333198, "loss": 3.3938, "step": 44605 }, { "epoch": 3.030982470444354, "grad_norm": 1.71056067943573, "learning_rate": 0.000621271572224487, "loss": 3.2947, "step": 44610 }, { "epoch": 3.0313221905150156, "grad_norm": 2.002852201461792, "learning_rate": 0.0006212291072156543, "loss": 3.3341, "step": 44615 }, { "epoch": 3.0316619105856772, "grad_norm": 1.9116346836090088, "learning_rate": 0.0006211866422068216, "loss": 3.4028, "step": 44620 }, { "epoch": 3.0320016306563393, "grad_norm": 2.3365366458892822, "learning_rate": 
0.0006211441771979888, "loss": 3.4831, "step": 44625 }, { "epoch": 3.032341350727001, "grad_norm": 1.9691694974899292, "learning_rate": 0.0006211017121891562, "loss": 3.638, "step": 44630 }, { "epoch": 3.0326810707976626, "grad_norm": 2.0711801052093506, "learning_rate": 0.0006210592471803235, "loss": 3.3419, "step": 44635 }, { "epoch": 3.0330207908683247, "grad_norm": 1.8062864542007446, "learning_rate": 0.0006210167821714907, "loss": 3.1116, "step": 44640 }, { "epoch": 3.0333605109389863, "grad_norm": 2.1357619762420654, "learning_rate": 0.000620974317162658, "loss": 3.6109, "step": 44645 }, { "epoch": 3.033700231009648, "grad_norm": 1.838737964630127, "learning_rate": 0.0006209318521538253, "loss": 3.1536, "step": 44650 }, { "epoch": 3.03403995108031, "grad_norm": 2.395359992980957, "learning_rate": 0.0006208893871449925, "loss": 3.4572, "step": 44655 }, { "epoch": 3.0343796711509716, "grad_norm": 1.990049123764038, "learning_rate": 0.0006208469221361597, "loss": 3.3965, "step": 44660 }, { "epoch": 3.0347193912216333, "grad_norm": 1.7126152515411377, "learning_rate": 0.0006208044571273272, "loss": 3.5881, "step": 44665 }, { "epoch": 3.0350591112922953, "grad_norm": 2.347442626953125, "learning_rate": 0.0006207619921184944, "loss": 3.2219, "step": 44670 }, { "epoch": 3.035398831362957, "grad_norm": 1.809110164642334, "learning_rate": 0.0006207195271096616, "loss": 3.2631, "step": 44675 }, { "epoch": 3.0357385514336186, "grad_norm": 2.247004508972168, "learning_rate": 0.000620677062100829, "loss": 3.5332, "step": 44680 }, { "epoch": 3.0360782715042807, "grad_norm": 2.1578123569488525, "learning_rate": 0.0006206345970919962, "loss": 3.5647, "step": 44685 }, { "epoch": 3.0364179915749423, "grad_norm": 1.7656017541885376, "learning_rate": 0.0006205921320831634, "loss": 3.3514, "step": 44690 }, { "epoch": 3.036757711645604, "grad_norm": 2.4058408737182617, "learning_rate": 0.0006205496670743309, "loss": 3.3157, "step": 44695 }, { "epoch": 3.037097431716266, 
"grad_norm": 1.9660767316818237, "learning_rate": 0.0006205072020654981, "loss": 3.5819, "step": 44700 }, { "epoch": 3.0374371517869276, "grad_norm": 2.249389886856079, "learning_rate": 0.0006204647370566653, "loss": 3.3228, "step": 44705 }, { "epoch": 3.0377768718575893, "grad_norm": 1.6055089235305786, "learning_rate": 0.0006204222720478326, "loss": 3.5964, "step": 44710 }, { "epoch": 3.038116591928251, "grad_norm": 2.1657979488372803, "learning_rate": 0.0006203798070389999, "loss": 3.2162, "step": 44715 }, { "epoch": 3.038456311998913, "grad_norm": 1.6324516534805298, "learning_rate": 0.0006203373420301671, "loss": 3.3701, "step": 44720 }, { "epoch": 3.0387960320695746, "grad_norm": 1.4990938901901245, "learning_rate": 0.0006202948770213344, "loss": 3.266, "step": 44725 }, { "epoch": 3.0391357521402362, "grad_norm": 2.306063413619995, "learning_rate": 0.0006202524120125018, "loss": 3.5332, "step": 44730 }, { "epoch": 3.0394754722108983, "grad_norm": 1.8365141153335571, "learning_rate": 0.000620209947003669, "loss": 3.5749, "step": 44735 }, { "epoch": 3.03981519228156, "grad_norm": 2.0167529582977295, "learning_rate": 0.0006201674819948363, "loss": 3.125, "step": 44740 }, { "epoch": 3.0401549123522216, "grad_norm": 2.052720785140991, "learning_rate": 0.0006201250169860035, "loss": 3.5378, "step": 44745 }, { "epoch": 3.0404946324228836, "grad_norm": 1.4344521760940552, "learning_rate": 0.0006200825519771708, "loss": 3.3991, "step": 44750 }, { "epoch": 3.0408343524935453, "grad_norm": 1.7206860780715942, "learning_rate": 0.0006200400869683381, "loss": 3.1867, "step": 44755 }, { "epoch": 3.041174072564207, "grad_norm": 2.1315360069274902, "learning_rate": 0.0006199976219595053, "loss": 3.2898, "step": 44760 }, { "epoch": 3.041513792634869, "grad_norm": 1.9136921167373657, "learning_rate": 0.0006199551569506727, "loss": 3.2684, "step": 44765 }, { "epoch": 3.0418535127055306, "grad_norm": 1.8965773582458496, "learning_rate": 0.00061991269194184, "loss": 2.9979, 
"step": 44770 }, { "epoch": 3.0421932327761922, "grad_norm": 2.245112419128418, "learning_rate": 0.0006198702269330072, "loss": 3.5666, "step": 44775 }, { "epoch": 3.0425329528468543, "grad_norm": 2.0090086460113525, "learning_rate": 0.0006198277619241745, "loss": 3.4149, "step": 44780 }, { "epoch": 3.042872672917516, "grad_norm": 2.0418145656585693, "learning_rate": 0.0006197852969153418, "loss": 3.4043, "step": 44785 }, { "epoch": 3.0432123929881776, "grad_norm": 2.041917085647583, "learning_rate": 0.000619742831906509, "loss": 3.1841, "step": 44790 }, { "epoch": 3.0435521130588397, "grad_norm": 2.1143128871917725, "learning_rate": 0.0006197003668976762, "loss": 3.4406, "step": 44795 }, { "epoch": 3.0438918331295013, "grad_norm": 1.6090418100357056, "learning_rate": 0.0006196579018888437, "loss": 3.5524, "step": 44800 }, { "epoch": 3.044231553200163, "grad_norm": 2.4838449954986572, "learning_rate": 0.0006196154368800109, "loss": 3.4091, "step": 44805 }, { "epoch": 3.044571273270825, "grad_norm": 1.64095938205719, "learning_rate": 0.0006195729718711781, "loss": 3.161, "step": 44810 }, { "epoch": 3.0449109933414866, "grad_norm": 1.7503230571746826, "learning_rate": 0.0006195305068623455, "loss": 3.6805, "step": 44815 }, { "epoch": 3.0452507134121483, "grad_norm": 2.1877644062042236, "learning_rate": 0.0006194880418535127, "loss": 3.537, "step": 44820 }, { "epoch": 3.0455904334828103, "grad_norm": 1.422289490699768, "learning_rate": 0.0006194455768446799, "loss": 3.4836, "step": 44825 }, { "epoch": 3.045930153553472, "grad_norm": 1.5810929536819458, "learning_rate": 0.0006194031118358473, "loss": 3.5572, "step": 44830 }, { "epoch": 3.0462698736241336, "grad_norm": 1.918929100036621, "learning_rate": 0.0006193606468270146, "loss": 3.2351, "step": 44835 }, { "epoch": 3.0466095936947957, "grad_norm": 2.0064308643341064, "learning_rate": 0.0006193181818181818, "loss": 3.3545, "step": 44840 }, { "epoch": 3.0469493137654573, "grad_norm": 1.9906519651412964, 
"learning_rate": 0.0006192757168093492, "loss": 3.293, "step": 44845 }, { "epoch": 3.047289033836119, "grad_norm": 1.9564114809036255, "learning_rate": 0.0006192332518005164, "loss": 3.5984, "step": 44850 }, { "epoch": 3.047628753906781, "grad_norm": 1.6433161497116089, "learning_rate": 0.0006191907867916836, "loss": 3.4606, "step": 44855 }, { "epoch": 3.0479684739774426, "grad_norm": 1.8052898645401, "learning_rate": 0.0006191483217828509, "loss": 3.372, "step": 44860 }, { "epoch": 3.0483081940481043, "grad_norm": 2.613095283508301, "learning_rate": 0.0006191058567740182, "loss": 3.4318, "step": 44865 }, { "epoch": 3.0486479141187663, "grad_norm": 1.629652976989746, "learning_rate": 0.0006190633917651855, "loss": 3.337, "step": 44870 }, { "epoch": 3.048987634189428, "grad_norm": 2.064788579940796, "learning_rate": 0.0006190209267563528, "loss": 3.4118, "step": 44875 }, { "epoch": 3.0493273542600896, "grad_norm": 2.054687738418579, "learning_rate": 0.0006189784617475201, "loss": 3.2072, "step": 44880 }, { "epoch": 3.0496670743307517, "grad_norm": 2.2967405319213867, "learning_rate": 0.0006189359967386873, "loss": 3.5763, "step": 44885 }, { "epoch": 3.0500067944014133, "grad_norm": 1.8968502283096313, "learning_rate": 0.0006188935317298546, "loss": 3.5527, "step": 44890 }, { "epoch": 3.050346514472075, "grad_norm": 2.042799949645996, "learning_rate": 0.0006188510667210218, "loss": 3.3322, "step": 44895 }, { "epoch": 3.0506862345427366, "grad_norm": 2.4692234992980957, "learning_rate": 0.0006188086017121892, "loss": 3.5603, "step": 44900 }, { "epoch": 3.0510259546133986, "grad_norm": 1.7631397247314453, "learning_rate": 0.0006187661367033565, "loss": 3.2532, "step": 44905 }, { "epoch": 3.0513656746840603, "grad_norm": 2.1964783668518066, "learning_rate": 0.0006187236716945237, "loss": 3.3968, "step": 44910 }, { "epoch": 3.051705394754722, "grad_norm": 1.5211479663848877, "learning_rate": 0.0006186812066856911, "loss": 3.5379, "step": 44915 }, { "epoch": 
3.052045114825384, "grad_norm": 1.6790030002593994, "learning_rate": 0.0006186387416768583, "loss": 3.5775, "step": 44920 }, { "epoch": 3.0523848348960456, "grad_norm": 1.9022331237792969, "learning_rate": 0.0006185962766680255, "loss": 3.3729, "step": 44925 }, { "epoch": 3.0527245549667072, "grad_norm": 2.1588706970214844, "learning_rate": 0.0006185538116591929, "loss": 3.5545, "step": 44930 }, { "epoch": 3.0530642750373693, "grad_norm": 2.1267995834350586, "learning_rate": 0.0006185113466503601, "loss": 3.2317, "step": 44935 }, { "epoch": 3.053403995108031, "grad_norm": 2.040699005126953, "learning_rate": 0.0006184688816415274, "loss": 3.3571, "step": 44940 }, { "epoch": 3.0537437151786926, "grad_norm": 1.7123355865478516, "learning_rate": 0.0006184264166326948, "loss": 3.4444, "step": 44945 }, { "epoch": 3.0540834352493547, "grad_norm": 1.7389349937438965, "learning_rate": 0.000618383951623862, "loss": 3.4285, "step": 44950 }, { "epoch": 3.0544231553200163, "grad_norm": 1.7280665636062622, "learning_rate": 0.0006183414866150292, "loss": 3.3495, "step": 44955 }, { "epoch": 3.054762875390678, "grad_norm": 1.5694432258605957, "learning_rate": 0.0006182990216061965, "loss": 3.4949, "step": 44960 }, { "epoch": 3.05510259546134, "grad_norm": 2.189251661300659, "learning_rate": 0.0006182565565973638, "loss": 3.4202, "step": 44965 }, { "epoch": 3.0554423155320016, "grad_norm": 1.9305908679962158, "learning_rate": 0.000618214091588531, "loss": 3.2797, "step": 44970 }, { "epoch": 3.0557820356026633, "grad_norm": 2.092027425765991, "learning_rate": 0.0006181716265796984, "loss": 3.5687, "step": 44975 }, { "epoch": 3.0561217556733253, "grad_norm": 1.9008827209472656, "learning_rate": 0.0006181291615708657, "loss": 3.5469, "step": 44980 }, { "epoch": 3.056461475743987, "grad_norm": 1.7335869073867798, "learning_rate": 0.0006180866965620329, "loss": 3.5321, "step": 44985 }, { "epoch": 3.0568011958146486, "grad_norm": 1.6893846988677979, "learning_rate": 0.0006180442315532002, 
"loss": 3.4503, "step": 44990 }, { "epoch": 3.0571409158853107, "grad_norm": 1.8230336904525757, "learning_rate": 0.0006180017665443674, "loss": 3.1227, "step": 44995 }, { "epoch": 3.0574806359559723, "grad_norm": 1.7095543146133423, "learning_rate": 0.0006179593015355347, "loss": 3.3926, "step": 45000 }, { "epoch": 3.057820356026634, "grad_norm": 1.9981228113174438, "learning_rate": 0.000617916836526702, "loss": 3.2757, "step": 45005 }, { "epoch": 3.058160076097296, "grad_norm": 1.804058313369751, "learning_rate": 0.0006178743715178693, "loss": 3.2375, "step": 45010 }, { "epoch": 3.0584997961679576, "grad_norm": 1.9570671319961548, "learning_rate": 0.0006178319065090366, "loss": 3.5465, "step": 45015 }, { "epoch": 3.0588395162386193, "grad_norm": 1.8168220520019531, "learning_rate": 0.0006177894415002039, "loss": 3.2304, "step": 45020 }, { "epoch": 3.0591792363092813, "grad_norm": 1.6835081577301025, "learning_rate": 0.0006177469764913711, "loss": 3.474, "step": 45025 }, { "epoch": 3.059518956379943, "grad_norm": 1.8429701328277588, "learning_rate": 0.0006177045114825384, "loss": 3.538, "step": 45030 }, { "epoch": 3.0598586764506046, "grad_norm": 2.270899772644043, "learning_rate": 0.0006176620464737057, "loss": 3.328, "step": 45035 }, { "epoch": 3.0601983965212667, "grad_norm": 1.8882132768630981, "learning_rate": 0.0006176195814648729, "loss": 3.5638, "step": 45040 }, { "epoch": 3.0605381165919283, "grad_norm": 1.7897015810012817, "learning_rate": 0.0006175771164560402, "loss": 3.1758, "step": 45045 }, { "epoch": 3.06087783666259, "grad_norm": 2.171107292175293, "learning_rate": 0.0006175346514472076, "loss": 3.3633, "step": 45050 }, { "epoch": 3.0612175567332516, "grad_norm": 2.018388032913208, "learning_rate": 0.0006174921864383748, "loss": 3.3671, "step": 45055 }, { "epoch": 3.0615572768039137, "grad_norm": 1.7060078382492065, "learning_rate": 0.000617449721429542, "loss": 3.5062, "step": 45060 }, { "epoch": 3.0618969968745753, "grad_norm": 
2.0884249210357666, "learning_rate": 0.0006174072564207094, "loss": 3.4265, "step": 45065 }, { "epoch": 3.062236716945237, "grad_norm": 2.579716205596924, "learning_rate": 0.0006173647914118766, "loss": 3.3169, "step": 45070 }, { "epoch": 3.062576437015899, "grad_norm": 1.9555174112319946, "learning_rate": 0.0006173223264030438, "loss": 3.6854, "step": 45075 }, { "epoch": 3.0629161570865606, "grad_norm": 1.670964002609253, "learning_rate": 0.0006172798613942113, "loss": 3.4567, "step": 45080 }, { "epoch": 3.0632558771572223, "grad_norm": 1.7616411447525024, "learning_rate": 0.0006172373963853785, "loss": 3.6043, "step": 45085 }, { "epoch": 3.0635955972278843, "grad_norm": 1.8361759185791016, "learning_rate": 0.0006171949313765457, "loss": 3.3494, "step": 45090 }, { "epoch": 3.063935317298546, "grad_norm": 1.6193220615386963, "learning_rate": 0.000617152466367713, "loss": 3.3996, "step": 45095 }, { "epoch": 3.0642750373692076, "grad_norm": 2.707695245742798, "learning_rate": 0.0006171100013588803, "loss": 3.3875, "step": 45100 }, { "epoch": 3.0646147574398697, "grad_norm": 2.304713249206543, "learning_rate": 0.0006170675363500475, "loss": 3.5552, "step": 45105 }, { "epoch": 3.0649544775105313, "grad_norm": 1.6009202003479004, "learning_rate": 0.0006170250713412148, "loss": 3.5755, "step": 45110 }, { "epoch": 3.065294197581193, "grad_norm": 2.0991668701171875, "learning_rate": 0.0006169826063323822, "loss": 3.3918, "step": 45115 }, { "epoch": 3.065633917651855, "grad_norm": 1.9899450540542603, "learning_rate": 0.0006169401413235494, "loss": 3.5019, "step": 45120 }, { "epoch": 3.0659736377225166, "grad_norm": 2.1031763553619385, "learning_rate": 0.0006168976763147167, "loss": 3.3723, "step": 45125 }, { "epoch": 3.0663133577931783, "grad_norm": 1.9623630046844482, "learning_rate": 0.000616855211305884, "loss": 3.4715, "step": 45130 }, { "epoch": 3.0666530778638403, "grad_norm": 1.6868919134140015, "learning_rate": 0.0006168127462970512, "loss": 3.4079, "step": 45135 }, 
{ "epoch": 3.066992797934502, "grad_norm": 2.318587303161621, "learning_rate": 0.0006167702812882185, "loss": 3.3509, "step": 45140 }, { "epoch": 3.0673325180051636, "grad_norm": 1.945180058479309, "learning_rate": 0.0006167278162793857, "loss": 3.2425, "step": 45145 }, { "epoch": 3.0676722380758257, "grad_norm": 1.8404533863067627, "learning_rate": 0.0006166853512705531, "loss": 3.4337, "step": 45150 }, { "epoch": 3.0680119581464873, "grad_norm": 2.2710134983062744, "learning_rate": 0.0006166428862617204, "loss": 3.4584, "step": 45155 }, { "epoch": 3.068351678217149, "grad_norm": 1.7926809787750244, "learning_rate": 0.0006166004212528876, "loss": 3.3571, "step": 45160 }, { "epoch": 3.068691398287811, "grad_norm": 1.6715401411056519, "learning_rate": 0.0006165579562440549, "loss": 3.4964, "step": 45165 }, { "epoch": 3.0690311183584726, "grad_norm": 1.8357632160186768, "learning_rate": 0.0006165154912352222, "loss": 3.4988, "step": 45170 }, { "epoch": 3.0693708384291343, "grad_norm": 2.176988363265991, "learning_rate": 0.0006164730262263894, "loss": 3.3905, "step": 45175 }, { "epoch": 3.0697105584997963, "grad_norm": 1.9470988512039185, "learning_rate": 0.0006164305612175566, "loss": 3.39, "step": 45180 }, { "epoch": 3.070050278570458, "grad_norm": 2.3650996685028076, "learning_rate": 0.0006163880962087241, "loss": 3.4274, "step": 45185 }, { "epoch": 3.0703899986411196, "grad_norm": 1.8732980489730835, "learning_rate": 0.0006163456311998913, "loss": 3.5037, "step": 45190 }, { "epoch": 3.0707297187117817, "grad_norm": 2.183248996734619, "learning_rate": 0.0006163031661910585, "loss": 3.293, "step": 45195 }, { "epoch": 3.0710694387824433, "grad_norm": 2.0473153591156006, "learning_rate": 0.0006162607011822259, "loss": 3.4467, "step": 45200 }, { "epoch": 3.071409158853105, "grad_norm": 2.0844621658325195, "learning_rate": 0.0006162182361733931, "loss": 3.1333, "step": 45205 }, { "epoch": 3.071748878923767, "grad_norm": 2.2856125831604004, "learning_rate": 
0.0006161757711645603, "loss": 3.5348, "step": 45210 }, { "epoch": 3.0720885989944287, "grad_norm": 2.5317888259887695, "learning_rate": 0.0006161333061557277, "loss": 3.5288, "step": 45215 }, { "epoch": 3.0724283190650903, "grad_norm": 1.857208251953125, "learning_rate": 0.000616090841146895, "loss": 3.3509, "step": 45220 }, { "epoch": 3.0727680391357524, "grad_norm": 1.934151530265808, "learning_rate": 0.0006160483761380622, "loss": 3.4652, "step": 45225 }, { "epoch": 3.073107759206414, "grad_norm": 1.7857314348220825, "learning_rate": 0.0006160059111292296, "loss": 3.252, "step": 45230 }, { "epoch": 3.0734474792770756, "grad_norm": 2.1930718421936035, "learning_rate": 0.0006159634461203968, "loss": 3.198, "step": 45235 }, { "epoch": 3.0737871993477373, "grad_norm": 1.8334646224975586, "learning_rate": 0.0006159209811115641, "loss": 3.4374, "step": 45240 }, { "epoch": 3.0741269194183993, "grad_norm": 2.383758306503296, "learning_rate": 0.0006158785161027313, "loss": 3.6882, "step": 45245 }, { "epoch": 3.074466639489061, "grad_norm": 2.559720516204834, "learning_rate": 0.0006158360510938986, "loss": 3.786, "step": 45250 }, { "epoch": 3.0748063595597226, "grad_norm": 1.5886874198913574, "learning_rate": 0.000615793586085066, "loss": 3.39, "step": 45255 }, { "epoch": 3.0751460796303847, "grad_norm": 2.311070680618286, "learning_rate": 0.0006157511210762332, "loss": 3.2469, "step": 45260 }, { "epoch": 3.0754857997010463, "grad_norm": 1.993981122970581, "learning_rate": 0.0006157086560674005, "loss": 3.4712, "step": 45265 }, { "epoch": 3.075825519771708, "grad_norm": 2.239410877227783, "learning_rate": 0.0006156661910585678, "loss": 3.2836, "step": 45270 }, { "epoch": 3.07616523984237, "grad_norm": 1.7896265983581543, "learning_rate": 0.000615623726049735, "loss": 3.2588, "step": 45275 }, { "epoch": 3.0765049599130316, "grad_norm": 1.6984585523605347, "learning_rate": 0.0006155812610409022, "loss": 3.2983, "step": 45280 }, { "epoch": 3.0768446799836933, "grad_norm": 
1.5983797311782837, "learning_rate": 0.0006155387960320697, "loss": 3.6569, "step": 45285 }, { "epoch": 3.0771844000543553, "grad_norm": 1.523942470550537, "learning_rate": 0.0006154963310232369, "loss": 3.5002, "step": 45290 }, { "epoch": 3.077524120125017, "grad_norm": 2.2631537914276123, "learning_rate": 0.0006154538660144041, "loss": 3.369, "step": 45295 }, { "epoch": 3.0778638401956786, "grad_norm": 2.0458273887634277, "learning_rate": 0.0006154114010055715, "loss": 3.5045, "step": 45300 }, { "epoch": 3.0782035602663407, "grad_norm": 1.6230829954147339, "learning_rate": 0.0006153689359967387, "loss": 3.4116, "step": 45305 }, { "epoch": 3.0785432803370023, "grad_norm": 2.502563714981079, "learning_rate": 0.0006153264709879059, "loss": 3.5292, "step": 45310 }, { "epoch": 3.078883000407664, "grad_norm": 1.5376287698745728, "learning_rate": 0.0006152840059790733, "loss": 3.5692, "step": 45315 }, { "epoch": 3.079222720478326, "grad_norm": 2.1776647567749023, "learning_rate": 0.0006152415409702406, "loss": 3.367, "step": 45320 }, { "epoch": 3.0795624405489876, "grad_norm": 1.6812329292297363, "learning_rate": 0.0006151990759614078, "loss": 3.4245, "step": 45325 }, { "epoch": 3.0799021606196493, "grad_norm": 1.8624430894851685, "learning_rate": 0.0006151566109525752, "loss": 3.4874, "step": 45330 }, { "epoch": 3.0802418806903114, "grad_norm": 2.2126522064208984, "learning_rate": 0.0006151141459437424, "loss": 3.452, "step": 45335 }, { "epoch": 3.080581600760973, "grad_norm": 1.8482669591903687, "learning_rate": 0.0006150716809349096, "loss": 3.2603, "step": 45340 }, { "epoch": 3.0809213208316346, "grad_norm": 1.9650732278823853, "learning_rate": 0.0006150292159260769, "loss": 3.6001, "step": 45345 }, { "epoch": 3.0812610409022967, "grad_norm": 2.2597439289093018, "learning_rate": 0.0006149867509172442, "loss": 3.4559, "step": 45350 }, { "epoch": 3.0816007609729583, "grad_norm": 2.631051778793335, "learning_rate": 0.0006149442859084115, "loss": 3.4405, "step": 45355 
}, { "epoch": 3.08194048104362, "grad_norm": 2.281184434890747, "learning_rate": 0.0006149018208995788, "loss": 3.5686, "step": 45360 }, { "epoch": 3.082280201114282, "grad_norm": 1.9922394752502441, "learning_rate": 0.0006148593558907461, "loss": 3.4554, "step": 45365 }, { "epoch": 3.0826199211849437, "grad_norm": 2.1458542346954346, "learning_rate": 0.0006148168908819133, "loss": 3.4259, "step": 45370 }, { "epoch": 3.0829596412556053, "grad_norm": 1.9779623746871948, "learning_rate": 0.0006147744258730806, "loss": 3.1058, "step": 45375 }, { "epoch": 3.0832993613262674, "grad_norm": 2.1971371173858643, "learning_rate": 0.0006147319608642478, "loss": 3.4791, "step": 45380 }, { "epoch": 3.083639081396929, "grad_norm": 1.7927110195159912, "learning_rate": 0.0006146894958554151, "loss": 3.3987, "step": 45385 }, { "epoch": 3.0839788014675906, "grad_norm": 2.4195022583007812, "learning_rate": 0.0006146470308465825, "loss": 3.5162, "step": 45390 }, { "epoch": 3.0843185215382523, "grad_norm": 1.6830992698669434, "learning_rate": 0.0006146045658377497, "loss": 3.5543, "step": 45395 }, { "epoch": 3.0846582416089143, "grad_norm": 2.057947874069214, "learning_rate": 0.000614562100828917, "loss": 3.4414, "step": 45400 }, { "epoch": 3.084997961679576, "grad_norm": 2.746680736541748, "learning_rate": 0.0006145196358200843, "loss": 3.3347, "step": 45405 }, { "epoch": 3.0853376817502376, "grad_norm": 2.6207709312438965, "learning_rate": 0.0006144771708112515, "loss": 3.2882, "step": 45410 }, { "epoch": 3.0856774018208997, "grad_norm": 2.104421615600586, "learning_rate": 0.0006144347058024188, "loss": 3.2398, "step": 45415 }, { "epoch": 3.0860171218915613, "grad_norm": 1.9243069887161255, "learning_rate": 0.0006143922407935861, "loss": 3.3488, "step": 45420 }, { "epoch": 3.086356841962223, "grad_norm": 2.104055643081665, "learning_rate": 0.0006143497757847534, "loss": 3.4501, "step": 45425 }, { "epoch": 3.086696562032885, "grad_norm": 2.1196374893188477, "learning_rate": 
0.0006143073107759206, "loss": 3.481, "step": 45430 }, { "epoch": 3.0870362821035466, "grad_norm": 1.7072354555130005, "learning_rate": 0.000614264845767088, "loss": 3.4796, "step": 45435 }, { "epoch": 3.0873760021742083, "grad_norm": 1.602037787437439, "learning_rate": 0.0006142223807582552, "loss": 3.3434, "step": 45440 }, { "epoch": 3.0877157222448703, "grad_norm": 2.2141404151916504, "learning_rate": 0.0006141799157494224, "loss": 3.4298, "step": 45445 }, { "epoch": 3.088055442315532, "grad_norm": 1.983786940574646, "learning_rate": 0.0006141374507405898, "loss": 3.7962, "step": 45450 }, { "epoch": 3.0883951623861936, "grad_norm": 1.7672022581100464, "learning_rate": 0.000614094985731757, "loss": 3.3367, "step": 45455 }, { "epoch": 3.0887348824568557, "grad_norm": 2.1450998783111572, "learning_rate": 0.0006140525207229243, "loss": 3.3942, "step": 45460 }, { "epoch": 3.0890746025275173, "grad_norm": 1.9066559076309204, "learning_rate": 0.0006140100557140917, "loss": 3.2785, "step": 45465 }, { "epoch": 3.089414322598179, "grad_norm": 1.7556657791137695, "learning_rate": 0.0006139675907052589, "loss": 3.3461, "step": 45470 }, { "epoch": 3.089754042668841, "grad_norm": 2.2673425674438477, "learning_rate": 0.0006139251256964261, "loss": 3.2705, "step": 45475 }, { "epoch": 3.0900937627395026, "grad_norm": 1.8501089811325073, "learning_rate": 0.0006138826606875934, "loss": 3.2678, "step": 45480 }, { "epoch": 3.0904334828101643, "grad_norm": 1.724615216255188, "learning_rate": 0.0006138401956787607, "loss": 3.3372, "step": 45485 }, { "epoch": 3.0907732028808264, "grad_norm": 1.8448102474212646, "learning_rate": 0.0006137977306699279, "loss": 3.6234, "step": 45490 }, { "epoch": 3.091112922951488, "grad_norm": 1.985400676727295, "learning_rate": 0.0006137552656610953, "loss": 3.3396, "step": 45495 }, { "epoch": 3.0914526430221496, "grad_norm": 2.2301676273345947, "learning_rate": 0.0006137128006522626, "loss": 3.3526, "step": 45500 }, { "epoch": 3.0917923630928117, 
"grad_norm": 2.5586464405059814, "learning_rate": 0.0006136703356434298, "loss": 3.3549, "step": 45505 }, { "epoch": 3.0921320831634733, "grad_norm": 2.0494351387023926, "learning_rate": 0.0006136278706345971, "loss": 3.5606, "step": 45510 }, { "epoch": 3.092471803234135, "grad_norm": 1.8548598289489746, "learning_rate": 0.0006135854056257644, "loss": 3.5868, "step": 45515 }, { "epoch": 3.092811523304797, "grad_norm": 1.3718417882919312, "learning_rate": 0.0006135429406169316, "loss": 3.63, "step": 45520 }, { "epoch": 3.0931512433754587, "grad_norm": 2.012566089630127, "learning_rate": 0.0006135004756080989, "loss": 3.4635, "step": 45525 }, { "epoch": 3.0934909634461203, "grad_norm": 1.9171521663665771, "learning_rate": 0.0006134580105992663, "loss": 3.5913, "step": 45530 }, { "epoch": 3.0938306835167824, "grad_norm": 2.2141778469085693, "learning_rate": 0.0006134155455904335, "loss": 3.4215, "step": 45535 }, { "epoch": 3.094170403587444, "grad_norm": 1.42699134349823, "learning_rate": 0.0006133730805816008, "loss": 3.4611, "step": 45540 }, { "epoch": 3.0945101236581056, "grad_norm": 1.6275830268859863, "learning_rate": 0.000613330615572768, "loss": 3.6281, "step": 45545 }, { "epoch": 3.0948498437287677, "grad_norm": 2.0368194580078125, "learning_rate": 0.0006132881505639353, "loss": 3.3796, "step": 45550 }, { "epoch": 3.0951895637994293, "grad_norm": 1.8042266368865967, "learning_rate": 0.0006132456855551026, "loss": 3.5566, "step": 45555 }, { "epoch": 3.095529283870091, "grad_norm": 1.9606857299804688, "learning_rate": 0.0006132032205462698, "loss": 3.5314, "step": 45560 }, { "epoch": 3.095869003940753, "grad_norm": 2.1372122764587402, "learning_rate": 0.0006131607555374372, "loss": 3.4576, "step": 45565 }, { "epoch": 3.0962087240114147, "grad_norm": 1.9265056848526, "learning_rate": 0.0006131182905286045, "loss": 3.4492, "step": 45570 }, { "epoch": 3.0965484440820763, "grad_norm": 1.6784273386001587, "learning_rate": 0.0006130758255197717, "loss": 3.3747, 
"step": 45575 }, { "epoch": 3.096888164152738, "grad_norm": 1.8943012952804565, "learning_rate": 0.000613033360510939, "loss": 3.186, "step": 45580 }, { "epoch": 3.0972278842234, "grad_norm": 1.9162840843200684, "learning_rate": 0.0006129908955021063, "loss": 3.6459, "step": 45585 }, { "epoch": 3.0975676042940616, "grad_norm": 1.9340413808822632, "learning_rate": 0.0006129484304932735, "loss": 3.3208, "step": 45590 }, { "epoch": 3.0979073243647233, "grad_norm": 1.461920142173767, "learning_rate": 0.0006129059654844408, "loss": 3.3621, "step": 45595 }, { "epoch": 3.0982470444353853, "grad_norm": 2.0980169773101807, "learning_rate": 0.0006128635004756082, "loss": 3.2578, "step": 45600 }, { "epoch": 3.098586764506047, "grad_norm": 1.3268171548843384, "learning_rate": 0.0006128210354667754, "loss": 3.2896, "step": 45605 }, { "epoch": 3.0989264845767086, "grad_norm": 1.8928534984588623, "learning_rate": 0.0006127785704579427, "loss": 3.2672, "step": 45610 }, { "epoch": 3.0992662046473707, "grad_norm": 2.0789687633514404, "learning_rate": 0.00061273610544911, "loss": 3.2651, "step": 45615 }, { "epoch": 3.0996059247180323, "grad_norm": 1.6985546350479126, "learning_rate": 0.0006126936404402772, "loss": 3.4827, "step": 45620 }, { "epoch": 3.099945644788694, "grad_norm": 2.873655319213867, "learning_rate": 0.0006126511754314445, "loss": 3.1869, "step": 45625 }, { "epoch": 3.100285364859356, "grad_norm": 1.9194880723953247, "learning_rate": 0.0006126087104226117, "loss": 3.3202, "step": 45630 }, { "epoch": 3.1006250849300176, "grad_norm": 2.109572649002075, "learning_rate": 0.0006125662454137791, "loss": 3.5182, "step": 45635 }, { "epoch": 3.1009648050006793, "grad_norm": 1.7898308038711548, "learning_rate": 0.0006125237804049464, "loss": 3.1451, "step": 45640 }, { "epoch": 3.1013045250713414, "grad_norm": 2.202420949935913, "learning_rate": 0.0006124813153961136, "loss": 3.4665, "step": 45645 }, { "epoch": 3.101644245142003, "grad_norm": 2.126514196395874, "learning_rate": 
0.0006124388503872809, "loss": 3.3412, "step": 45650 }, { "epoch": 3.1019839652126646, "grad_norm": 1.9844192266464233, "learning_rate": 0.0006123963853784482, "loss": 3.4089, "step": 45655 }, { "epoch": 3.1023236852833267, "grad_norm": 2.516287326812744, "learning_rate": 0.0006123539203696154, "loss": 3.684, "step": 45660 }, { "epoch": 3.1026634053539883, "grad_norm": 2.3413267135620117, "learning_rate": 0.0006123114553607826, "loss": 3.337, "step": 45665 }, { "epoch": 3.10300312542465, "grad_norm": 1.9734584093093872, "learning_rate": 0.0006122689903519501, "loss": 3.3175, "step": 45670 }, { "epoch": 3.103342845495312, "grad_norm": 1.8013954162597656, "learning_rate": 0.0006122265253431173, "loss": 3.0602, "step": 45675 }, { "epoch": 3.1036825655659737, "grad_norm": 2.003417491912842, "learning_rate": 0.0006121840603342845, "loss": 3.5109, "step": 45680 }, { "epoch": 3.1040222856366353, "grad_norm": 1.625849962234497, "learning_rate": 0.0006121415953254519, "loss": 3.5892, "step": 45685 }, { "epoch": 3.1043620057072974, "grad_norm": 2.2210047245025635, "learning_rate": 0.0006120991303166191, "loss": 3.338, "step": 45690 }, { "epoch": 3.104701725777959, "grad_norm": 2.458652973175049, "learning_rate": 0.0006120566653077863, "loss": 3.4164, "step": 45695 }, { "epoch": 3.1050414458486206, "grad_norm": 2.2806317806243896, "learning_rate": 0.0006120142002989537, "loss": 3.6929, "step": 45700 }, { "epoch": 3.1053811659192827, "grad_norm": 2.5018389225006104, "learning_rate": 0.000611971735290121, "loss": 3.4879, "step": 45705 }, { "epoch": 3.1057208859899443, "grad_norm": 2.314851760864258, "learning_rate": 0.0006119292702812882, "loss": 3.5603, "step": 45710 }, { "epoch": 3.106060606060606, "grad_norm": 2.2001450061798096, "learning_rate": 0.0006118868052724556, "loss": 3.3429, "step": 45715 }, { "epoch": 3.106400326131268, "grad_norm": 1.64515221118927, "learning_rate": 0.0006118443402636228, "loss": 3.3453, "step": 45720 }, { "epoch": 3.1067400462019297, 
"grad_norm": 1.593125820159912, "learning_rate": 0.00061180187525479, "loss": 3.4123, "step": 45725 }, { "epoch": 3.1070797662725913, "grad_norm": 1.6801568269729614, "learning_rate": 0.0006117594102459573, "loss": 3.29, "step": 45730 }, { "epoch": 3.107419486343253, "grad_norm": 2.035670280456543, "learning_rate": 0.0006117169452371246, "loss": 3.3723, "step": 45735 }, { "epoch": 3.107759206413915, "grad_norm": 2.46874737739563, "learning_rate": 0.0006116744802282919, "loss": 3.4942, "step": 45740 }, { "epoch": 3.1080989264845766, "grad_norm": 2.0744593143463135, "learning_rate": 0.0006116320152194592, "loss": 3.5348, "step": 45745 }, { "epoch": 3.1084386465552383, "grad_norm": 2.039886474609375, "learning_rate": 0.0006115895502106265, "loss": 3.5818, "step": 45750 }, { "epoch": 3.1087783666259003, "grad_norm": 1.9451794624328613, "learning_rate": 0.0006115470852017937, "loss": 3.352, "step": 45755 }, { "epoch": 3.109118086696562, "grad_norm": 2.112020969390869, "learning_rate": 0.000611504620192961, "loss": 3.4842, "step": 45760 }, { "epoch": 3.1094578067672236, "grad_norm": 2.037158489227295, "learning_rate": 0.0006114621551841283, "loss": 3.2923, "step": 45765 }, { "epoch": 3.1097975268378857, "grad_norm": 1.9952319860458374, "learning_rate": 0.0006114196901752955, "loss": 3.39, "step": 45770 }, { "epoch": 3.1101372469085473, "grad_norm": 1.809239149093628, "learning_rate": 0.0006113772251664629, "loss": 3.4398, "step": 45775 }, { "epoch": 3.110476966979209, "grad_norm": 1.5586154460906982, "learning_rate": 0.0006113347601576301, "loss": 3.5146, "step": 45780 }, { "epoch": 3.110816687049871, "grad_norm": 2.0210134983062744, "learning_rate": 0.0006112922951487974, "loss": 3.5297, "step": 45785 }, { "epoch": 3.1111564071205327, "grad_norm": 1.7963565587997437, "learning_rate": 0.0006112498301399647, "loss": 3.7289, "step": 45790 }, { "epoch": 3.1114961271911943, "grad_norm": 2.581242322921753, "learning_rate": 0.0006112073651311319, "loss": 3.6092, "step": 45795 
}, { "epoch": 3.1118358472618564, "grad_norm": 1.9972606897354126, "learning_rate": 0.0006111649001222992, "loss": 3.6043, "step": 45800 }, { "epoch": 3.112175567332518, "grad_norm": 2.1144704818725586, "learning_rate": 0.0006111224351134666, "loss": 3.4357, "step": 45805 }, { "epoch": 3.1125152874031796, "grad_norm": 1.6649256944656372, "learning_rate": 0.0006110799701046338, "loss": 3.2329, "step": 45810 }, { "epoch": 3.1128550074738417, "grad_norm": 1.6552588939666748, "learning_rate": 0.000611037505095801, "loss": 3.3575, "step": 45815 }, { "epoch": 3.1131947275445033, "grad_norm": 2.1110455989837646, "learning_rate": 0.0006109950400869684, "loss": 3.6298, "step": 45820 }, { "epoch": 3.113534447615165, "grad_norm": 2.0672965049743652, "learning_rate": 0.0006109525750781356, "loss": 3.3081, "step": 45825 }, { "epoch": 3.113874167685827, "grad_norm": 1.4442535638809204, "learning_rate": 0.0006109101100693028, "loss": 3.5825, "step": 45830 }, { "epoch": 3.1142138877564887, "grad_norm": 1.9427340030670166, "learning_rate": 0.0006108676450604702, "loss": 3.4639, "step": 45835 }, { "epoch": 3.1145536078271503, "grad_norm": 1.8161559104919434, "learning_rate": 0.0006108251800516375, "loss": 3.4185, "step": 45840 }, { "epoch": 3.1148933278978124, "grad_norm": 1.7379372119903564, "learning_rate": 0.0006107827150428047, "loss": 3.4933, "step": 45845 }, { "epoch": 3.115233047968474, "grad_norm": 1.8888925313949585, "learning_rate": 0.0006107402500339721, "loss": 3.4934, "step": 45850 }, { "epoch": 3.1155727680391356, "grad_norm": 2.5882086753845215, "learning_rate": 0.0006106977850251393, "loss": 3.4381, "step": 45855 }, { "epoch": 3.1159124881097977, "grad_norm": 2.015817165374756, "learning_rate": 0.0006106553200163065, "loss": 3.3121, "step": 45860 }, { "epoch": 3.1162522081804593, "grad_norm": 1.5397025346755981, "learning_rate": 0.0006106128550074739, "loss": 3.4548, "step": 45865 }, { "epoch": 3.116591928251121, "grad_norm": 2.2151527404785156, "learning_rate": 
0.0006105703899986411, "loss": 3.2642, "step": 45870 }, { "epoch": 3.116931648321783, "grad_norm": 1.6736114025115967, "learning_rate": 0.0006105279249898084, "loss": 3.4105, "step": 45875 }, { "epoch": 3.1172713683924447, "grad_norm": 1.5965497493743896, "learning_rate": 0.0006104854599809757, "loss": 3.6237, "step": 45880 }, { "epoch": 3.1176110884631063, "grad_norm": 1.8954558372497559, "learning_rate": 0.000610442994972143, "loss": 3.4579, "step": 45885 }, { "epoch": 3.1179508085337684, "grad_norm": 2.1568074226379395, "learning_rate": 0.0006104005299633102, "loss": 3.5961, "step": 45890 }, { "epoch": 3.11829052860443, "grad_norm": 1.9919242858886719, "learning_rate": 0.0006103580649544775, "loss": 3.414, "step": 45895 }, { "epoch": 3.1186302486750916, "grad_norm": 2.5319342613220215, "learning_rate": 0.0006103155999456448, "loss": 3.5992, "step": 45900 }, { "epoch": 3.1189699687457537, "grad_norm": 1.683001160621643, "learning_rate": 0.000610273134936812, "loss": 3.5176, "step": 45905 }, { "epoch": 3.1193096888164153, "grad_norm": 2.391507625579834, "learning_rate": 0.0006102306699279794, "loss": 3.5019, "step": 45910 }, { "epoch": 3.119649408887077, "grad_norm": 1.8952208757400513, "learning_rate": 0.0006101882049191467, "loss": 3.5159, "step": 45915 }, { "epoch": 3.1199891289577386, "grad_norm": 1.8458836078643799, "learning_rate": 0.000610145739910314, "loss": 3.3923, "step": 45920 }, { "epoch": 3.1203288490284007, "grad_norm": 1.7357523441314697, "learning_rate": 0.0006101032749014812, "loss": 3.2031, "step": 45925 }, { "epoch": 3.1206685690990623, "grad_norm": 1.9028205871582031, "learning_rate": 0.0006100608098926484, "loss": 3.3556, "step": 45930 }, { "epoch": 3.121008289169724, "grad_norm": 2.1561641693115234, "learning_rate": 0.0006100183448838158, "loss": 3.44, "step": 45935 }, { "epoch": 3.121348009240386, "grad_norm": 2.1003105640411377, "learning_rate": 0.000609975879874983, "loss": 3.3129, "step": 45940 }, { "epoch": 3.1216877293110477, 
"grad_norm": 2.0340378284454346, "learning_rate": 0.0006099334148661503, "loss": 3.5367, "step": 45945 }, { "epoch": 3.1220274493817093, "grad_norm": 1.8847748041152954, "learning_rate": 0.0006098909498573177, "loss": 3.5147, "step": 45950 }, { "epoch": 3.1223671694523714, "grad_norm": 1.8266880512237549, "learning_rate": 0.0006098484848484849, "loss": 3.1891, "step": 45955 }, { "epoch": 3.122706889523033, "grad_norm": 2.0748424530029297, "learning_rate": 0.0006098060198396521, "loss": 3.5258, "step": 45960 }, { "epoch": 3.1230466095936946, "grad_norm": 1.65531325340271, "learning_rate": 0.0006097635548308195, "loss": 3.3085, "step": 45965 }, { "epoch": 3.1233863296643567, "grad_norm": 2.522783041000366, "learning_rate": 0.0006097210898219867, "loss": 3.4271, "step": 45970 }, { "epoch": 3.1237260497350183, "grad_norm": 2.38778018951416, "learning_rate": 0.0006096786248131539, "loss": 3.4673, "step": 45975 }, { "epoch": 3.12406576980568, "grad_norm": 1.5941482782363892, "learning_rate": 0.0006096361598043213, "loss": 3.294, "step": 45980 }, { "epoch": 3.124405489876342, "grad_norm": 1.6259955167770386, "learning_rate": 0.0006095936947954886, "loss": 3.5865, "step": 45985 }, { "epoch": 3.1247452099470037, "grad_norm": 1.6317861080169678, "learning_rate": 0.0006095512297866558, "loss": 3.2309, "step": 45990 }, { "epoch": 3.1250849300176653, "grad_norm": 1.7134010791778564, "learning_rate": 0.0006095087647778231, "loss": 3.5151, "step": 45995 }, { "epoch": 3.1254246500883274, "grad_norm": 1.7730333805084229, "learning_rate": 0.0006094662997689904, "loss": 3.68, "step": 46000 }, { "epoch": 3.125764370158989, "grad_norm": 1.7681587934494019, "learning_rate": 0.0006094238347601576, "loss": 3.2744, "step": 46005 }, { "epoch": 3.1261040902296506, "grad_norm": 1.738876461982727, "learning_rate": 0.0006093813697513249, "loss": 3.5088, "step": 46010 }, { "epoch": 3.1264438103003127, "grad_norm": 2.054438352584839, "learning_rate": 0.0006093389047424923, "loss": 3.35, "step": 
46015 }, { "epoch": 3.1267835303709743, "grad_norm": 2.075803756713867, "learning_rate": 0.0006092964397336595, "loss": 3.4644, "step": 46020 }, { "epoch": 3.127123250441636, "grad_norm": 1.6011666059494019, "learning_rate": 0.0006092539747248268, "loss": 3.3258, "step": 46025 }, { "epoch": 3.127462970512298, "grad_norm": 2.1206395626068115, "learning_rate": 0.000609211509715994, "loss": 3.4716, "step": 46030 }, { "epoch": 3.1278026905829597, "grad_norm": 2.1293249130249023, "learning_rate": 0.0006091690447071613, "loss": 3.2364, "step": 46035 }, { "epoch": 3.1281424106536213, "grad_norm": 2.662228584289551, "learning_rate": 0.0006091265796983286, "loss": 3.4993, "step": 46040 }, { "epoch": 3.1284821307242834, "grad_norm": 1.9369388818740845, "learning_rate": 0.0006090841146894958, "loss": 3.2961, "step": 46045 }, { "epoch": 3.128821850794945, "grad_norm": 2.297208547592163, "learning_rate": 0.0006090416496806632, "loss": 3.5064, "step": 46050 }, { "epoch": 3.1291615708656066, "grad_norm": 1.8756977319717407, "learning_rate": 0.0006089991846718305, "loss": 3.3291, "step": 46055 }, { "epoch": 3.1295012909362687, "grad_norm": 1.7491443157196045, "learning_rate": 0.0006089567196629977, "loss": 3.43, "step": 46060 }, { "epoch": 3.1298410110069304, "grad_norm": 2.317493438720703, "learning_rate": 0.000608914254654165, "loss": 3.5121, "step": 46065 }, { "epoch": 3.130180731077592, "grad_norm": 1.8529564142227173, "learning_rate": 0.0006088717896453323, "loss": 3.7023, "step": 46070 }, { "epoch": 3.1305204511482536, "grad_norm": 2.7594614028930664, "learning_rate": 0.0006088293246364995, "loss": 3.3991, "step": 46075 }, { "epoch": 3.1308601712189157, "grad_norm": 2.1382861137390137, "learning_rate": 0.0006087868596276667, "loss": 3.3303, "step": 46080 }, { "epoch": 3.1311998912895773, "grad_norm": 1.6785409450531006, "learning_rate": 0.0006087443946188342, "loss": 3.274, "step": 46085 }, { "epoch": 3.131539611360239, "grad_norm": 1.7565412521362305, "learning_rate": 
0.0006087019296100014, "loss": 3.6846, "step": 46090 }, { "epoch": 3.131879331430901, "grad_norm": 1.6210932731628418, "learning_rate": 0.0006086594646011686, "loss": 3.6451, "step": 46095 }, { "epoch": 3.1322190515015627, "grad_norm": 1.694029688835144, "learning_rate": 0.000608616999592336, "loss": 3.6139, "step": 46100 }, { "epoch": 3.1325587715722243, "grad_norm": 2.8056890964508057, "learning_rate": 0.0006085745345835032, "loss": 3.7604, "step": 46105 }, { "epoch": 3.1328984916428864, "grad_norm": 2.047269821166992, "learning_rate": 0.0006085320695746704, "loss": 3.5182, "step": 46110 }, { "epoch": 3.133238211713548, "grad_norm": 2.2754194736480713, "learning_rate": 0.0006084980975676043, "loss": 3.2627, "step": 46115 }, { "epoch": 3.1335779317842096, "grad_norm": 2.1647608280181885, "learning_rate": 0.0006084556325587716, "loss": 3.3956, "step": 46120 }, { "epoch": 3.1339176518548717, "grad_norm": 1.9490015506744385, "learning_rate": 0.0006084131675499389, "loss": 3.7023, "step": 46125 }, { "epoch": 3.1342573719255333, "grad_norm": 1.6421921253204346, "learning_rate": 0.0006083707025411061, "loss": 3.093, "step": 46130 }, { "epoch": 3.134597091996195, "grad_norm": 2.217076539993286, "learning_rate": 0.0006083282375322734, "loss": 3.4149, "step": 46135 }, { "epoch": 3.134936812066857, "grad_norm": 1.6466963291168213, "learning_rate": 0.0006082857725234408, "loss": 3.1662, "step": 46140 }, { "epoch": 3.1352765321375187, "grad_norm": 2.301023244857788, "learning_rate": 0.000608243307514608, "loss": 3.4562, "step": 46145 }, { "epoch": 3.1356162522081803, "grad_norm": 2.3734493255615234, "learning_rate": 0.0006082008425057753, "loss": 3.4372, "step": 46150 }, { "epoch": 3.1359559722788424, "grad_norm": 1.610697627067566, "learning_rate": 0.0006081583774969426, "loss": 3.5356, "step": 46155 }, { "epoch": 3.136295692349504, "grad_norm": 2.0045552253723145, "learning_rate": 0.0006081159124881098, "loss": 3.3523, "step": 46160 }, { "epoch": 3.1366354124201656, 
"grad_norm": 2.143048048019409, "learning_rate": 0.000608073447479277, "loss": 3.5388, "step": 46165 }, { "epoch": 3.1369751324908277, "grad_norm": 1.6462647914886475, "learning_rate": 0.0006080309824704444, "loss": 3.4606, "step": 46170 }, { "epoch": 3.1373148525614893, "grad_norm": 2.139655828475952, "learning_rate": 0.0006079885174616117, "loss": 3.5509, "step": 46175 }, { "epoch": 3.137654572632151, "grad_norm": 2.2806344032287598, "learning_rate": 0.0006079460524527789, "loss": 3.5087, "step": 46180 }, { "epoch": 3.137994292702813, "grad_norm": 1.930222511291504, "learning_rate": 0.0006079035874439463, "loss": 3.4885, "step": 46185 }, { "epoch": 3.1383340127734747, "grad_norm": 2.2930748462677, "learning_rate": 0.0006078611224351135, "loss": 3.6518, "step": 46190 }, { "epoch": 3.1386737328441363, "grad_norm": 1.7837855815887451, "learning_rate": 0.0006078186574262807, "loss": 3.7138, "step": 46195 }, { "epoch": 3.1390134529147984, "grad_norm": 2.538649082183838, "learning_rate": 0.000607776192417448, "loss": 3.4304, "step": 46200 }, { "epoch": 3.13935317298546, "grad_norm": 2.170504331588745, "learning_rate": 0.0006077337274086153, "loss": 3.3458, "step": 46205 }, { "epoch": 3.1396928930561216, "grad_norm": 2.108042001724243, "learning_rate": 0.0006076912623997826, "loss": 3.2927, "step": 46210 }, { "epoch": 3.1400326131267837, "grad_norm": 1.8933132886886597, "learning_rate": 0.00060764879739095, "loss": 3.7682, "step": 46215 }, { "epoch": 3.1403723331974454, "grad_norm": 1.6355077028274536, "learning_rate": 0.0006076063323821172, "loss": 3.4506, "step": 46220 }, { "epoch": 3.140712053268107, "grad_norm": 1.7695293426513672, "learning_rate": 0.0006075638673732844, "loss": 3.4668, "step": 46225 }, { "epoch": 3.141051773338769, "grad_norm": 2.120692014694214, "learning_rate": 0.0006075214023644517, "loss": 3.2786, "step": 46230 }, { "epoch": 3.1413914934094307, "grad_norm": 2.3571817874908447, "learning_rate": 0.000607478937355619, "loss": 3.3728, "step": 46235 
}, { "epoch": 3.1417312134800923, "grad_norm": 1.5617185831069946, "learning_rate": 0.0006074364723467862, "loss": 3.3997, "step": 46240 }, { "epoch": 3.1420709335507544, "grad_norm": 2.7203264236450195, "learning_rate": 0.0006073940073379536, "loss": 3.3771, "step": 46245 }, { "epoch": 3.142410653621416, "grad_norm": 1.8481450080871582, "learning_rate": 0.0006073515423291209, "loss": 3.5122, "step": 46250 }, { "epoch": 3.1427503736920777, "grad_norm": 2.6369681358337402, "learning_rate": 0.0006073090773202881, "loss": 3.4333, "step": 46255 }, { "epoch": 3.1430900937627397, "grad_norm": 2.2531745433807373, "learning_rate": 0.0006072666123114554, "loss": 3.4804, "step": 46260 }, { "epoch": 3.1434298138334014, "grad_norm": 2.2552974224090576, "learning_rate": 0.0006072241473026226, "loss": 3.4765, "step": 46265 }, { "epoch": 3.143769533904063, "grad_norm": 2.6174821853637695, "learning_rate": 0.0006071816822937899, "loss": 3.6747, "step": 46270 }, { "epoch": 3.1441092539747246, "grad_norm": 2.2156877517700195, "learning_rate": 0.0006071392172849572, "loss": 3.6499, "step": 46275 }, { "epoch": 3.1444489740453867, "grad_norm": 1.771356463432312, "learning_rate": 0.0006070967522761245, "loss": 3.4149, "step": 46280 }, { "epoch": 3.1447886941160483, "grad_norm": 2.109560966491699, "learning_rate": 0.0006070542872672918, "loss": 3.5006, "step": 46285 }, { "epoch": 3.14512841418671, "grad_norm": 1.5639950037002563, "learning_rate": 0.0006070118222584591, "loss": 3.4854, "step": 46290 }, { "epoch": 3.145468134257372, "grad_norm": 1.9849371910095215, "learning_rate": 0.0006069693572496263, "loss": 3.3652, "step": 46295 }, { "epoch": 3.1458078543280337, "grad_norm": 2.0257678031921387, "learning_rate": 0.0006069268922407935, "loss": 3.6443, "step": 46300 }, { "epoch": 3.1461475743986953, "grad_norm": 2.181265354156494, "learning_rate": 0.0006068844272319609, "loss": 3.6177, "step": 46305 }, { "epoch": 3.1464872944693574, "grad_norm": 2.0352373123168945, "learning_rate": 
0.0006068419622231281, "loss": 3.4529, "step": 46310 }, { "epoch": 3.146827014540019, "grad_norm": 1.6725280284881592, "learning_rate": 0.0006067994972142954, "loss": 3.2752, "step": 46315 }, { "epoch": 3.1471667346106806, "grad_norm": 1.9800407886505127, "learning_rate": 0.0006067570322054628, "loss": 3.2428, "step": 46320 }, { "epoch": 3.1475064546813427, "grad_norm": 2.8844003677368164, "learning_rate": 0.00060671456719663, "loss": 3.7331, "step": 46325 }, { "epoch": 3.1478461747520043, "grad_norm": 1.8440651893615723, "learning_rate": 0.0006066721021877972, "loss": 3.6083, "step": 46330 }, { "epoch": 3.148185894822666, "grad_norm": 2.276073694229126, "learning_rate": 0.0006066296371789646, "loss": 3.5307, "step": 46335 }, { "epoch": 3.148525614893328, "grad_norm": 2.0800838470458984, "learning_rate": 0.0006065871721701318, "loss": 3.3918, "step": 46340 }, { "epoch": 3.1488653349639897, "grad_norm": 2.0527093410491943, "learning_rate": 0.000606544707161299, "loss": 3.4726, "step": 46345 }, { "epoch": 3.1492050550346513, "grad_norm": 1.6609469652175903, "learning_rate": 0.0006065022421524665, "loss": 3.387, "step": 46350 }, { "epoch": 3.1495447751053134, "grad_norm": 2.1231932640075684, "learning_rate": 0.0006064597771436337, "loss": 3.3596, "step": 46355 }, { "epoch": 3.149884495175975, "grad_norm": 1.9496066570281982, "learning_rate": 0.0006064173121348009, "loss": 3.5529, "step": 46360 }, { "epoch": 3.1502242152466366, "grad_norm": 1.6957215070724487, "learning_rate": 0.0006063748471259682, "loss": 3.6988, "step": 46365 }, { "epoch": 3.1505639353172987, "grad_norm": 2.382507085800171, "learning_rate": 0.0006063323821171355, "loss": 3.2762, "step": 46370 }, { "epoch": 3.1509036553879604, "grad_norm": 1.5734654664993286, "learning_rate": 0.0006062899171083027, "loss": 3.3528, "step": 46375 }, { "epoch": 3.151243375458622, "grad_norm": 1.8526537418365479, "learning_rate": 0.00060624745209947, "loss": 3.3074, "step": 46380 }, { "epoch": 3.151583095529284, 
"grad_norm": 1.7324395179748535, "learning_rate": 0.0006062049870906374, "loss": 3.473, "step": 46385 }, { "epoch": 3.1519228155999457, "grad_norm": 1.601330280303955, "learning_rate": 0.0006061625220818046, "loss": 3.4299, "step": 46390 }, { "epoch": 3.1522625356706073, "grad_norm": 1.8551836013793945, "learning_rate": 0.0006061200570729719, "loss": 3.5062, "step": 46395 }, { "epoch": 3.1526022557412694, "grad_norm": 2.0245792865753174, "learning_rate": 0.0006060775920641391, "loss": 3.6808, "step": 46400 }, { "epoch": 3.152941975811931, "grad_norm": 3.047545909881592, "learning_rate": 0.0006060351270553064, "loss": 3.4372, "step": 46405 }, { "epoch": 3.1532816958825927, "grad_norm": 1.4346318244934082, "learning_rate": 0.0006059926620464737, "loss": 3.4541, "step": 46410 }, { "epoch": 3.1536214159532543, "grad_norm": 1.7921096086502075, "learning_rate": 0.0006059501970376409, "loss": 3.3402, "step": 46415 }, { "epoch": 3.1539611360239164, "grad_norm": 2.0153121948242188, "learning_rate": 0.0006059077320288083, "loss": 3.4601, "step": 46420 }, { "epoch": 3.154300856094578, "grad_norm": 2.325359582901001, "learning_rate": 0.0006058652670199756, "loss": 3.6594, "step": 46425 }, { "epoch": 3.1546405761652396, "grad_norm": 1.6747119426727295, "learning_rate": 0.0006058228020111428, "loss": 3.5218, "step": 46430 }, { "epoch": 3.1549802962359017, "grad_norm": 2.0612142086029053, "learning_rate": 0.0006057803370023101, "loss": 3.2994, "step": 46435 }, { "epoch": 3.1553200163065633, "grad_norm": 2.133486032485962, "learning_rate": 0.0006057378719934774, "loss": 3.4645, "step": 46440 }, { "epoch": 3.155659736377225, "grad_norm": 1.3386461734771729, "learning_rate": 0.0006056954069846446, "loss": 3.5076, "step": 46445 }, { "epoch": 3.155999456447887, "grad_norm": 1.528611183166504, "learning_rate": 0.0006056529419758118, "loss": 3.6226, "step": 46450 }, { "epoch": 3.1563391765185487, "grad_norm": 1.840536117553711, "learning_rate": 0.0006056104769669793, "loss": 3.648, 
"step": 46455 }, { "epoch": 3.1566788965892103, "grad_norm": 2.3783762454986572, "learning_rate": 0.0006055680119581465, "loss": 3.3135, "step": 46460 }, { "epoch": 3.1570186166598724, "grad_norm": 1.41673743724823, "learning_rate": 0.0006055255469493138, "loss": 3.3502, "step": 46465 }, { "epoch": 3.157358336730534, "grad_norm": 2.2998013496398926, "learning_rate": 0.0006054830819404811, "loss": 3.6853, "step": 46470 }, { "epoch": 3.1576980568011956, "grad_norm": 1.6820735931396484, "learning_rate": 0.0006054406169316483, "loss": 3.2563, "step": 46475 }, { "epoch": 3.1580377768718577, "grad_norm": 1.7073205709457397, "learning_rate": 0.0006053981519228156, "loss": 3.349, "step": 46480 }, { "epoch": 3.1583774969425193, "grad_norm": 1.6459283828735352, "learning_rate": 0.0006053556869139829, "loss": 3.3869, "step": 46485 }, { "epoch": 3.158717217013181, "grad_norm": 1.765915036201477, "learning_rate": 0.0006053132219051502, "loss": 3.3987, "step": 46490 }, { "epoch": 3.159056937083843, "grad_norm": 2.225425958633423, "learning_rate": 0.0006052707568963175, "loss": 3.4908, "step": 46495 }, { "epoch": 3.1593966571545047, "grad_norm": 1.5569101572036743, "learning_rate": 0.0006052282918874847, "loss": 3.0332, "step": 46500 }, { "epoch": 3.1597363772251663, "grad_norm": 2.136824607849121, "learning_rate": 0.000605185826878652, "loss": 3.4678, "step": 46505 }, { "epoch": 3.1600760972958284, "grad_norm": 2.6013035774230957, "learning_rate": 0.0006051433618698193, "loss": 3.5571, "step": 46510 }, { "epoch": 3.16041581736649, "grad_norm": 2.236302614212036, "learning_rate": 0.0006051008968609865, "loss": 3.6007, "step": 46515 }, { "epoch": 3.1607555374371517, "grad_norm": 1.9117940664291382, "learning_rate": 0.0006050584318521538, "loss": 3.6686, "step": 46520 }, { "epoch": 3.1610952575078137, "grad_norm": 2.3681092262268066, "learning_rate": 0.0006050159668433212, "loss": 3.3456, "step": 46525 }, { "epoch": 3.1614349775784754, "grad_norm": 1.8514366149902344, 
"learning_rate": 0.0006049735018344884, "loss": 3.4466, "step": 46530 }, { "epoch": 3.161774697649137, "grad_norm": 1.4592491388320923, "learning_rate": 0.0006049310368256557, "loss": 3.5529, "step": 46535 }, { "epoch": 3.162114417719799, "grad_norm": 2.5322046279907227, "learning_rate": 0.000604888571816823, "loss": 3.2923, "step": 46540 }, { "epoch": 3.1624541377904607, "grad_norm": 2.437303304672241, "learning_rate": 0.0006048461068079902, "loss": 3.3508, "step": 46545 }, { "epoch": 3.1627938578611223, "grad_norm": 1.3856559991836548, "learning_rate": 0.0006048036417991574, "loss": 3.5008, "step": 46550 }, { "epoch": 3.1631335779317844, "grad_norm": 2.5307328701019287, "learning_rate": 0.0006047611767903248, "loss": 3.518, "step": 46555 }, { "epoch": 3.163473298002446, "grad_norm": 2.417654275894165, "learning_rate": 0.0006047187117814921, "loss": 3.42, "step": 46560 }, { "epoch": 3.1638130180731077, "grad_norm": 1.4167031049728394, "learning_rate": 0.0006046762467726593, "loss": 3.5472, "step": 46565 }, { "epoch": 3.1641527381437697, "grad_norm": 2.1794166564941406, "learning_rate": 0.0006046337817638267, "loss": 3.4046, "step": 46570 }, { "epoch": 3.1644924582144314, "grad_norm": 2.131580352783203, "learning_rate": 0.0006045913167549939, "loss": 3.6188, "step": 46575 }, { "epoch": 3.164832178285093, "grad_norm": 1.6446542739868164, "learning_rate": 0.0006045488517461611, "loss": 3.6994, "step": 46580 }, { "epoch": 3.165171898355755, "grad_norm": 2.411186695098877, "learning_rate": 0.0006045063867373285, "loss": 3.5478, "step": 46585 }, { "epoch": 3.1655116184264167, "grad_norm": 2.0714845657348633, "learning_rate": 0.0006044639217284957, "loss": 3.3949, "step": 46590 }, { "epoch": 3.1658513384970783, "grad_norm": 2.057429313659668, "learning_rate": 0.000604421456719663, "loss": 3.5652, "step": 46595 }, { "epoch": 3.1661910585677404, "grad_norm": 1.8069709539413452, "learning_rate": 0.0006043789917108304, "loss": 3.4677, "step": 46600 }, { "epoch": 
3.166530778638402, "grad_norm": 1.9406356811523438, "learning_rate": 0.0006043365267019976, "loss": 3.5373, "step": 46605 }, { "epoch": 3.1668704987090637, "grad_norm": 2.231606960296631, "learning_rate": 0.0006042940616931648, "loss": 3.3555, "step": 46610 }, { "epoch": 3.1672102187797253, "grad_norm": 2.104053020477295, "learning_rate": 0.0006042515966843321, "loss": 3.4433, "step": 46615 }, { "epoch": 3.1675499388503874, "grad_norm": 2.1917638778686523, "learning_rate": 0.0006042091316754994, "loss": 3.5754, "step": 46620 }, { "epoch": 3.167889658921049, "grad_norm": 2.0040290355682373, "learning_rate": 0.0006041666666666666, "loss": 3.7163, "step": 46625 }, { "epoch": 3.1682293789917106, "grad_norm": 1.7643274068832397, "learning_rate": 0.000604124201657834, "loss": 3.3125, "step": 46630 }, { "epoch": 3.1685690990623727, "grad_norm": 1.8267505168914795, "learning_rate": 0.0006040817366490013, "loss": 3.4989, "step": 46635 }, { "epoch": 3.1689088191330343, "grad_norm": 1.5797868967056274, "learning_rate": 0.0006040392716401685, "loss": 3.5107, "step": 46640 }, { "epoch": 3.169248539203696, "grad_norm": 1.8635289669036865, "learning_rate": 0.0006039968066313358, "loss": 3.3669, "step": 46645 }, { "epoch": 3.169588259274358, "grad_norm": 2.820446491241455, "learning_rate": 0.000603954341622503, "loss": 3.5449, "step": 46650 }, { "epoch": 3.1699279793450197, "grad_norm": 1.9787110090255737, "learning_rate": 0.0006039118766136703, "loss": 3.4231, "step": 46655 }, { "epoch": 3.1702676994156813, "grad_norm": 1.79529869556427, "learning_rate": 0.0006038694116048377, "loss": 3.6687, "step": 46660 }, { "epoch": 3.1706074194863434, "grad_norm": 1.4445067644119263, "learning_rate": 0.0006038269465960049, "loss": 3.371, "step": 46665 }, { "epoch": 3.170947139557005, "grad_norm": 1.9564768075942993, "learning_rate": 0.0006037844815871722, "loss": 3.4758, "step": 46670 }, { "epoch": 3.1712868596276667, "grad_norm": 2.0501766204833984, "learning_rate": 0.0006037420165783395, 
"loss": 3.5587, "step": 46675 }, { "epoch": 3.1716265796983287, "grad_norm": 1.9679396152496338, "learning_rate": 0.0006036995515695067, "loss": 3.5833, "step": 46680 }, { "epoch": 3.1719662997689904, "grad_norm": 2.0574302673339844, "learning_rate": 0.000603657086560674, "loss": 3.3501, "step": 46685 }, { "epoch": 3.172306019839652, "grad_norm": 3.234692096710205, "learning_rate": 0.0006036146215518413, "loss": 3.4518, "step": 46690 }, { "epoch": 3.172645739910314, "grad_norm": 1.963008999824524, "learning_rate": 0.0006035721565430086, "loss": 3.6943, "step": 46695 }, { "epoch": 3.1729854599809757, "grad_norm": 2.122539758682251, "learning_rate": 0.0006035296915341758, "loss": 3.312, "step": 46700 }, { "epoch": 3.1733251800516373, "grad_norm": 1.8570300340652466, "learning_rate": 0.0006034872265253432, "loss": 3.3885, "step": 46705 }, { "epoch": 3.1736649001222994, "grad_norm": 2.1342904567718506, "learning_rate": 0.0006034447615165104, "loss": 3.4134, "step": 46710 }, { "epoch": 3.174004620192961, "grad_norm": 2.6468236446380615, "learning_rate": 0.0006034022965076776, "loss": 3.7242, "step": 46715 }, { "epoch": 3.1743443402636227, "grad_norm": 1.6451992988586426, "learning_rate": 0.000603359831498845, "loss": 3.4423, "step": 46720 }, { "epoch": 3.1746840603342847, "grad_norm": 1.9595768451690674, "learning_rate": 0.0006033173664900122, "loss": 3.3943, "step": 46725 }, { "epoch": 3.1750237804049464, "grad_norm": 1.778268575668335, "learning_rate": 0.0006032749014811795, "loss": 3.4383, "step": 46730 }, { "epoch": 3.175363500475608, "grad_norm": 2.1267471313476562, "learning_rate": 0.0006032324364723469, "loss": 3.4239, "step": 46735 }, { "epoch": 3.17570322054627, "grad_norm": 2.038337469100952, "learning_rate": 0.0006031899714635141, "loss": 3.5217, "step": 46740 }, { "epoch": 3.1760429406169317, "grad_norm": 1.911956787109375, "learning_rate": 0.0006031475064546813, "loss": 3.7148, "step": 46745 }, { "epoch": 3.1763826606875933, "grad_norm": 1.7326998710632324, 
"learning_rate": 0.0006031050414458486, "loss": 3.473, "step": 46750 }, { "epoch": 3.176722380758255, "grad_norm": 1.8985685110092163, "learning_rate": 0.0006030625764370159, "loss": 3.1743, "step": 46755 }, { "epoch": 3.177062100828917, "grad_norm": 2.1382434368133545, "learning_rate": 0.0006030201114281831, "loss": 3.4805, "step": 46760 }, { "epoch": 3.1774018208995787, "grad_norm": 1.7827430963516235, "learning_rate": 0.0006029776464193505, "loss": 3.2889, "step": 46765 }, { "epoch": 3.1777415409702403, "grad_norm": 2.0912413597106934, "learning_rate": 0.0006029351814105178, "loss": 3.4569, "step": 46770 }, { "epoch": 3.1780812610409024, "grad_norm": 2.1263303756713867, "learning_rate": 0.000602892716401685, "loss": 3.2675, "step": 46775 }, { "epoch": 3.178420981111564, "grad_norm": 1.9013363122940063, "learning_rate": 0.0006028502513928523, "loss": 3.461, "step": 46780 }, { "epoch": 3.1787607011822256, "grad_norm": 2.32454514503479, "learning_rate": 0.0006028077863840196, "loss": 3.4051, "step": 46785 }, { "epoch": 3.1791004212528877, "grad_norm": 1.730140209197998, "learning_rate": 0.0006027653213751868, "loss": 3.7262, "step": 46790 }, { "epoch": 3.1794401413235494, "grad_norm": 1.7513750791549683, "learning_rate": 0.0006027228563663541, "loss": 3.3942, "step": 46795 }, { "epoch": 3.179779861394211, "grad_norm": 2.8690502643585205, "learning_rate": 0.0006026803913575214, "loss": 3.2891, "step": 46800 }, { "epoch": 3.180119581464873, "grad_norm": 1.6543059349060059, "learning_rate": 0.0006026379263486888, "loss": 3.2431, "step": 46805 }, { "epoch": 3.1804593015355347, "grad_norm": 2.4052071571350098, "learning_rate": 0.000602595461339856, "loss": 3.4156, "step": 46810 }, { "epoch": 3.1807990216061963, "grad_norm": 1.9830909967422485, "learning_rate": 0.0006025529963310232, "loss": 3.2743, "step": 46815 }, { "epoch": 3.1811387416768584, "grad_norm": 2.1382691860198975, "learning_rate": 0.0006025105313221906, "loss": 3.3395, "step": 46820 }, { "epoch": 
3.18147846174752, "grad_norm": 1.63387930393219, "learning_rate": 0.0006024680663133578, "loss": 3.4644, "step": 46825 }, { "epoch": 3.1818181818181817, "grad_norm": 1.6242973804473877, "learning_rate": 0.000602425601304525, "loss": 3.4138, "step": 46830 }, { "epoch": 3.1821579018888437, "grad_norm": 1.4955689907073975, "learning_rate": 0.0006023831362956925, "loss": 3.5111, "step": 46835 }, { "epoch": 3.1824976219595054, "grad_norm": 2.213700532913208, "learning_rate": 0.0006023406712868597, "loss": 3.3089, "step": 46840 }, { "epoch": 3.182837342030167, "grad_norm": 1.8397380113601685, "learning_rate": 0.0006022982062780269, "loss": 3.4307, "step": 46845 }, { "epoch": 3.183177062100829, "grad_norm": 1.6430563926696777, "learning_rate": 0.0006022557412691942, "loss": 3.4942, "step": 46850 }, { "epoch": 3.1835167821714907, "grad_norm": 1.711501955986023, "learning_rate": 0.0006022132762603615, "loss": 3.4293, "step": 46855 }, { "epoch": 3.1838565022421523, "grad_norm": 2.429673671722412, "learning_rate": 0.0006021708112515287, "loss": 3.6638, "step": 46860 }, { "epoch": 3.1841962223128144, "grad_norm": 2.0483651161193848, "learning_rate": 0.000602128346242696, "loss": 3.4716, "step": 46865 }, { "epoch": 3.184535942383476, "grad_norm": 1.6418002843856812, "learning_rate": 0.0006020858812338634, "loss": 3.5634, "step": 46870 }, { "epoch": 3.1848756624541377, "grad_norm": 2.087580680847168, "learning_rate": 0.0006020434162250306, "loss": 3.5283, "step": 46875 }, { "epoch": 3.1852153825247997, "grad_norm": 1.622108817100525, "learning_rate": 0.0006020009512161979, "loss": 3.5255, "step": 46880 }, { "epoch": 3.1855551025954614, "grad_norm": 1.868749976158142, "learning_rate": 0.0006019584862073652, "loss": 3.707, "step": 46885 }, { "epoch": 3.185894822666123, "grad_norm": 2.1877481937408447, "learning_rate": 0.0006019160211985324, "loss": 3.4152, "step": 46890 }, { "epoch": 3.186234542736785, "grad_norm": 2.4211559295654297, "learning_rate": 0.0006018735561896997, 
"loss": 3.4321, "step": 46895 }, { "epoch": 3.1865742628074467, "grad_norm": 2.0299477577209473, "learning_rate": 0.0006018310911808669, "loss": 3.5737, "step": 46900 }, { "epoch": 3.1869139828781083, "grad_norm": 1.910976529121399, "learning_rate": 0.0006017886261720343, "loss": 3.3055, "step": 46905 }, { "epoch": 3.1872537029487704, "grad_norm": 1.8762580156326294, "learning_rate": 0.0006017461611632016, "loss": 3.6067, "step": 46910 }, { "epoch": 3.187593423019432, "grad_norm": 1.8797879219055176, "learning_rate": 0.0006017036961543688, "loss": 3.3067, "step": 46915 }, { "epoch": 3.1879331430900937, "grad_norm": 2.099519968032837, "learning_rate": 0.0006016612311455361, "loss": 3.6157, "step": 46920 }, { "epoch": 3.1882728631607558, "grad_norm": 1.9013537168502808, "learning_rate": 0.0006016187661367034, "loss": 3.5813, "step": 46925 }, { "epoch": 3.1886125832314174, "grad_norm": 1.8012961149215698, "learning_rate": 0.0006015763011278706, "loss": 3.4803, "step": 46930 }, { "epoch": 3.188952303302079, "grad_norm": 2.131364345550537, "learning_rate": 0.0006015338361190378, "loss": 3.4266, "step": 46935 }, { "epoch": 3.189292023372741, "grad_norm": 2.3276984691619873, "learning_rate": 0.0006014913711102053, "loss": 3.5433, "step": 46940 }, { "epoch": 3.1896317434434027, "grad_norm": 1.5028327703475952, "learning_rate": 0.0006014489061013725, "loss": 3.2992, "step": 46945 }, { "epoch": 3.1899714635140644, "grad_norm": 1.4569483995437622, "learning_rate": 0.0006014064410925397, "loss": 3.4048, "step": 46950 }, { "epoch": 3.190311183584726, "grad_norm": 2.2580175399780273, "learning_rate": 0.0006013639760837071, "loss": 3.3961, "step": 46955 }, { "epoch": 3.190650903655388, "grad_norm": 1.6780107021331787, "learning_rate": 0.0006013215110748743, "loss": 3.1467, "step": 46960 }, { "epoch": 3.1909906237260497, "grad_norm": 2.3125054836273193, "learning_rate": 0.0006012790460660415, "loss": 3.2981, "step": 46965 }, { "epoch": 3.1913303437967113, "grad_norm": 
2.6176321506500244, "learning_rate": 0.0006012365810572089, "loss": 3.2512, "step": 46970 }, { "epoch": 3.1916700638673734, "grad_norm": 1.8925904035568237, "learning_rate": 0.0006011941160483762, "loss": 3.584, "step": 46975 }, { "epoch": 3.192009783938035, "grad_norm": 1.95341956615448, "learning_rate": 0.0006011516510395434, "loss": 3.3013, "step": 46980 }, { "epoch": 3.1923495040086967, "grad_norm": 2.152107000350952, "learning_rate": 0.0006011091860307108, "loss": 3.3493, "step": 46985 }, { "epoch": 3.1926892240793587, "grad_norm": 2.0670113563537598, "learning_rate": 0.000601066721021878, "loss": 3.4044, "step": 46990 }, { "epoch": 3.1930289441500204, "grad_norm": 1.8175722360610962, "learning_rate": 0.0006010242560130452, "loss": 3.4081, "step": 46995 }, { "epoch": 3.193368664220682, "grad_norm": 1.4878779649734497, "learning_rate": 0.0006009817910042125, "loss": 3.3499, "step": 47000 }, { "epoch": 3.193708384291344, "grad_norm": 1.5757566690444946, "learning_rate": 0.0006009393259953798, "loss": 3.6284, "step": 47005 }, { "epoch": 3.1940481043620057, "grad_norm": 1.6385858058929443, "learning_rate": 0.0006008968609865471, "loss": 3.2815, "step": 47010 }, { "epoch": 3.1943878244326673, "grad_norm": 1.968967318534851, "learning_rate": 0.0006008543959777144, "loss": 3.434, "step": 47015 }, { "epoch": 3.1947275445033294, "grad_norm": 1.795830249786377, "learning_rate": 0.0006008119309688817, "loss": 3.3044, "step": 47020 }, { "epoch": 3.195067264573991, "grad_norm": 1.7618591785430908, "learning_rate": 0.0006007694659600489, "loss": 3.4519, "step": 47025 }, { "epoch": 3.1954069846446527, "grad_norm": 2.0141499042510986, "learning_rate": 0.0006007270009512162, "loss": 3.4126, "step": 47030 }, { "epoch": 3.1957467047153147, "grad_norm": 1.6570392847061157, "learning_rate": 0.0006006845359423834, "loss": 3.4472, "step": 47035 }, { "epoch": 3.1960864247859764, "grad_norm": 2.3185818195343018, "learning_rate": 0.0006006420709335507, "loss": 3.5404, "step": 47040 }, 
{ "epoch": 3.196426144856638, "grad_norm": 1.6375573873519897, "learning_rate": 0.0006005996059247181, "loss": 3.2412, "step": 47045 }, { "epoch": 3.1967658649273, "grad_norm": 1.9530835151672363, "learning_rate": 0.0006005571409158853, "loss": 3.3053, "step": 47050 }, { "epoch": 3.1971055849979617, "grad_norm": 1.6957811117172241, "learning_rate": 0.0006005146759070526, "loss": 3.4055, "step": 47055 }, { "epoch": 3.1974453050686233, "grad_norm": 2.2415616512298584, "learning_rate": 0.0006004722108982199, "loss": 3.2717, "step": 47060 }, { "epoch": 3.1977850251392854, "grad_norm": 1.7997286319732666, "learning_rate": 0.0006004297458893871, "loss": 3.6204, "step": 47065 }, { "epoch": 3.198124745209947, "grad_norm": 2.149703025817871, "learning_rate": 0.0006003872808805544, "loss": 3.3533, "step": 47070 }, { "epoch": 3.1984644652806087, "grad_norm": 1.8027677536010742, "learning_rate": 0.0006003448158717217, "loss": 3.6332, "step": 47075 }, { "epoch": 3.1988041853512708, "grad_norm": 2.2603070735931396, "learning_rate": 0.000600302350862889, "loss": 3.3516, "step": 47080 }, { "epoch": 3.1991439054219324, "grad_norm": 1.9115409851074219, "learning_rate": 0.0006002598858540562, "loss": 3.551, "step": 47085 }, { "epoch": 3.199483625492594, "grad_norm": 2.2101480960845947, "learning_rate": 0.0006002174208452236, "loss": 3.5708, "step": 47090 }, { "epoch": 3.1998233455632556, "grad_norm": 1.9738394021987915, "learning_rate": 0.0006001749558363908, "loss": 3.3339, "step": 47095 }, { "epoch": 3.2001630656339177, "grad_norm": 1.610397458076477, "learning_rate": 0.000600132490827558, "loss": 3.3399, "step": 47100 }, { "epoch": 3.2005027857045794, "grad_norm": 1.878980278968811, "learning_rate": 0.0006000900258187254, "loss": 3.3223, "step": 47105 }, { "epoch": 3.200842505775241, "grad_norm": 1.60816490650177, "learning_rate": 0.0006000475608098926, "loss": 3.802, "step": 47110 }, { "epoch": 3.201182225845903, "grad_norm": 1.8059662580490112, "learning_rate": 
0.0006000050958010599, "loss": 3.4838, "step": 47115 }, { "epoch": 3.2015219459165647, "grad_norm": 2.5138471126556396, "learning_rate": 0.0005999626307922273, "loss": 3.5324, "step": 47120 }, { "epoch": 3.2018616659872263, "grad_norm": 1.5326398611068726, "learning_rate": 0.0005999201657833945, "loss": 3.1658, "step": 47125 }, { "epoch": 3.2022013860578884, "grad_norm": 1.6696141958236694, "learning_rate": 0.0005998777007745617, "loss": 3.6331, "step": 47130 }, { "epoch": 3.20254110612855, "grad_norm": 2.50117826461792, "learning_rate": 0.000599835235765729, "loss": 3.7002, "step": 47135 }, { "epoch": 3.2028808261992117, "grad_norm": 1.9994802474975586, "learning_rate": 0.0005997927707568963, "loss": 3.4897, "step": 47140 }, { "epoch": 3.2032205462698737, "grad_norm": 1.5711873769760132, "learning_rate": 0.0005997503057480636, "loss": 3.452, "step": 47145 }, { "epoch": 3.2035602663405354, "grad_norm": 1.9863831996917725, "learning_rate": 0.0005997078407392309, "loss": 3.5203, "step": 47150 }, { "epoch": 3.203899986411197, "grad_norm": 1.4957857131958008, "learning_rate": 0.0005996653757303982, "loss": 3.5999, "step": 47155 }, { "epoch": 3.204239706481859, "grad_norm": 1.793495774269104, "learning_rate": 0.0005996229107215655, "loss": 3.5655, "step": 47160 }, { "epoch": 3.2045794265525207, "grad_norm": 1.8815468549728394, "learning_rate": 0.0005995804457127327, "loss": 3.4666, "step": 47165 }, { "epoch": 3.2049191466231823, "grad_norm": 1.7938193082809448, "learning_rate": 0.0005995379807039, "loss": 3.5674, "step": 47170 }, { "epoch": 3.2052588666938444, "grad_norm": 1.6796282529830933, "learning_rate": 0.0005994955156950673, "loss": 3.3058, "step": 47175 }, { "epoch": 3.205598586764506, "grad_norm": 1.9794979095458984, "learning_rate": 0.0005994530506862345, "loss": 3.6708, "step": 47180 }, { "epoch": 3.2059383068351677, "grad_norm": 2.048649787902832, "learning_rate": 0.0005994105856774018, "loss": 3.3515, "step": 47185 }, { "epoch": 3.2062780269058297, 
"grad_norm": 2.0855419635772705, "learning_rate": 0.0005993681206685692, "loss": 3.4085, "step": 47190 }, { "epoch": 3.2066177469764914, "grad_norm": 2.004368782043457, "learning_rate": 0.0005993256556597364, "loss": 3.4018, "step": 47195 }, { "epoch": 3.206957467047153, "grad_norm": 1.6011316776275635, "learning_rate": 0.0005992831906509036, "loss": 3.1899, "step": 47200 }, { "epoch": 3.207297187117815, "grad_norm": 2.429779291152954, "learning_rate": 0.000599240725642071, "loss": 3.4055, "step": 47205 }, { "epoch": 3.2076369071884767, "grad_norm": 1.475732445716858, "learning_rate": 0.0005991982606332382, "loss": 3.4185, "step": 47210 }, { "epoch": 3.2079766272591383, "grad_norm": 1.7093687057495117, "learning_rate": 0.0005991557956244054, "loss": 3.1915, "step": 47215 }, { "epoch": 3.2083163473298004, "grad_norm": 2.080080509185791, "learning_rate": 0.0005991133306155729, "loss": 3.2629, "step": 47220 }, { "epoch": 3.208656067400462, "grad_norm": 1.7096601724624634, "learning_rate": 0.0005990708656067401, "loss": 3.3766, "step": 47225 }, { "epoch": 3.2089957874711237, "grad_norm": 1.734636664390564, "learning_rate": 0.0005990284005979073, "loss": 3.1457, "step": 47230 }, { "epoch": 3.2093355075417858, "grad_norm": 2.584747552871704, "learning_rate": 0.0005989859355890746, "loss": 3.6427, "step": 47235 }, { "epoch": 3.2096752276124474, "grad_norm": 2.253437042236328, "learning_rate": 0.0005989434705802419, "loss": 3.4936, "step": 47240 }, { "epoch": 3.210014947683109, "grad_norm": 1.9376400709152222, "learning_rate": 0.0005989010055714091, "loss": 3.5647, "step": 47245 }, { "epoch": 3.210354667753771, "grad_norm": 2.1140875816345215, "learning_rate": 0.0005988585405625765, "loss": 3.6559, "step": 47250 }, { "epoch": 3.2106943878244327, "grad_norm": 1.7372876405715942, "learning_rate": 0.0005988160755537438, "loss": 3.3695, "step": 47255 }, { "epoch": 3.2110341078950944, "grad_norm": 1.908353567123413, "learning_rate": 0.000598773610544911, "loss": 3.5197, "step": 
47260 }, { "epoch": 3.2113738279657564, "grad_norm": 2.1079933643341064, "learning_rate": 0.0005987311455360783, "loss": 3.2842, "step": 47265 }, { "epoch": 3.211713548036418, "grad_norm": 1.540791392326355, "learning_rate": 0.0005986886805272456, "loss": 3.4317, "step": 47270 }, { "epoch": 3.2120532681070797, "grad_norm": 2.712599039077759, "learning_rate": 0.0005986462155184128, "loss": 3.3863, "step": 47275 }, { "epoch": 3.2123929881777418, "grad_norm": 2.202587366104126, "learning_rate": 0.0005986037505095801, "loss": 3.5369, "step": 47280 }, { "epoch": 3.2127327082484034, "grad_norm": 1.5948506593704224, "learning_rate": 0.0005985612855007475, "loss": 3.3929, "step": 47285 }, { "epoch": 3.213072428319065, "grad_norm": 2.3155508041381836, "learning_rate": 0.0005985188204919147, "loss": 3.4687, "step": 47290 }, { "epoch": 3.2134121483897267, "grad_norm": 1.5765202045440674, "learning_rate": 0.000598476355483082, "loss": 3.3135, "step": 47295 }, { "epoch": 3.2137518684603887, "grad_norm": 2.1462645530700684, "learning_rate": 0.0005984338904742492, "loss": 3.6339, "step": 47300 }, { "epoch": 3.2140915885310504, "grad_norm": 1.5241204500198364, "learning_rate": 0.0005983914254654165, "loss": 3.1642, "step": 47305 }, { "epoch": 3.214431308601712, "grad_norm": 1.797892689704895, "learning_rate": 0.0005983489604565838, "loss": 3.1466, "step": 47310 }, { "epoch": 3.214771028672374, "grad_norm": 1.7484838962554932, "learning_rate": 0.000598306495447751, "loss": 3.4386, "step": 47315 }, { "epoch": 3.2151107487430357, "grad_norm": 1.6441296339035034, "learning_rate": 0.0005982640304389184, "loss": 3.5862, "step": 47320 }, { "epoch": 3.2154504688136973, "grad_norm": 1.7020485401153564, "learning_rate": 0.0005982215654300857, "loss": 3.533, "step": 47325 }, { "epoch": 3.2157901888843594, "grad_norm": 2.7219467163085938, "learning_rate": 0.0005981791004212529, "loss": 3.247, "step": 47330 }, { "epoch": 3.216129908955021, "grad_norm": 1.7733404636383057, "learning_rate": 
0.0005981366354124201, "loss": 3.3535, "step": 47335 }, { "epoch": 3.2164696290256827, "grad_norm": 1.8385757207870483, "learning_rate": 0.0005980941704035875, "loss": 3.4097, "step": 47340 }, { "epoch": 3.2168093490963448, "grad_norm": 2.0279390811920166, "learning_rate": 0.0005980517053947547, "loss": 3.2221, "step": 47345 }, { "epoch": 3.2171490691670064, "grad_norm": 2.2582740783691406, "learning_rate": 0.0005980092403859219, "loss": 3.4342, "step": 47350 }, { "epoch": 3.217488789237668, "grad_norm": 1.6363866329193115, "learning_rate": 0.0005979667753770894, "loss": 3.2967, "step": 47355 }, { "epoch": 3.21782850930833, "grad_norm": 1.66962468624115, "learning_rate": 0.0005979243103682566, "loss": 3.5536, "step": 47360 }, { "epoch": 3.2181682293789917, "grad_norm": 2.115278720855713, "learning_rate": 0.0005978818453594238, "loss": 3.2833, "step": 47365 }, { "epoch": 3.2185079494496533, "grad_norm": 1.799386978149414, "learning_rate": 0.0005978393803505912, "loss": 3.4972, "step": 47370 }, { "epoch": 3.2188476695203154, "grad_norm": 1.8734303712844849, "learning_rate": 0.0005977969153417584, "loss": 3.4213, "step": 47375 }, { "epoch": 3.219187389590977, "grad_norm": 1.5000823736190796, "learning_rate": 0.0005977544503329256, "loss": 3.6791, "step": 47380 }, { "epoch": 3.2195271096616387, "grad_norm": 1.9097518920898438, "learning_rate": 0.0005977119853240929, "loss": 3.0165, "step": 47385 }, { "epoch": 3.2198668297323008, "grad_norm": 1.618436574935913, "learning_rate": 0.0005976695203152603, "loss": 3.4867, "step": 47390 }, { "epoch": 3.2202065498029624, "grad_norm": 2.4889931678771973, "learning_rate": 0.0005976270553064275, "loss": 3.5454, "step": 47395 }, { "epoch": 3.220546269873624, "grad_norm": 1.8616281747817993, "learning_rate": 0.0005975845902975948, "loss": 3.4227, "step": 47400 }, { "epoch": 3.220885989944286, "grad_norm": 2.4252400398254395, "learning_rate": 0.0005975421252887621, "loss": 3.4881, "step": 47405 }, { "epoch": 3.2212257100149477, 
"grad_norm": 1.9031293392181396, "learning_rate": 0.0005974996602799293, "loss": 3.5929, "step": 47410 }, { "epoch": 3.2215654300856094, "grad_norm": 1.8153436183929443, "learning_rate": 0.0005974571952710966, "loss": 3.6317, "step": 47415 }, { "epoch": 3.2219051501562714, "grad_norm": 2.2120633125305176, "learning_rate": 0.0005974147302622639, "loss": 3.3019, "step": 47420 }, { "epoch": 3.222244870226933, "grad_norm": 2.234105110168457, "learning_rate": 0.0005973722652534312, "loss": 3.4012, "step": 47425 }, { "epoch": 3.2225845902975947, "grad_norm": 4.1012492179870605, "learning_rate": 0.0005973298002445985, "loss": 3.2588, "step": 47430 }, { "epoch": 3.2229243103682563, "grad_norm": 2.405019521713257, "learning_rate": 0.0005972873352357657, "loss": 3.5058, "step": 47435 }, { "epoch": 3.2232640304389184, "grad_norm": 2.1991970539093018, "learning_rate": 0.000597244870226933, "loss": 3.4838, "step": 47440 }, { "epoch": 3.22360375050958, "grad_norm": 1.7417380809783936, "learning_rate": 0.0005972024052181003, "loss": 3.5272, "step": 47445 }, { "epoch": 3.2239434705802417, "grad_norm": 1.913430094718933, "learning_rate": 0.0005971599402092675, "loss": 3.4511, "step": 47450 }, { "epoch": 3.2242831906509037, "grad_norm": 1.7483034133911133, "learning_rate": 0.0005971174752004348, "loss": 3.6746, "step": 47455 }, { "epoch": 3.2246229107215654, "grad_norm": 2.0673041343688965, "learning_rate": 0.0005970750101916022, "loss": 3.4879, "step": 47460 }, { "epoch": 3.224962630792227, "grad_norm": 1.8126397132873535, "learning_rate": 0.0005970325451827694, "loss": 3.4707, "step": 47465 }, { "epoch": 3.225302350862889, "grad_norm": 1.8038159608840942, "learning_rate": 0.0005969900801739367, "loss": 3.5122, "step": 47470 }, { "epoch": 3.2256420709335507, "grad_norm": 1.5942809581756592, "learning_rate": 0.000596947615165104, "loss": 3.3016, "step": 47475 }, { "epoch": 3.2259817910042123, "grad_norm": 1.8847910165786743, "learning_rate": 0.0005969051501562712, "loss": 3.5462, 
"step": 47480 }, { "epoch": 3.2263215110748744, "grad_norm": 1.6079635620117188, "learning_rate": 0.0005968626851474385, "loss": 3.5682, "step": 47485 }, { "epoch": 3.226661231145536, "grad_norm": 1.9467437267303467, "learning_rate": 0.0005968202201386058, "loss": 3.4949, "step": 47490 }, { "epoch": 3.2270009512161977, "grad_norm": 2.329909563064575, "learning_rate": 0.0005967777551297731, "loss": 3.1667, "step": 47495 }, { "epoch": 3.2273406712868598, "grad_norm": 2.546229124069214, "learning_rate": 0.0005967352901209404, "loss": 3.2746, "step": 47500 }, { "epoch": 3.2276803913575214, "grad_norm": 1.9558736085891724, "learning_rate": 0.0005966928251121077, "loss": 3.6615, "step": 47505 }, { "epoch": 3.228020111428183, "grad_norm": 1.8116745948791504, "learning_rate": 0.0005966503601032749, "loss": 3.4411, "step": 47510 }, { "epoch": 3.228359831498845, "grad_norm": 1.6246203184127808, "learning_rate": 0.0005966078950944422, "loss": 3.6331, "step": 47515 }, { "epoch": 3.2286995515695067, "grad_norm": 1.7588576078414917, "learning_rate": 0.0005965654300856095, "loss": 3.2917, "step": 47520 }, { "epoch": 3.2290392716401684, "grad_norm": 1.6424533128738403, "learning_rate": 0.0005965229650767767, "loss": 3.4209, "step": 47525 }, { "epoch": 3.2293789917108304, "grad_norm": 1.7002878189086914, "learning_rate": 0.0005964805000679441, "loss": 3.5401, "step": 47530 }, { "epoch": 3.229718711781492, "grad_norm": 2.017345905303955, "learning_rate": 0.0005964380350591113, "loss": 3.1596, "step": 47535 }, { "epoch": 3.2300584318521537, "grad_norm": 2.3950421810150146, "learning_rate": 0.0005963955700502786, "loss": 3.3685, "step": 47540 }, { "epoch": 3.2303981519228158, "grad_norm": 1.54490327835083, "learning_rate": 0.0005963531050414459, "loss": 3.346, "step": 47545 }, { "epoch": 3.2307378719934774, "grad_norm": 2.0213797092437744, "learning_rate": 0.0005963106400326131, "loss": 3.164, "step": 47550 }, { "epoch": 3.231077592064139, "grad_norm": 2.0906424522399902, 
"learning_rate": 0.0005962681750237804, "loss": 3.3931, "step": 47555 }, { "epoch": 3.231417312134801, "grad_norm": 1.9077619314193726, "learning_rate": 0.0005962257100149477, "loss": 3.2125, "step": 47560 }, { "epoch": 3.2317570322054627, "grad_norm": 1.6162208318710327, "learning_rate": 0.000596183245006115, "loss": 3.3929, "step": 47565 }, { "epoch": 3.2320967522761244, "grad_norm": 1.7621887922286987, "learning_rate": 0.0005961407799972823, "loss": 3.4185, "step": 47570 }, { "epoch": 3.2324364723467864, "grad_norm": 2.010450839996338, "learning_rate": 0.0005960983149884496, "loss": 3.4884, "step": 47575 }, { "epoch": 3.232776192417448, "grad_norm": 2.0913898944854736, "learning_rate": 0.0005960558499796168, "loss": 3.3763, "step": 47580 }, { "epoch": 3.2331159124881097, "grad_norm": 2.3909525871276855, "learning_rate": 0.000596013384970784, "loss": 3.6719, "step": 47585 }, { "epoch": 3.2334556325587718, "grad_norm": 1.7475301027297974, "learning_rate": 0.0005959709199619514, "loss": 3.6152, "step": 47590 }, { "epoch": 3.2337953526294334, "grad_norm": 1.9327079057693481, "learning_rate": 0.0005959284549531186, "loss": 3.1773, "step": 47595 }, { "epoch": 3.234135072700095, "grad_norm": 1.8257558345794678, "learning_rate": 0.0005958859899442859, "loss": 3.6391, "step": 47600 }, { "epoch": 3.234474792770757, "grad_norm": 2.141972780227661, "learning_rate": 0.0005958435249354533, "loss": 3.7142, "step": 47605 }, { "epoch": 3.2348145128414187, "grad_norm": 1.692678689956665, "learning_rate": 0.0005958010599266205, "loss": 3.1751, "step": 47610 }, { "epoch": 3.2351542329120804, "grad_norm": 1.8388365507125854, "learning_rate": 0.0005957585949177877, "loss": 3.5283, "step": 47615 }, { "epoch": 3.2354939529827424, "grad_norm": 1.5685421228408813, "learning_rate": 0.000595716129908955, "loss": 3.6355, "step": 47620 }, { "epoch": 3.235833673053404, "grad_norm": 2.342693567276001, "learning_rate": 0.0005956736649001223, "loss": 3.3232, "step": 47625 }, { "epoch": 
3.2361733931240657, "grad_norm": 1.724745512008667, "learning_rate": 0.0005956311998912895, "loss": 3.5353, "step": 47630 }, { "epoch": 3.2365131131947273, "grad_norm": 1.8181426525115967, "learning_rate": 0.000595588734882457, "loss": 3.4418, "step": 47635 }, { "epoch": 3.2368528332653894, "grad_norm": 1.4772672653198242, "learning_rate": 0.0005955462698736242, "loss": 3.5299, "step": 47640 }, { "epoch": 3.237192553336051, "grad_norm": 1.6882177591323853, "learning_rate": 0.0005955038048647914, "loss": 3.489, "step": 47645 }, { "epoch": 3.2375322734067127, "grad_norm": 1.9789732694625854, "learning_rate": 0.0005954613398559587, "loss": 3.1585, "step": 47650 }, { "epoch": 3.2378719934773748, "grad_norm": 1.5566195249557495, "learning_rate": 0.000595418874847126, "loss": 3.4098, "step": 47655 }, { "epoch": 3.2382117135480364, "grad_norm": 1.8840306997299194, "learning_rate": 0.0005953764098382932, "loss": 3.6045, "step": 47660 }, { "epoch": 3.238551433618698, "grad_norm": 1.9218029975891113, "learning_rate": 0.0005953339448294605, "loss": 3.4252, "step": 47665 }, { "epoch": 3.23889115368936, "grad_norm": 1.5247470140457153, "learning_rate": 0.0005952914798206279, "loss": 3.4407, "step": 47670 }, { "epoch": 3.2392308737600217, "grad_norm": 2.0763814449310303, "learning_rate": 0.0005952490148117951, "loss": 3.3987, "step": 47675 }, { "epoch": 3.2395705938306834, "grad_norm": 1.6147326231002808, "learning_rate": 0.0005952065498029624, "loss": 3.4327, "step": 47680 }, { "epoch": 3.2399103139013454, "grad_norm": 2.083211898803711, "learning_rate": 0.0005951640847941296, "loss": 3.7141, "step": 47685 }, { "epoch": 3.240250033972007, "grad_norm": 1.941332221031189, "learning_rate": 0.0005951216197852969, "loss": 3.4367, "step": 47690 }, { "epoch": 3.2405897540426687, "grad_norm": 1.6919690370559692, "learning_rate": 0.0005950791547764642, "loss": 3.8608, "step": 47695 }, { "epoch": 3.2409294741133308, "grad_norm": 1.7769588232040405, "learning_rate": 0.0005950366897676314, 
"loss": 3.4734, "step": 47700 }, { "epoch": 3.2412691941839924, "grad_norm": 1.6825964450836182, "learning_rate": 0.0005949942247587988, "loss": 3.5281, "step": 47705 }, { "epoch": 3.241608914254654, "grad_norm": 2.115446090698242, "learning_rate": 0.0005949517597499661, "loss": 3.6126, "step": 47710 }, { "epoch": 3.241948634325316, "grad_norm": 2.1621596813201904, "learning_rate": 0.0005949092947411333, "loss": 3.4427, "step": 47715 }, { "epoch": 3.2422883543959777, "grad_norm": 2.648090362548828, "learning_rate": 0.0005948668297323005, "loss": 3.5206, "step": 47720 }, { "epoch": 3.2426280744666394, "grad_norm": 1.989894151687622, "learning_rate": 0.0005948243647234679, "loss": 3.3958, "step": 47725 }, { "epoch": 3.2429677945373014, "grad_norm": 2.001976251602173, "learning_rate": 0.0005947818997146351, "loss": 3.4185, "step": 47730 }, { "epoch": 3.243307514607963, "grad_norm": 2.3984405994415283, "learning_rate": 0.0005947394347058023, "loss": 3.5515, "step": 47735 }, { "epoch": 3.2436472346786247, "grad_norm": 1.8173751831054688, "learning_rate": 0.0005946969696969698, "loss": 3.3699, "step": 47740 }, { "epoch": 3.2439869547492868, "grad_norm": 1.9775543212890625, "learning_rate": 0.000594654504688137, "loss": 3.3689, "step": 47745 }, { "epoch": 3.2443266748199484, "grad_norm": 2.311025857925415, "learning_rate": 0.0005946120396793042, "loss": 3.4088, "step": 47750 }, { "epoch": 3.24466639489061, "grad_norm": 2.2083332538604736, "learning_rate": 0.0005945695746704716, "loss": 3.6392, "step": 47755 }, { "epoch": 3.245006114961272, "grad_norm": 1.9930346012115479, "learning_rate": 0.0005945271096616388, "loss": 3.467, "step": 47760 }, { "epoch": 3.2453458350319337, "grad_norm": 1.7703368663787842, "learning_rate": 0.000594484644652806, "loss": 3.6491, "step": 47765 }, { "epoch": 3.2456855551025954, "grad_norm": 1.552159070968628, "learning_rate": 0.0005944421796439735, "loss": 3.4588, "step": 47770 }, { "epoch": 3.246025275173257, "grad_norm": 1.6846122741699219, 
"learning_rate": 0.0005943997146351407, "loss": 3.4114, "step": 47775 }, { "epoch": 3.246364995243919, "grad_norm": 1.986571192741394, "learning_rate": 0.0005943572496263079, "loss": 3.4433, "step": 47780 }, { "epoch": 3.2467047153145807, "grad_norm": 1.8165996074676514, "learning_rate": 0.0005943147846174752, "loss": 3.1237, "step": 47785 }, { "epoch": 3.2470444353852423, "grad_norm": 1.959697961807251, "learning_rate": 0.0005942723196086425, "loss": 3.4545, "step": 47790 }, { "epoch": 3.2473841554559044, "grad_norm": 1.9548685550689697, "learning_rate": 0.0005942298545998097, "loss": 3.3749, "step": 47795 }, { "epoch": 3.247723875526566, "grad_norm": 1.8814946413040161, "learning_rate": 0.000594187389590977, "loss": 3.4764, "step": 47800 }, { "epoch": 3.2480635955972277, "grad_norm": 2.136023998260498, "learning_rate": 0.0005941449245821444, "loss": 3.2342, "step": 47805 }, { "epoch": 3.2484033156678898, "grad_norm": 1.7489999532699585, "learning_rate": 0.0005941024595733116, "loss": 3.5964, "step": 47810 }, { "epoch": 3.2487430357385514, "grad_norm": 1.869644284248352, "learning_rate": 0.0005940599945644789, "loss": 3.2706, "step": 47815 }, { "epoch": 3.249082755809213, "grad_norm": 1.5357037782669067, "learning_rate": 0.0005940175295556461, "loss": 3.772, "step": 47820 }, { "epoch": 3.249422475879875, "grad_norm": 2.283242702484131, "learning_rate": 0.0005939750645468135, "loss": 3.4193, "step": 47825 }, { "epoch": 3.2497621959505367, "grad_norm": 1.906314492225647, "learning_rate": 0.0005939325995379807, "loss": 3.3431, "step": 47830 }, { "epoch": 3.2501019160211984, "grad_norm": 2.2611584663391113, "learning_rate": 0.0005938901345291479, "loss": 3.5504, "step": 47835 }, { "epoch": 3.2504416360918604, "grad_norm": 2.1320197582244873, "learning_rate": 0.0005938476695203154, "loss": 3.6177, "step": 47840 }, { "epoch": 3.250781356162522, "grad_norm": 1.5585936307907104, "learning_rate": 0.0005938052045114826, "loss": 3.5178, "step": 47845 }, { "epoch": 
3.2511210762331837, "grad_norm": 1.7074271440505981, "learning_rate": 0.0005937627395026498, "loss": 3.5904, "step": 47850 }, { "epoch": 3.2514607963038458, "grad_norm": 2.0488600730895996, "learning_rate": 0.0005937202744938172, "loss": 3.5582, "step": 47855 }, { "epoch": 3.2518005163745074, "grad_norm": 1.460033893585205, "learning_rate": 0.0005936778094849844, "loss": 3.4685, "step": 47860 }, { "epoch": 3.252140236445169, "grad_norm": 1.930822730064392, "learning_rate": 0.0005936353444761516, "loss": 3.2405, "step": 47865 }, { "epoch": 3.252479956515831, "grad_norm": 1.9525121450424194, "learning_rate": 0.000593592879467319, "loss": 3.3735, "step": 47870 }, { "epoch": 3.2528196765864927, "grad_norm": 2.155353546142578, "learning_rate": 0.0005935504144584863, "loss": 3.2715, "step": 47875 }, { "epoch": 3.2531593966571544, "grad_norm": 2.0286669731140137, "learning_rate": 0.0005935079494496535, "loss": 3.6216, "step": 47880 }, { "epoch": 3.2534991167278164, "grad_norm": 1.7892329692840576, "learning_rate": 0.0005934654844408208, "loss": 3.5053, "step": 47885 }, { "epoch": 3.253838836798478, "grad_norm": 1.8700206279754639, "learning_rate": 0.0005934230194319881, "loss": 3.537, "step": 47890 }, { "epoch": 3.2541785568691397, "grad_norm": 2.2278261184692383, "learning_rate": 0.0005933805544231553, "loss": 3.4601, "step": 47895 }, { "epoch": 3.254518276939802, "grad_norm": 1.985956072807312, "learning_rate": 0.0005933380894143226, "loss": 3.364, "step": 47900 }, { "epoch": 3.2548579970104634, "grad_norm": 2.2082672119140625, "learning_rate": 0.0005932956244054899, "loss": 3.4135, "step": 47905 }, { "epoch": 3.255197717081125, "grad_norm": 1.9508428573608398, "learning_rate": 0.0005932531593966572, "loss": 3.4034, "step": 47910 }, { "epoch": 3.255537437151787, "grad_norm": 1.8795510530471802, "learning_rate": 0.0005932106943878245, "loss": 3.3292, "step": 47915 }, { "epoch": 3.2558771572224487, "grad_norm": 1.6567018032073975, "learning_rate": 0.0005931682293789917, 
"loss": 3.8034, "step": 47920 }, { "epoch": 3.2562168772931104, "grad_norm": 1.3809220790863037, "learning_rate": 0.000593125764370159, "loss": 3.4277, "step": 47925 }, { "epoch": 3.2565565973637725, "grad_norm": 1.894418478012085, "learning_rate": 0.0005930832993613263, "loss": 3.517, "step": 47930 }, { "epoch": 3.256896317434434, "grad_norm": 1.7173410654067993, "learning_rate": 0.0005930408343524935, "loss": 3.6909, "step": 47935 }, { "epoch": 3.2572360375050957, "grad_norm": 2.077887535095215, "learning_rate": 0.0005929983693436608, "loss": 3.2965, "step": 47940 }, { "epoch": 3.257575757575758, "grad_norm": 1.7359085083007812, "learning_rate": 0.0005929559043348282, "loss": 3.3733, "step": 47945 }, { "epoch": 3.2579154776464194, "grad_norm": 1.6453614234924316, "learning_rate": 0.0005929134393259954, "loss": 3.4752, "step": 47950 }, { "epoch": 3.258255197717081, "grad_norm": 1.7622116804122925, "learning_rate": 0.0005928709743171627, "loss": 3.604, "step": 47955 }, { "epoch": 3.258594917787743, "grad_norm": 1.6352308988571167, "learning_rate": 0.00059282850930833, "loss": 3.5839, "step": 47960 }, { "epoch": 3.2589346378584048, "grad_norm": 1.8504459857940674, "learning_rate": 0.0005927860442994972, "loss": 3.4015, "step": 47965 }, { "epoch": 3.2592743579290664, "grad_norm": 2.265106439590454, "learning_rate": 0.0005927435792906644, "loss": 3.5762, "step": 47970 }, { "epoch": 3.2596140779997285, "grad_norm": 2.246199131011963, "learning_rate": 0.0005927011142818318, "loss": 3.3406, "step": 47975 }, { "epoch": 3.25995379807039, "grad_norm": 1.7307898998260498, "learning_rate": 0.0005926586492729991, "loss": 3.3156, "step": 47980 }, { "epoch": 3.2602935181410517, "grad_norm": 1.871741533279419, "learning_rate": 0.0005926161842641663, "loss": 3.531, "step": 47985 }, { "epoch": 3.2606332382117134, "grad_norm": 2.092527389526367, "learning_rate": 0.0005925737192553337, "loss": 3.4886, "step": 47990 }, { "epoch": 3.2609729582823754, "grad_norm": 1.7823492288589478, 
"learning_rate": 0.0005925312542465009, "loss": 3.5261, "step": 47995 }, { "epoch": 3.261312678353037, "grad_norm": 1.6652687788009644, "learning_rate": 0.0005924887892376681, "loss": 3.5301, "step": 48000 }, { "epoch": 3.2616523984236987, "grad_norm": 2.122608184814453, "learning_rate": 0.0005924463242288355, "loss": 3.4186, "step": 48005 }, { "epoch": 3.2619921184943608, "grad_norm": 1.7667638063430786, "learning_rate": 0.0005924038592200027, "loss": 3.6225, "step": 48010 }, { "epoch": 3.2623318385650224, "grad_norm": 1.9668848514556885, "learning_rate": 0.00059236139421117, "loss": 3.5661, "step": 48015 }, { "epoch": 3.262671558635684, "grad_norm": 2.1596665382385254, "learning_rate": 0.0005923189292023374, "loss": 3.4373, "step": 48020 }, { "epoch": 3.263011278706346, "grad_norm": 1.7511777877807617, "learning_rate": 0.0005922764641935046, "loss": 3.7372, "step": 48025 }, { "epoch": 3.2633509987770077, "grad_norm": 1.3324769735336304, "learning_rate": 0.0005922339991846718, "loss": 3.5434, "step": 48030 }, { "epoch": 3.2636907188476694, "grad_norm": 1.6813013553619385, "learning_rate": 0.0005921915341758391, "loss": 3.3949, "step": 48035 }, { "epoch": 3.2640304389183314, "grad_norm": 1.5483897924423218, "learning_rate": 0.0005921490691670064, "loss": 3.4702, "step": 48040 }, { "epoch": 3.264370158988993, "grad_norm": 2.01364803314209, "learning_rate": 0.0005921066041581736, "loss": 3.3574, "step": 48045 }, { "epoch": 3.2647098790596547, "grad_norm": 1.9062453508377075, "learning_rate": 0.000592064139149341, "loss": 3.7195, "step": 48050 }, { "epoch": 3.265049599130317, "grad_norm": 2.3433945178985596, "learning_rate": 0.0005920216741405083, "loss": 3.1678, "step": 48055 }, { "epoch": 3.2653893192009784, "grad_norm": 2.2805328369140625, "learning_rate": 0.0005919792091316755, "loss": 3.161, "step": 48060 }, { "epoch": 3.26572903927164, "grad_norm": 1.8548752069473267, "learning_rate": 0.0005919367441228428, "loss": 3.4523, "step": 48065 }, { "epoch": 
3.266068759342302, "grad_norm": 2.185076951980591, "learning_rate": 0.00059189427911401, "loss": 3.4098, "step": 48070 }, { "epoch": 3.2664084794129638, "grad_norm": 1.7908042669296265, "learning_rate": 0.0005918518141051773, "loss": 3.5444, "step": 48075 }, { "epoch": 3.2667481994836254, "grad_norm": 2.0943427085876465, "learning_rate": 0.0005918093490963446, "loss": 3.3788, "step": 48080 }, { "epoch": 3.2670879195542875, "grad_norm": 2.290393352508545, "learning_rate": 0.0005917668840875119, "loss": 3.2716, "step": 48085 }, { "epoch": 3.267427639624949, "grad_norm": 2.5426113605499268, "learning_rate": 0.0005917244190786792, "loss": 3.4091, "step": 48090 }, { "epoch": 3.2677673596956107, "grad_norm": 1.902587890625, "learning_rate": 0.0005916819540698465, "loss": 3.4608, "step": 48095 }, { "epoch": 3.2681070797662723, "grad_norm": 1.7879643440246582, "learning_rate": 0.0005916394890610137, "loss": 3.5874, "step": 48100 }, { "epoch": 3.2684467998369344, "grad_norm": 1.786653995513916, "learning_rate": 0.000591597024052181, "loss": 3.5119, "step": 48105 }, { "epoch": 3.268786519907596, "grad_norm": 2.614281415939331, "learning_rate": 0.0005915545590433483, "loss": 3.488, "step": 48110 }, { "epoch": 3.2691262399782577, "grad_norm": 1.90050208568573, "learning_rate": 0.0005915120940345155, "loss": 3.4522, "step": 48115 }, { "epoch": 3.2694659600489198, "grad_norm": 1.4464002847671509, "learning_rate": 0.0005914696290256828, "loss": 3.4786, "step": 48120 }, { "epoch": 3.2698056801195814, "grad_norm": 2.088862419128418, "learning_rate": 0.0005914271640168502, "loss": 3.6471, "step": 48125 }, { "epoch": 3.270145400190243, "grad_norm": 1.9068669080734253, "learning_rate": 0.0005913846990080174, "loss": 3.3229, "step": 48130 }, { "epoch": 3.270485120260905, "grad_norm": 2.1145849227905273, "learning_rate": 0.0005913422339991846, "loss": 3.2551, "step": 48135 }, { "epoch": 3.2708248403315667, "grad_norm": 2.07222580909729, "learning_rate": 0.000591299768990352, "loss": 
3.3908, "step": 48140 }, { "epoch": 3.2711645604022284, "grad_norm": 1.915968418121338, "learning_rate": 0.0005912573039815192, "loss": 3.5714, "step": 48145 }, { "epoch": 3.2715042804728904, "grad_norm": 1.6784883737564087, "learning_rate": 0.0005912148389726864, "loss": 3.7348, "step": 48150 }, { "epoch": 3.271844000543552, "grad_norm": 2.314162492752075, "learning_rate": 0.0005911723739638539, "loss": 3.6095, "step": 48155 }, { "epoch": 3.2721837206142137, "grad_norm": 2.0055856704711914, "learning_rate": 0.0005911299089550211, "loss": 3.1101, "step": 48160 }, { "epoch": 3.2725234406848758, "grad_norm": 2.211111545562744, "learning_rate": 0.0005910874439461884, "loss": 3.4768, "step": 48165 }, { "epoch": 3.2728631607555374, "grad_norm": 1.9798932075500488, "learning_rate": 0.0005910449789373556, "loss": 3.2551, "step": 48170 }, { "epoch": 3.273202880826199, "grad_norm": 2.4293203353881836, "learning_rate": 0.0005910025139285229, "loss": 3.5058, "step": 48175 }, { "epoch": 3.273542600896861, "grad_norm": 1.8365823030471802, "learning_rate": 0.0005909600489196902, "loss": 3.3794, "step": 48180 }, { "epoch": 3.2738823209675227, "grad_norm": 2.4370810985565186, "learning_rate": 0.0005909175839108574, "loss": 3.6437, "step": 48185 }, { "epoch": 3.2742220410381844, "grad_norm": 2.2480485439300537, "learning_rate": 0.0005908751189020248, "loss": 3.513, "step": 48190 }, { "epoch": 3.2745617611088464, "grad_norm": 2.1183276176452637, "learning_rate": 0.0005908326538931921, "loss": 3.5971, "step": 48195 }, { "epoch": 3.274901481179508, "grad_norm": 2.2539522647857666, "learning_rate": 0.0005907901888843593, "loss": 3.4195, "step": 48200 }, { "epoch": 3.2752412012501697, "grad_norm": 2.5638835430145264, "learning_rate": 0.0005907477238755266, "loss": 3.4242, "step": 48205 }, { "epoch": 3.275580921320832, "grad_norm": 2.443641185760498, "learning_rate": 0.0005907052588666939, "loss": 3.6716, "step": 48210 }, { "epoch": 3.2759206413914934, "grad_norm": 2.253308057785034, 
"learning_rate": 0.0005906627938578611, "loss": 3.4702, "step": 48215 }, { "epoch": 3.276260361462155, "grad_norm": 1.4525930881500244, "learning_rate": 0.0005906203288490283, "loss": 3.4537, "step": 48220 }, { "epoch": 3.276600081532817, "grad_norm": 1.9344549179077148, "learning_rate": 0.0005905778638401958, "loss": 3.4624, "step": 48225 }, { "epoch": 3.2769398016034788, "grad_norm": 2.020176649093628, "learning_rate": 0.000590535398831363, "loss": 3.5235, "step": 48230 }, { "epoch": 3.2772795216741404, "grad_norm": 2.5383946895599365, "learning_rate": 0.0005904929338225302, "loss": 3.5317, "step": 48235 }, { "epoch": 3.2776192417448025, "grad_norm": 1.9749037027359009, "learning_rate": 0.0005904504688136976, "loss": 3.5252, "step": 48240 }, { "epoch": 3.277958961815464, "grad_norm": 1.7463905811309814, "learning_rate": 0.0005904080038048648, "loss": 3.6695, "step": 48245 }, { "epoch": 3.2782986818861257, "grad_norm": 1.814500331878662, "learning_rate": 0.000590365538796032, "loss": 3.5877, "step": 48250 }, { "epoch": 3.278638401956788, "grad_norm": 1.787211537361145, "learning_rate": 0.0005903230737871994, "loss": 3.3009, "step": 48255 }, { "epoch": 3.2789781220274494, "grad_norm": 3.479971408843994, "learning_rate": 0.0005902806087783667, "loss": 3.4705, "step": 48260 }, { "epoch": 3.279317842098111, "grad_norm": 2.1820762157440186, "learning_rate": 0.0005902381437695339, "loss": 3.4118, "step": 48265 }, { "epoch": 3.279657562168773, "grad_norm": 1.6723617315292358, "learning_rate": 0.0005901956787607012, "loss": 3.3576, "step": 48270 }, { "epoch": 3.2799972822394348, "grad_norm": 2.2110700607299805, "learning_rate": 0.0005901532137518685, "loss": 3.2835, "step": 48275 }, { "epoch": 3.2803370023100964, "grad_norm": 1.6258012056350708, "learning_rate": 0.0005901107487430357, "loss": 3.3514, "step": 48280 }, { "epoch": 3.2806767223807585, "grad_norm": 1.8078787326812744, "learning_rate": 0.000590068283734203, "loss": 3.4194, "step": 48285 }, { "epoch": 
3.28101644245142, "grad_norm": 2.0083134174346924, "learning_rate": 0.0005900258187253703, "loss": 3.3549, "step": 48290 }, { "epoch": 3.2813561625220817, "grad_norm": 1.8512283563613892, "learning_rate": 0.0005899833537165376, "loss": 3.4815, "step": 48295 }, { "epoch": 3.281695882592744, "grad_norm": 2.2272958755493164, "learning_rate": 0.0005899408887077049, "loss": 3.4148, "step": 48300 }, { "epoch": 3.2820356026634054, "grad_norm": 2.0951380729675293, "learning_rate": 0.0005898984236988722, "loss": 3.5078, "step": 48305 }, { "epoch": 3.282375322734067, "grad_norm": 1.8672142028808594, "learning_rate": 0.0005898559586900394, "loss": 3.5774, "step": 48310 }, { "epoch": 3.282715042804729, "grad_norm": 1.9262534379959106, "learning_rate": 0.0005898134936812067, "loss": 3.4057, "step": 48315 }, { "epoch": 3.2830547628753908, "grad_norm": 2.334843635559082, "learning_rate": 0.0005897710286723739, "loss": 3.545, "step": 48320 }, { "epoch": 3.2833944829460524, "grad_norm": 2.3516335487365723, "learning_rate": 0.0005897285636635412, "loss": 3.5179, "step": 48325 }, { "epoch": 3.283734203016714, "grad_norm": 1.9479082822799683, "learning_rate": 0.0005896860986547086, "loss": 3.498, "step": 48330 }, { "epoch": 3.284073923087376, "grad_norm": 1.8130651712417603, "learning_rate": 0.0005896436336458758, "loss": 3.3335, "step": 48335 }, { "epoch": 3.2844136431580377, "grad_norm": 1.706079125404358, "learning_rate": 0.0005896011686370431, "loss": 3.416, "step": 48340 }, { "epoch": 3.2847533632286994, "grad_norm": 1.5900890827178955, "learning_rate": 0.0005895587036282104, "loss": 3.1019, "step": 48345 }, { "epoch": 3.2850930832993614, "grad_norm": 2.0840368270874023, "learning_rate": 0.0005895162386193776, "loss": 3.5721, "step": 48350 }, { "epoch": 3.285432803370023, "grad_norm": 1.7636008262634277, "learning_rate": 0.0005894737736105448, "loss": 3.3899, "step": 48355 }, { "epoch": 3.2857725234406847, "grad_norm": 1.9186943769454956, "learning_rate": 0.0005894313086017123, 
"loss": 3.3137, "step": 48360 }, { "epoch": 3.286112243511347, "grad_norm": 1.7379652261734009, "learning_rate": 0.0005893888435928795, "loss": 3.4923, "step": 48365 }, { "epoch": 3.2864519635820084, "grad_norm": 1.7700865268707275, "learning_rate": 0.0005893463785840467, "loss": 3.5556, "step": 48370 }, { "epoch": 3.28679168365267, "grad_norm": 2.0971436500549316, "learning_rate": 0.0005893039135752141, "loss": 3.6223, "step": 48375 }, { "epoch": 3.287131403723332, "grad_norm": 1.6619226932525635, "learning_rate": 0.0005892614485663813, "loss": 3.5791, "step": 48380 }, { "epoch": 3.2874711237939938, "grad_norm": 1.6978974342346191, "learning_rate": 0.0005892189835575485, "loss": 3.3204, "step": 48385 }, { "epoch": 3.2878108438646554, "grad_norm": 2.0312256813049316, "learning_rate": 0.0005891765185487159, "loss": 3.5506, "step": 48390 }, { "epoch": 3.2881505639353175, "grad_norm": 1.6114840507507324, "learning_rate": 0.0005891340535398832, "loss": 3.4235, "step": 48395 }, { "epoch": 3.288490284005979, "grad_norm": 1.4821128845214844, "learning_rate": 0.0005890915885310504, "loss": 3.5088, "step": 48400 }, { "epoch": 3.2888300040766407, "grad_norm": 1.7703464031219482, "learning_rate": 0.0005890491235222178, "loss": 3.7915, "step": 48405 }, { "epoch": 3.289169724147303, "grad_norm": 1.5500333309173584, "learning_rate": 0.000589006658513385, "loss": 3.5389, "step": 48410 }, { "epoch": 3.2895094442179644, "grad_norm": 1.945791244506836, "learning_rate": 0.0005889641935045522, "loss": 3.3047, "step": 48415 }, { "epoch": 3.289849164288626, "grad_norm": 2.445952892303467, "learning_rate": 0.0005889217284957195, "loss": 3.3418, "step": 48420 }, { "epoch": 3.290188884359288, "grad_norm": 2.359149217605591, "learning_rate": 0.0005888792634868868, "loss": 3.4241, "step": 48425 }, { "epoch": 3.2905286044299498, "grad_norm": 2.121394395828247, "learning_rate": 0.0005888367984780541, "loss": 3.4997, "step": 48430 }, { "epoch": 3.2908683245006114, "grad_norm": 
1.6774481534957886, "learning_rate": 0.0005887943334692214, "loss": 3.3004, "step": 48435 }, { "epoch": 3.291208044571273, "grad_norm": 2.5258986949920654, "learning_rate": 0.0005887518684603887, "loss": 3.7859, "step": 48440 }, { "epoch": 3.291547764641935, "grad_norm": 1.78915536403656, "learning_rate": 0.0005887094034515559, "loss": 3.6991, "step": 48445 }, { "epoch": 3.2918874847125967, "grad_norm": 1.392765998840332, "learning_rate": 0.0005886669384427232, "loss": 3.692, "step": 48450 }, { "epoch": 3.2922272047832584, "grad_norm": 1.628413200378418, "learning_rate": 0.0005886244734338904, "loss": 3.0755, "step": 48455 }, { "epoch": 3.2925669248539204, "grad_norm": 1.8164069652557373, "learning_rate": 0.0005885820084250577, "loss": 3.0974, "step": 48460 }, { "epoch": 3.292906644924582, "grad_norm": 1.8113172054290771, "learning_rate": 0.0005885395434162251, "loss": 3.4704, "step": 48465 }, { "epoch": 3.2932463649952437, "grad_norm": 2.632765293121338, "learning_rate": 0.0005884970784073923, "loss": 3.3987, "step": 48470 }, { "epoch": 3.2935860850659058, "grad_norm": 2.003427505493164, "learning_rate": 0.0005884546133985596, "loss": 3.3669, "step": 48475 }, { "epoch": 3.2939258051365674, "grad_norm": 1.5260846614837646, "learning_rate": 0.0005884121483897269, "loss": 3.6562, "step": 48480 }, { "epoch": 3.294265525207229, "grad_norm": 1.8090779781341553, "learning_rate": 0.0005883696833808941, "loss": 3.3747, "step": 48485 }, { "epoch": 3.294605245277891, "grad_norm": 2.0615012645721436, "learning_rate": 0.0005883272183720614, "loss": 3.5011, "step": 48490 }, { "epoch": 3.2949449653485527, "grad_norm": 1.9805197715759277, "learning_rate": 0.0005882847533632287, "loss": 3.3667, "step": 48495 }, { "epoch": 3.2952846854192144, "grad_norm": 1.879643201828003, "learning_rate": 0.000588242288354396, "loss": 3.4188, "step": 48500 }, { "epoch": 3.2956244054898765, "grad_norm": 1.8703250885009766, "learning_rate": 0.0005881998233455634, "loss": 3.639, "step": 48505 }, { 
"epoch": 3.295964125560538, "grad_norm": 2.0591983795166016, "learning_rate": 0.0005881573583367306, "loss": 3.3874, "step": 48510 }, { "epoch": 3.2963038456311997, "grad_norm": 1.912125825881958, "learning_rate": 0.0005881148933278978, "loss": 3.4583, "step": 48515 }, { "epoch": 3.296643565701862, "grad_norm": 1.7980912923812866, "learning_rate": 0.0005880724283190651, "loss": 3.3325, "step": 48520 }, { "epoch": 3.2969832857725234, "grad_norm": 1.9601203203201294, "learning_rate": 0.0005880299633102324, "loss": 3.3088, "step": 48525 }, { "epoch": 3.297323005843185, "grad_norm": 1.9746242761611938, "learning_rate": 0.0005879874983013996, "loss": 3.3552, "step": 48530 }, { "epoch": 3.297662725913847, "grad_norm": 1.7070938348770142, "learning_rate": 0.000587945033292567, "loss": 3.2766, "step": 48535 }, { "epoch": 3.2980024459845088, "grad_norm": 1.8827825784683228, "learning_rate": 0.0005879025682837343, "loss": 3.1971, "step": 48540 }, { "epoch": 3.2983421660551704, "grad_norm": 2.1057376861572266, "learning_rate": 0.0005878601032749015, "loss": 3.4972, "step": 48545 }, { "epoch": 3.2986818861258325, "grad_norm": 2.3892199993133545, "learning_rate": 0.0005878176382660688, "loss": 3.4351, "step": 48550 }, { "epoch": 3.299021606196494, "grad_norm": 2.3449831008911133, "learning_rate": 0.000587775173257236, "loss": 3.5764, "step": 48555 }, { "epoch": 3.2993613262671557, "grad_norm": 1.8264706134796143, "learning_rate": 0.0005877327082484033, "loss": 3.6068, "step": 48560 }, { "epoch": 3.299701046337818, "grad_norm": 1.8234124183654785, "learning_rate": 0.0005876902432395706, "loss": 3.2728, "step": 48565 }, { "epoch": 3.3000407664084794, "grad_norm": 1.6616747379302979, "learning_rate": 0.0005876477782307379, "loss": 3.5508, "step": 48570 }, { "epoch": 3.300380486479141, "grad_norm": 2.3519818782806396, "learning_rate": 0.0005876053132219052, "loss": 3.5762, "step": 48575 }, { "epoch": 3.300720206549803, "grad_norm": 1.6911909580230713, "learning_rate": 
0.0005875628482130725, "loss": 3.6028, "step": 48580 }, { "epoch": 3.3010599266204648, "grad_norm": 1.6135029792785645, "learning_rate": 0.0005875203832042397, "loss": 3.5417, "step": 48585 }, { "epoch": 3.3013996466911264, "grad_norm": 2.285107374191284, "learning_rate": 0.000587477918195407, "loss": 3.3087, "step": 48590 }, { "epoch": 3.3017393667617885, "grad_norm": 1.9527268409729004, "learning_rate": 0.0005874354531865743, "loss": 3.6041, "step": 48595 }, { "epoch": 3.30207908683245, "grad_norm": 1.5449681282043457, "learning_rate": 0.0005873929881777415, "loss": 3.4327, "step": 48600 }, { "epoch": 3.3024188069031117, "grad_norm": 2.2741551399230957, "learning_rate": 0.0005873505231689088, "loss": 3.3257, "step": 48605 }, { "epoch": 3.302758526973774, "grad_norm": 2.0790998935699463, "learning_rate": 0.0005873080581600762, "loss": 3.4491, "step": 48610 }, { "epoch": 3.3030982470444354, "grad_norm": 1.93492591381073, "learning_rate": 0.0005872655931512434, "loss": 3.706, "step": 48615 }, { "epoch": 3.303437967115097, "grad_norm": 1.7539172172546387, "learning_rate": 0.0005872231281424106, "loss": 3.4302, "step": 48620 }, { "epoch": 3.303777687185759, "grad_norm": 1.979448914527893, "learning_rate": 0.000587180663133578, "loss": 3.2875, "step": 48625 }, { "epoch": 3.304117407256421, "grad_norm": 1.9929969310760498, "learning_rate": 0.0005871381981247452, "loss": 3.2887, "step": 48630 }, { "epoch": 3.3044571273270824, "grad_norm": 1.7765355110168457, "learning_rate": 0.0005870957331159124, "loss": 3.6095, "step": 48635 }, { "epoch": 3.3047968473977445, "grad_norm": 2.1225388050079346, "learning_rate": 0.0005870532681070799, "loss": 3.5769, "step": 48640 }, { "epoch": 3.305136567468406, "grad_norm": 2.2214441299438477, "learning_rate": 0.0005870108030982471, "loss": 3.474, "step": 48645 }, { "epoch": 3.3054762875390677, "grad_norm": 1.7066986560821533, "learning_rate": 0.0005869683380894143, "loss": 3.6275, "step": 48650 }, { "epoch": 3.30581600760973, 
"grad_norm": 1.4470620155334473, "learning_rate": 0.0005869258730805816, "loss": 3.4306, "step": 48655 }, { "epoch": 3.3061557276803915, "grad_norm": 1.7117397785186768, "learning_rate": 0.0005868834080717489, "loss": 3.6491, "step": 48660 }, { "epoch": 3.306495447751053, "grad_norm": 1.5803937911987305, "learning_rate": 0.0005868409430629161, "loss": 3.326, "step": 48665 }, { "epoch": 3.3068351678217147, "grad_norm": 1.9837898015975952, "learning_rate": 0.0005867984780540834, "loss": 3.5093, "step": 48670 }, { "epoch": 3.307174887892377, "grad_norm": 2.2020928859710693, "learning_rate": 0.0005867560130452508, "loss": 3.4096, "step": 48675 }, { "epoch": 3.3075146079630384, "grad_norm": 1.8863941431045532, "learning_rate": 0.000586713548036418, "loss": 3.2421, "step": 48680 }, { "epoch": 3.3078543280337, "grad_norm": 2.4047036170959473, "learning_rate": 0.0005866710830275853, "loss": 3.1984, "step": 48685 }, { "epoch": 3.308194048104362, "grad_norm": 2.4813737869262695, "learning_rate": 0.0005866286180187526, "loss": 3.2254, "step": 48690 }, { "epoch": 3.3085337681750238, "grad_norm": 1.8762086629867554, "learning_rate": 0.0005865861530099198, "loss": 3.4745, "step": 48695 }, { "epoch": 3.3088734882456854, "grad_norm": 2.014108180999756, "learning_rate": 0.0005865436880010871, "loss": 3.6126, "step": 48700 }, { "epoch": 3.3092132083163475, "grad_norm": 1.6257715225219727, "learning_rate": 0.0005865012229922543, "loss": 3.427, "step": 48705 }, { "epoch": 3.309552928387009, "grad_norm": 1.8787434101104736, "learning_rate": 0.0005864587579834217, "loss": 3.3377, "step": 48710 }, { "epoch": 3.3098926484576707, "grad_norm": 1.8197405338287354, "learning_rate": 0.000586416292974589, "loss": 3.3652, "step": 48715 }, { "epoch": 3.310232368528333, "grad_norm": 1.6935862302780151, "learning_rate": 0.0005863738279657562, "loss": 3.2918, "step": 48720 }, { "epoch": 3.3105720885989944, "grad_norm": 2.430835008621216, "learning_rate": 0.0005863313629569235, "loss": 3.4945, 
"step": 48725 }, { "epoch": 3.310911808669656, "grad_norm": 1.8227735757827759, "learning_rate": 0.0005862888979480908, "loss": 3.4187, "step": 48730 }, { "epoch": 3.311251528740318, "grad_norm": 1.961749792098999, "learning_rate": 0.000586246432939258, "loss": 3.3856, "step": 48735 }, { "epoch": 3.3115912488109798, "grad_norm": 1.7848975658416748, "learning_rate": 0.0005862039679304252, "loss": 3.5084, "step": 48740 }, { "epoch": 3.3119309688816414, "grad_norm": 2.117611885070801, "learning_rate": 0.0005861615029215927, "loss": 3.3191, "step": 48745 }, { "epoch": 3.3122706889523035, "grad_norm": 1.9490265846252441, "learning_rate": 0.0005861190379127599, "loss": 3.6546, "step": 48750 }, { "epoch": 3.312610409022965, "grad_norm": 1.5827898979187012, "learning_rate": 0.0005860765729039271, "loss": 3.1947, "step": 48755 }, { "epoch": 3.3129501290936267, "grad_norm": 1.6102439165115356, "learning_rate": 0.0005860341078950945, "loss": 3.313, "step": 48760 }, { "epoch": 3.313289849164289, "grad_norm": 2.0908124446868896, "learning_rate": 0.0005859916428862617, "loss": 3.4581, "step": 48765 }, { "epoch": 3.3136295692349504, "grad_norm": 2.4526543617248535, "learning_rate": 0.0005859491778774289, "loss": 3.3588, "step": 48770 }, { "epoch": 3.313969289305612, "grad_norm": 1.4450421333312988, "learning_rate": 0.0005859067128685963, "loss": 3.3564, "step": 48775 }, { "epoch": 3.3143090093762737, "grad_norm": 1.9341416358947754, "learning_rate": 0.0005858642478597636, "loss": 3.5462, "step": 48780 }, { "epoch": 3.314648729446936, "grad_norm": 1.821825385093689, "learning_rate": 0.0005858217828509308, "loss": 3.4331, "step": 48785 }, { "epoch": 3.3149884495175974, "grad_norm": 1.7714563608169556, "learning_rate": 0.0005857793178420982, "loss": 3.6357, "step": 48790 }, { "epoch": 3.315328169588259, "grad_norm": 1.632968544960022, "learning_rate": 0.0005857368528332654, "loss": 3.5502, "step": 48795 }, { "epoch": 3.315667889658921, "grad_norm": 1.7416313886642456, 
"learning_rate": 0.0005856943878244326, "loss": 3.4207, "step": 48800 }, { "epoch": 3.3160076097295828, "grad_norm": 2.0115087032318115, "learning_rate": 0.0005856519228155999, "loss": 3.5268, "step": 48805 }, { "epoch": 3.3163473298002444, "grad_norm": 1.5448200702667236, "learning_rate": 0.0005856094578067672, "loss": 3.3316, "step": 48810 }, { "epoch": 3.3166870498709065, "grad_norm": 2.1921708583831787, "learning_rate": 0.0005855669927979345, "loss": 3.4849, "step": 48815 }, { "epoch": 3.317026769941568, "grad_norm": 2.112004518508911, "learning_rate": 0.0005855245277891018, "loss": 3.3465, "step": 48820 }, { "epoch": 3.3173664900122297, "grad_norm": 2.054098129272461, "learning_rate": 0.0005854820627802691, "loss": 3.4676, "step": 48825 }, { "epoch": 3.317706210082892, "grad_norm": 1.8278321027755737, "learning_rate": 0.0005854395977714363, "loss": 3.3053, "step": 48830 }, { "epoch": 3.3180459301535534, "grad_norm": 1.378496766090393, "learning_rate": 0.0005853971327626036, "loss": 3.7385, "step": 48835 }, { "epoch": 3.318385650224215, "grad_norm": 1.556939721107483, "learning_rate": 0.0005853546677537708, "loss": 3.5664, "step": 48840 }, { "epoch": 3.318725370294877, "grad_norm": 1.5924265384674072, "learning_rate": 0.0005853122027449382, "loss": 3.6496, "step": 48845 }, { "epoch": 3.3190650903655388, "grad_norm": 1.7956526279449463, "learning_rate": 0.0005852697377361055, "loss": 3.2275, "step": 48850 }, { "epoch": 3.3194048104362004, "grad_norm": 1.7928497791290283, "learning_rate": 0.0005852272727272727, "loss": 3.5012, "step": 48855 }, { "epoch": 3.3197445305068625, "grad_norm": 2.353036642074585, "learning_rate": 0.0005851848077184401, "loss": 3.1084, "step": 48860 }, { "epoch": 3.320084250577524, "grad_norm": 2.324296712875366, "learning_rate": 0.0005851423427096073, "loss": 3.5144, "step": 48865 }, { "epoch": 3.3204239706481857, "grad_norm": 1.8222452402114868, "learning_rate": 0.0005850998777007745, "loss": 3.5832, "step": 48870 }, { "epoch": 
3.320763690718848, "grad_norm": 1.688933253288269, "learning_rate": 0.0005850574126919419, "loss": 3.3613, "step": 48875 }, { "epoch": 3.3211034107895094, "grad_norm": 2.092306137084961, "learning_rate": 0.0005850149476831091, "loss": 3.4964, "step": 48880 }, { "epoch": 3.321443130860171, "grad_norm": 1.9681377410888672, "learning_rate": 0.0005849724826742764, "loss": 3.378, "step": 48885 }, { "epoch": 3.321782850930833, "grad_norm": 2.258326292037964, "learning_rate": 0.0005849300176654438, "loss": 3.4474, "step": 48890 }, { "epoch": 3.3221225710014948, "grad_norm": 1.8681323528289795, "learning_rate": 0.000584887552656611, "loss": 3.5647, "step": 48895 }, { "epoch": 3.3224622910721564, "grad_norm": 1.5325560569763184, "learning_rate": 0.0005848450876477782, "loss": 3.6272, "step": 48900 }, { "epoch": 3.3228020111428185, "grad_norm": 2.2299394607543945, "learning_rate": 0.0005848026226389455, "loss": 3.3602, "step": 48905 }, { "epoch": 3.32314173121348, "grad_norm": 1.5616353750228882, "learning_rate": 0.0005847601576301128, "loss": 3.6507, "step": 48910 }, { "epoch": 3.3234814512841417, "grad_norm": 2.1832799911499023, "learning_rate": 0.00058471769262128, "loss": 3.5233, "step": 48915 }, { "epoch": 3.323821171354804, "grad_norm": 2.123194456100464, "learning_rate": 0.0005846752276124474, "loss": 3.3876, "step": 48920 }, { "epoch": 3.3241608914254654, "grad_norm": 1.943467617034912, "learning_rate": 0.0005846327626036147, "loss": 3.5884, "step": 48925 }, { "epoch": 3.324500611496127, "grad_norm": 2.061922311782837, "learning_rate": 0.0005845902975947819, "loss": 3.5719, "step": 48930 }, { "epoch": 3.324840331566789, "grad_norm": 2.131023645401001, "learning_rate": 0.0005845478325859492, "loss": 3.5092, "step": 48935 }, { "epoch": 3.325180051637451, "grad_norm": 1.9679747819900513, "learning_rate": 0.0005845053675771165, "loss": 3.3878, "step": 48940 }, { "epoch": 3.3255197717081124, "grad_norm": 1.6551051139831543, "learning_rate": 0.0005844629025682837, "loss": 
3.4671, "step": 48945 }, { "epoch": 3.3258594917787745, "grad_norm": 2.210174322128296, "learning_rate": 0.0005844204375594511, "loss": 3.6269, "step": 48950 }, { "epoch": 3.326199211849436, "grad_norm": 1.8614016771316528, "learning_rate": 0.0005843779725506183, "loss": 3.5181, "step": 48955 }, { "epoch": 3.3265389319200978, "grad_norm": 2.053786516189575, "learning_rate": 0.0005843355075417856, "loss": 3.4529, "step": 48960 }, { "epoch": 3.32687865199076, "grad_norm": 1.9758161306381226, "learning_rate": 0.0005842930425329529, "loss": 3.7867, "step": 48965 }, { "epoch": 3.3272183720614215, "grad_norm": 1.9234797954559326, "learning_rate": 0.0005842505775241201, "loss": 3.5188, "step": 48970 }, { "epoch": 3.327558092132083, "grad_norm": 1.8768881559371948, "learning_rate": 0.0005842081125152874, "loss": 3.1932, "step": 48975 }, { "epoch": 3.327897812202745, "grad_norm": 1.7786215543746948, "learning_rate": 0.0005841656475064547, "loss": 3.4101, "step": 48980 }, { "epoch": 3.328237532273407, "grad_norm": 1.6768468618392944, "learning_rate": 0.000584123182497622, "loss": 3.2809, "step": 48985 }, { "epoch": 3.3285772523440684, "grad_norm": 2.3579001426696777, "learning_rate": 0.0005840807174887893, "loss": 3.374, "step": 48990 }, { "epoch": 3.3289169724147305, "grad_norm": 2.477222442626953, "learning_rate": 0.0005840382524799566, "loss": 3.4744, "step": 48995 }, { "epoch": 3.329256692485392, "grad_norm": 2.3610482215881348, "learning_rate": 0.0005839957874711238, "loss": 3.6486, "step": 49000 }, { "epoch": 3.3295964125560538, "grad_norm": 2.1794867515563965, "learning_rate": 0.000583953322462291, "loss": 3.3531, "step": 49005 }, { "epoch": 3.3299361326267154, "grad_norm": 2.0788450241088867, "learning_rate": 0.0005839108574534584, "loss": 3.5179, "step": 49010 }, { "epoch": 3.3302758526973775, "grad_norm": 2.669645309448242, "learning_rate": 0.0005838683924446256, "loss": 3.3967, "step": 49015 }, { "epoch": 3.330615572768039, "grad_norm": 2.299269199371338, 
"learning_rate": 0.0005838259274357929, "loss": 3.3304, "step": 49020 }, { "epoch": 3.3309552928387007, "grad_norm": 1.9262198209762573, "learning_rate": 0.0005837834624269603, "loss": 3.6459, "step": 49025 }, { "epoch": 3.331295012909363, "grad_norm": 2.08821177482605, "learning_rate": 0.0005837409974181275, "loss": 3.5965, "step": 49030 }, { "epoch": 3.3316347329800244, "grad_norm": 1.7722634077072144, "learning_rate": 0.0005836985324092947, "loss": 3.5898, "step": 49035 }, { "epoch": 3.331974453050686, "grad_norm": 1.7677117586135864, "learning_rate": 0.000583656067400462, "loss": 3.6168, "step": 49040 }, { "epoch": 3.332314173121348, "grad_norm": 1.5594482421875, "learning_rate": 0.0005836136023916293, "loss": 3.3558, "step": 49045 }, { "epoch": 3.3326538931920098, "grad_norm": 1.8607749938964844, "learning_rate": 0.0005835711373827965, "loss": 3.5715, "step": 49050 }, { "epoch": 3.3329936132626714, "grad_norm": 1.494781494140625, "learning_rate": 0.000583528672373964, "loss": 3.512, "step": 49055 }, { "epoch": 3.3333333333333335, "grad_norm": 1.8627128601074219, "learning_rate": 0.0005834862073651312, "loss": 3.5772, "step": 49060 }, { "epoch": 3.333673053403995, "grad_norm": 2.0709879398345947, "learning_rate": 0.0005834437423562984, "loss": 3.0391, "step": 49065 }, { "epoch": 3.3340127734746567, "grad_norm": 2.3118691444396973, "learning_rate": 0.0005834012773474657, "loss": 3.1967, "step": 49070 }, { "epoch": 3.334352493545319, "grad_norm": 1.6087273359298706, "learning_rate": 0.000583358812338633, "loss": 3.4665, "step": 49075 }, { "epoch": 3.3346922136159804, "grad_norm": 2.0830278396606445, "learning_rate": 0.0005833163473298002, "loss": 3.3383, "step": 49080 }, { "epoch": 3.335031933686642, "grad_norm": 1.9233534336090088, "learning_rate": 0.0005832738823209675, "loss": 3.4456, "step": 49085 }, { "epoch": 3.335371653757304, "grad_norm": 1.8136073350906372, "learning_rate": 0.0005832314173121349, "loss": 3.2724, "step": 49090 }, { "epoch": 
3.335711373827966, "grad_norm": 1.8333749771118164, "learning_rate": 0.0005831889523033021, "loss": 3.6638, "step": 49095 }, { "epoch": 3.3360510938986274, "grad_norm": 1.6971673965454102, "learning_rate": 0.0005831464872944694, "loss": 3.3932, "step": 49100 }, { "epoch": 3.3363908139692895, "grad_norm": 1.9497089385986328, "learning_rate": 0.0005831040222856366, "loss": 3.3181, "step": 49105 }, { "epoch": 3.336730534039951, "grad_norm": 1.875058889389038, "learning_rate": 0.0005830615572768039, "loss": 3.7173, "step": 49110 }, { "epoch": 3.3370702541106128, "grad_norm": 2.0540409088134766, "learning_rate": 0.0005830190922679712, "loss": 3.322, "step": 49115 }, { "epoch": 3.3374099741812744, "grad_norm": 1.8854646682739258, "learning_rate": 0.0005829766272591384, "loss": 3.342, "step": 49120 }, { "epoch": 3.3377496942519365, "grad_norm": 2.182403802871704, "learning_rate": 0.0005829341622503058, "loss": 3.3921, "step": 49125 }, { "epoch": 3.338089414322598, "grad_norm": 1.9928174018859863, "learning_rate": 0.0005828916972414731, "loss": 3.4646, "step": 49130 }, { "epoch": 3.3384291343932597, "grad_norm": 1.6818182468414307, "learning_rate": 0.0005828492322326403, "loss": 3.3968, "step": 49135 }, { "epoch": 3.338768854463922, "grad_norm": 2.6168203353881836, "learning_rate": 0.0005828067672238075, "loss": 3.772, "step": 49140 }, { "epoch": 3.3391085745345834, "grad_norm": 2.4274795055389404, "learning_rate": 0.0005827643022149749, "loss": 3.6125, "step": 49145 }, { "epoch": 3.339448294605245, "grad_norm": 1.3738806247711182, "learning_rate": 0.0005827218372061421, "loss": 3.4433, "step": 49150 }, { "epoch": 3.339788014675907, "grad_norm": 2.6640307903289795, "learning_rate": 0.0005826793721973093, "loss": 3.3368, "step": 49155 }, { "epoch": 3.3401277347465688, "grad_norm": 2.265444278717041, "learning_rate": 0.0005826369071884768, "loss": 3.4463, "step": 49160 }, { "epoch": 3.3404674548172304, "grad_norm": 2.2182259559631348, "learning_rate": 0.000582594442179644, 
"loss": 3.3947, "step": 49165 }, { "epoch": 3.3408071748878925, "grad_norm": 1.6178839206695557, "learning_rate": 0.0005825519771708112, "loss": 3.4242, "step": 49170 }, { "epoch": 3.341146894958554, "grad_norm": 1.9877797365188599, "learning_rate": 0.0005825095121619786, "loss": 3.4769, "step": 49175 }, { "epoch": 3.3414866150292157, "grad_norm": 2.57914400100708, "learning_rate": 0.0005824670471531458, "loss": 3.2718, "step": 49180 }, { "epoch": 3.341826335099878, "grad_norm": 2.2956326007843018, "learning_rate": 0.0005824245821443131, "loss": 3.1936, "step": 49185 }, { "epoch": 3.3421660551705394, "grad_norm": 1.5400341749191284, "learning_rate": 0.0005823821171354803, "loss": 3.2567, "step": 49190 }, { "epoch": 3.342505775241201, "grad_norm": 1.875779628753662, "learning_rate": 0.0005823396521266477, "loss": 3.3643, "step": 49195 }, { "epoch": 3.342845495311863, "grad_norm": 2.2254655361175537, "learning_rate": 0.000582297187117815, "loss": 3.3734, "step": 49200 }, { "epoch": 3.3431852153825248, "grad_norm": 1.7716046571731567, "learning_rate": 0.0005822547221089822, "loss": 3.734, "step": 49205 }, { "epoch": 3.3435249354531864, "grad_norm": 2.4638121128082275, "learning_rate": 0.0005822122571001495, "loss": 3.6108, "step": 49210 }, { "epoch": 3.3438646555238485, "grad_norm": 2.2612006664276123, "learning_rate": 0.0005821697920913168, "loss": 3.4367, "step": 49215 }, { "epoch": 3.34420437559451, "grad_norm": 1.9144160747528076, "learning_rate": 0.000582127327082484, "loss": 3.3613, "step": 49220 }, { "epoch": 3.3445440956651717, "grad_norm": 2.1561312675476074, "learning_rate": 0.0005820848620736513, "loss": 3.4158, "step": 49225 }, { "epoch": 3.344883815735834, "grad_norm": 1.8731714487075806, "learning_rate": 0.0005820423970648187, "loss": 3.5959, "step": 49230 }, { "epoch": 3.3452235358064955, "grad_norm": 1.4585211277008057, "learning_rate": 0.0005819999320559859, "loss": 3.0109, "step": 49235 }, { "epoch": 3.345563255877157, "grad_norm": 
1.9423960447311401, "learning_rate": 0.0005819574670471531, "loss": 3.4552, "step": 49240 }, { "epoch": 3.345902975947819, "grad_norm": 1.604511022567749, "learning_rate": 0.0005819150020383205, "loss": 3.357, "step": 49245 }, { "epoch": 3.346242696018481, "grad_norm": 2.740128517150879, "learning_rate": 0.0005818725370294877, "loss": 3.5384, "step": 49250 }, { "epoch": 3.3465824160891424, "grad_norm": 2.2594969272613525, "learning_rate": 0.0005818300720206549, "loss": 3.5059, "step": 49255 }, { "epoch": 3.3469221361598045, "grad_norm": 1.6986438035964966, "learning_rate": 0.0005817876070118223, "loss": 3.5542, "step": 49260 }, { "epoch": 3.347261856230466, "grad_norm": 2.3130342960357666, "learning_rate": 0.0005817451420029896, "loss": 3.5595, "step": 49265 }, { "epoch": 3.3476015763011278, "grad_norm": 2.100306272506714, "learning_rate": 0.0005817026769941568, "loss": 3.5807, "step": 49270 }, { "epoch": 3.34794129637179, "grad_norm": 1.7206411361694336, "learning_rate": 0.0005816602119853242, "loss": 3.5235, "step": 49275 }, { "epoch": 3.3482810164424515, "grad_norm": 1.9786427021026611, "learning_rate": 0.0005816177469764914, "loss": 3.599, "step": 49280 }, { "epoch": 3.348620736513113, "grad_norm": 1.9434044361114502, "learning_rate": 0.0005815752819676586, "loss": 3.4618, "step": 49285 }, { "epoch": 3.348960456583775, "grad_norm": 1.6762877702713013, "learning_rate": 0.000581532816958826, "loss": 3.5086, "step": 49290 }, { "epoch": 3.349300176654437, "grad_norm": 1.6228718757629395, "learning_rate": 0.0005814903519499932, "loss": 3.4663, "step": 49295 }, { "epoch": 3.3496398967250984, "grad_norm": 1.743025779724121, "learning_rate": 0.0005814478869411605, "loss": 3.4113, "step": 49300 }, { "epoch": 3.3499796167957605, "grad_norm": 2.260340690612793, "learning_rate": 0.0005814054219323278, "loss": 3.4249, "step": 49305 }, { "epoch": 3.350319336866422, "grad_norm": 1.8508179187774658, "learning_rate": 0.0005813629569234951, "loss": 3.4595, "step": 49310 }, { 
"epoch": 3.3506590569370838, "grad_norm": 1.5240752696990967, "learning_rate": 0.0005813204919146623, "loss": 3.5899, "step": 49315 }, { "epoch": 3.350998777007746, "grad_norm": 2.1330740451812744, "learning_rate": 0.0005812780269058296, "loss": 3.5926, "step": 49320 }, { "epoch": 3.3513384970784075, "grad_norm": 2.1190452575683594, "learning_rate": 0.0005812355618969969, "loss": 3.3042, "step": 49325 }, { "epoch": 3.351678217149069, "grad_norm": 1.9212216138839722, "learning_rate": 0.0005811930968881641, "loss": 3.3109, "step": 49330 }, { "epoch": 3.352017937219731, "grad_norm": 2.346827268600464, "learning_rate": 0.0005811506318793315, "loss": 3.3799, "step": 49335 }, { "epoch": 3.352357657290393, "grad_norm": 1.727285623550415, "learning_rate": 0.0005811081668704987, "loss": 3.4366, "step": 49340 }, { "epoch": 3.3526973773610544, "grad_norm": 2.2660393714904785, "learning_rate": 0.000581065701861666, "loss": 3.3033, "step": 49345 }, { "epoch": 3.353037097431716, "grad_norm": 1.836759328842163, "learning_rate": 0.0005810232368528333, "loss": 3.3838, "step": 49350 }, { "epoch": 3.353376817502378, "grad_norm": 2.134249448776245, "learning_rate": 0.0005809807718440005, "loss": 3.1929, "step": 49355 }, { "epoch": 3.35371653757304, "grad_norm": 2.353113889694214, "learning_rate": 0.0005809383068351678, "loss": 3.3064, "step": 49360 }, { "epoch": 3.3540562576437014, "grad_norm": 2.078810930252075, "learning_rate": 0.0005808958418263351, "loss": 3.4453, "step": 49365 }, { "epoch": 3.3543959777143635, "grad_norm": 1.6788089275360107, "learning_rate": 0.0005808533768175024, "loss": 3.4596, "step": 49370 }, { "epoch": 3.354735697785025, "grad_norm": 1.6254063844680786, "learning_rate": 0.0005808109118086697, "loss": 3.3493, "step": 49375 }, { "epoch": 3.3550754178556867, "grad_norm": 1.9853084087371826, "learning_rate": 0.000580768446799837, "loss": 3.3412, "step": 49380 }, { "epoch": 3.355415137926349, "grad_norm": 2.4192135334014893, "learning_rate": 
0.0005807259817910042, "loss": 3.5678, "step": 49385 }, { "epoch": 3.3557548579970105, "grad_norm": 1.3956403732299805, "learning_rate": 0.0005806920097839381, "loss": 3.2231, "step": 49390 }, { "epoch": 3.356094578067672, "grad_norm": 1.7228670120239258, "learning_rate": 0.0005806495447751054, "loss": 3.5989, "step": 49395 }, { "epoch": 3.356434298138334, "grad_norm": 2.060227870941162, "learning_rate": 0.0005806070797662726, "loss": 3.4893, "step": 49400 }, { "epoch": 3.356774018208996, "grad_norm": 1.6338855028152466, "learning_rate": 0.0005805646147574399, "loss": 3.4143, "step": 49405 }, { "epoch": 3.3571137382796574, "grad_norm": 1.8475395441055298, "learning_rate": 0.0005805221497486072, "loss": 3.8233, "step": 49410 }, { "epoch": 3.3574534583503195, "grad_norm": 1.4253703355789185, "learning_rate": 0.0005804796847397744, "loss": 3.262, "step": 49415 }, { "epoch": 3.357793178420981, "grad_norm": 1.656537652015686, "learning_rate": 0.0005804372197309417, "loss": 3.4831, "step": 49420 }, { "epoch": 3.3581328984916428, "grad_norm": 1.724272608757019, "learning_rate": 0.0005803947547221091, "loss": 3.4826, "step": 49425 }, { "epoch": 3.358472618562305, "grad_norm": 1.6249830722808838, "learning_rate": 0.0005803522897132763, "loss": 3.4284, "step": 49430 }, { "epoch": 3.3588123386329665, "grad_norm": 2.0583083629608154, "learning_rate": 0.0005803098247044436, "loss": 3.1996, "step": 49435 }, { "epoch": 3.359152058703628, "grad_norm": 2.290314197540283, "learning_rate": 0.0005802673596956108, "loss": 3.1768, "step": 49440 }, { "epoch": 3.35949177877429, "grad_norm": 1.9390642642974854, "learning_rate": 0.0005802248946867781, "loss": 3.1823, "step": 49445 }, { "epoch": 3.359831498844952, "grad_norm": 1.9786207675933838, "learning_rate": 0.0005801824296779454, "loss": 3.2092, "step": 49450 }, { "epoch": 3.3601712189156134, "grad_norm": 2.945157766342163, "learning_rate": 0.0005801399646691126, "loss": 3.7338, "step": 49455 }, { "epoch": 3.360510938986275, 
"grad_norm": 1.999534010887146, "learning_rate": 0.00058009749966028, "loss": 3.4608, "step": 49460 }, { "epoch": 3.360850659056937, "grad_norm": 2.224175453186035, "learning_rate": 0.0005800550346514473, "loss": 3.2259, "step": 49465 }, { "epoch": 3.3611903791275988, "grad_norm": 1.9551622867584229, "learning_rate": 0.0005800125696426145, "loss": 3.6189, "step": 49470 }, { "epoch": 3.3615300991982604, "grad_norm": 1.7464982271194458, "learning_rate": 0.0005799701046337817, "loss": 3.4519, "step": 49475 }, { "epoch": 3.3618698192689225, "grad_norm": 1.9538719654083252, "learning_rate": 0.0005799276396249491, "loss": 3.2927, "step": 49480 }, { "epoch": 3.362209539339584, "grad_norm": 1.9500126838684082, "learning_rate": 0.0005798851746161163, "loss": 3.3214, "step": 49485 }, { "epoch": 3.3625492594102457, "grad_norm": 2.1273531913757324, "learning_rate": 0.0005798427096072835, "loss": 3.6011, "step": 49490 }, { "epoch": 3.362888979480908, "grad_norm": 2.249049663543701, "learning_rate": 0.000579800244598451, "loss": 3.5893, "step": 49495 }, { "epoch": 3.3632286995515694, "grad_norm": 2.1170005798339844, "learning_rate": 0.0005797577795896182, "loss": 3.3706, "step": 49500 }, { "epoch": 3.363568419622231, "grad_norm": 2.0775537490844727, "learning_rate": 0.0005797153145807854, "loss": 3.3112, "step": 49505 }, { "epoch": 3.363908139692893, "grad_norm": 1.8260912895202637, "learning_rate": 0.0005796728495719528, "loss": 3.4248, "step": 49510 }, { "epoch": 3.364247859763555, "grad_norm": 1.9801737070083618, "learning_rate": 0.00057963038456312, "loss": 3.7053, "step": 49515 }, { "epoch": 3.3645875798342164, "grad_norm": 2.3168129920959473, "learning_rate": 0.0005795879195542872, "loss": 3.5157, "step": 49520 }, { "epoch": 3.3649272999048785, "grad_norm": 1.8986320495605469, "learning_rate": 0.0005795454545454545, "loss": 3.4967, "step": 49525 }, { "epoch": 3.36526701997554, "grad_norm": 2.4123897552490234, "learning_rate": 0.0005795029895366219, "loss": 3.5697, "step": 
49530 }, { "epoch": 3.3656067400462018, "grad_norm": 2.029803991317749, "learning_rate": 0.0005794605245277891, "loss": 3.7896, "step": 49535 }, { "epoch": 3.365946460116864, "grad_norm": 2.1454832553863525, "learning_rate": 0.0005794180595189564, "loss": 3.5454, "step": 49540 }, { "epoch": 3.3662861801875255, "grad_norm": 1.9162073135375977, "learning_rate": 0.0005793755945101237, "loss": 3.5643, "step": 49545 }, { "epoch": 3.366625900258187, "grad_norm": 1.4737119674682617, "learning_rate": 0.0005793331295012909, "loss": 3.5497, "step": 49550 }, { "epoch": 3.366965620328849, "grad_norm": 1.7408980131149292, "learning_rate": 0.0005792906644924582, "loss": 3.5812, "step": 49555 }, { "epoch": 3.367305340399511, "grad_norm": 2.0354740619659424, "learning_rate": 0.0005792481994836255, "loss": 3.2205, "step": 49560 }, { "epoch": 3.3676450604701724, "grad_norm": 2.1571388244628906, "learning_rate": 0.0005792057344747928, "loss": 3.437, "step": 49565 }, { "epoch": 3.3679847805408345, "grad_norm": 2.4140496253967285, "learning_rate": 0.0005791632694659601, "loss": 3.5281, "step": 49570 }, { "epoch": 3.368324500611496, "grad_norm": 1.6843162775039673, "learning_rate": 0.0005791208044571273, "loss": 3.4623, "step": 49575 }, { "epoch": 3.3686642206821578, "grad_norm": 2.141275405883789, "learning_rate": 0.0005790783394482946, "loss": 3.6181, "step": 49580 }, { "epoch": 3.36900394075282, "grad_norm": 2.3597257137298584, "learning_rate": 0.0005790358744394619, "loss": 3.2972, "step": 49585 }, { "epoch": 3.3693436608234815, "grad_norm": 1.4293310642242432, "learning_rate": 0.0005789934094306291, "loss": 3.2844, "step": 49590 }, { "epoch": 3.369683380894143, "grad_norm": 2.018113136291504, "learning_rate": 0.0005789509444217964, "loss": 3.5367, "step": 49595 }, { "epoch": 3.370023100964805, "grad_norm": 1.9665203094482422, "learning_rate": 0.0005789084794129638, "loss": 3.4004, "step": 49600 }, { "epoch": 3.370362821035467, "grad_norm": 1.726584553718567, "learning_rate": 
0.000578866014404131, "loss": 3.5614, "step": 49605 }, { "epoch": 3.3707025411061284, "grad_norm": 1.8588389158248901, "learning_rate": 0.0005788235493952983, "loss": 3.516, "step": 49610 }, { "epoch": 3.3710422611767905, "grad_norm": 1.4033305644989014, "learning_rate": 0.0005787810843864656, "loss": 3.3024, "step": 49615 }, { "epoch": 3.371381981247452, "grad_norm": 1.9547865390777588, "learning_rate": 0.0005787386193776328, "loss": 3.4977, "step": 49620 }, { "epoch": 3.3717217013181138, "grad_norm": 2.4719080924987793, "learning_rate": 0.0005786961543688, "loss": 3.4259, "step": 49625 }, { "epoch": 3.372061421388776, "grad_norm": 2.1513025760650635, "learning_rate": 0.0005786536893599674, "loss": 3.5222, "step": 49630 }, { "epoch": 3.3724011414594375, "grad_norm": 2.082242250442505, "learning_rate": 0.0005786112243511347, "loss": 3.3086, "step": 49635 }, { "epoch": 3.372740861530099, "grad_norm": 1.7409496307373047, "learning_rate": 0.0005785687593423019, "loss": 3.3831, "step": 49640 }, { "epoch": 3.373080581600761, "grad_norm": 1.7872347831726074, "learning_rate": 0.0005785262943334693, "loss": 3.3981, "step": 49645 }, { "epoch": 3.373420301671423, "grad_norm": 1.509894847869873, "learning_rate": 0.0005784838293246365, "loss": 3.5286, "step": 49650 }, { "epoch": 3.3737600217420844, "grad_norm": 2.054478883743286, "learning_rate": 0.0005784413643158037, "loss": 3.5699, "step": 49655 }, { "epoch": 3.3740997418127465, "grad_norm": 2.136624574661255, "learning_rate": 0.0005783988993069711, "loss": 3.3238, "step": 49660 }, { "epoch": 3.374439461883408, "grad_norm": 1.7034759521484375, "learning_rate": 0.0005783564342981383, "loss": 3.4683, "step": 49665 }, { "epoch": 3.37477918195407, "grad_norm": 1.6400409936904907, "learning_rate": 0.0005783139692893056, "loss": 3.5245, "step": 49670 }, { "epoch": 3.375118902024732, "grad_norm": 1.7568819522857666, "learning_rate": 0.000578271504280473, "loss": 3.3297, "step": 49675 }, { "epoch": 3.3754586220953935, "grad_norm": 
2.204331636428833, "learning_rate": 0.0005782290392716402, "loss": 3.2857, "step": 49680 }, { "epoch": 3.375798342166055, "grad_norm": 1.6631886959075928, "learning_rate": 0.0005781865742628074, "loss": 3.423, "step": 49685 }, { "epoch": 3.3761380622367168, "grad_norm": 2.2188339233398438, "learning_rate": 0.0005781441092539747, "loss": 3.459, "step": 49690 }, { "epoch": 3.376477782307379, "grad_norm": 1.8844051361083984, "learning_rate": 0.000578101644245142, "loss": 3.658, "step": 49695 }, { "epoch": 3.3768175023780405, "grad_norm": 1.9578137397766113, "learning_rate": 0.0005780591792363092, "loss": 3.3792, "step": 49700 }, { "epoch": 3.377157222448702, "grad_norm": 1.745198369026184, "learning_rate": 0.0005780167142274766, "loss": 3.3484, "step": 49705 }, { "epoch": 3.377496942519364, "grad_norm": 1.9859095811843872, "learning_rate": 0.0005779742492186439, "loss": 3.4599, "step": 49710 }, { "epoch": 3.377836662590026, "grad_norm": 1.4655535221099854, "learning_rate": 0.0005779317842098111, "loss": 3.3322, "step": 49715 }, { "epoch": 3.3781763826606874, "grad_norm": 1.7466710805892944, "learning_rate": 0.0005778893192009784, "loss": 3.5444, "step": 49720 }, { "epoch": 3.3785161027313495, "grad_norm": 1.9031509160995483, "learning_rate": 0.0005778468541921456, "loss": 3.3423, "step": 49725 }, { "epoch": 3.378855822802011, "grad_norm": 1.8958418369293213, "learning_rate": 0.000577804389183313, "loss": 3.4876, "step": 49730 }, { "epoch": 3.3791955428726728, "grad_norm": 2.1591427326202393, "learning_rate": 0.0005777619241744803, "loss": 3.4646, "step": 49735 }, { "epoch": 3.379535262943335, "grad_norm": 2.116861343383789, "learning_rate": 0.0005777194591656475, "loss": 3.4634, "step": 49740 }, { "epoch": 3.3798749830139965, "grad_norm": 2.293529510498047, "learning_rate": 0.0005776769941568149, "loss": 3.3944, "step": 49745 }, { "epoch": 3.380214703084658, "grad_norm": 2.1134142875671387, "learning_rate": 0.0005776345291479821, "loss": 3.3831, "step": 49750 }, { 
"epoch": 3.38055442315532, "grad_norm": 1.711585521697998, "learning_rate": 0.0005775920641391493, "loss": 3.3768, "step": 49755 }, { "epoch": 3.380894143225982, "grad_norm": 2.082655429840088, "learning_rate": 0.0005775495991303167, "loss": 3.1875, "step": 49760 }, { "epoch": 3.3812338632966434, "grad_norm": 2.1252589225769043, "learning_rate": 0.0005775071341214839, "loss": 3.5049, "step": 49765 }, { "epoch": 3.3815735833673055, "grad_norm": 1.6873582601547241, "learning_rate": 0.0005774646691126512, "loss": 3.2262, "step": 49770 }, { "epoch": 3.381913303437967, "grad_norm": 1.9074841737747192, "learning_rate": 0.0005774222041038186, "loss": 3.5334, "step": 49775 }, { "epoch": 3.3822530235086288, "grad_norm": 2.00986647605896, "learning_rate": 0.0005773797390949858, "loss": 3.6056, "step": 49780 }, { "epoch": 3.382592743579291, "grad_norm": 1.5847067832946777, "learning_rate": 0.000577337274086153, "loss": 3.5083, "step": 49785 }, { "epoch": 3.3829324636499525, "grad_norm": 1.4886356592178345, "learning_rate": 0.0005772948090773203, "loss": 3.534, "step": 49790 }, { "epoch": 3.383272183720614, "grad_norm": 1.6394901275634766, "learning_rate": 0.0005772523440684876, "loss": 3.4471, "step": 49795 }, { "epoch": 3.3836119037912757, "grad_norm": 1.746172547340393, "learning_rate": 0.0005772098790596548, "loss": 3.4691, "step": 49800 }, { "epoch": 3.383951623861938, "grad_norm": 1.62233304977417, "learning_rate": 0.0005771674140508222, "loss": 3.1701, "step": 49805 }, { "epoch": 3.3842913439325994, "grad_norm": 1.8405243158340454, "learning_rate": 0.0005771249490419895, "loss": 3.5547, "step": 49810 }, { "epoch": 3.384631064003261, "grad_norm": 1.8449677228927612, "learning_rate": 0.0005770824840331567, "loss": 3.4607, "step": 49815 }, { "epoch": 3.384970784073923, "grad_norm": 1.7668062448501587, "learning_rate": 0.000577040019024324, "loss": 3.3789, "step": 49820 }, { "epoch": 3.385310504144585, "grad_norm": 1.5713622570037842, "learning_rate": 0.0005769975540154912, 
"loss": 3.423, "step": 49825 }, { "epoch": 3.3856502242152464, "grad_norm": 2.4901256561279297, "learning_rate": 0.0005769550890066585, "loss": 3.6619, "step": 49830 }, { "epoch": 3.3859899442859085, "grad_norm": 2.3122754096984863, "learning_rate": 0.0005769126239978258, "loss": 3.7696, "step": 49835 }, { "epoch": 3.38632966435657, "grad_norm": 1.7104943990707397, "learning_rate": 0.0005768701589889931, "loss": 3.2429, "step": 49840 }, { "epoch": 3.3866693844272318, "grad_norm": 2.045361042022705, "learning_rate": 0.0005768276939801604, "loss": 3.3192, "step": 49845 }, { "epoch": 3.387009104497894, "grad_norm": 1.9890762567520142, "learning_rate": 0.0005767852289713277, "loss": 3.5642, "step": 49850 }, { "epoch": 3.3873488245685555, "grad_norm": 1.9214777946472168, "learning_rate": 0.0005767427639624949, "loss": 3.4846, "step": 49855 }, { "epoch": 3.387688544639217, "grad_norm": 2.2329373359680176, "learning_rate": 0.0005767002989536622, "loss": 3.4722, "step": 49860 }, { "epoch": 3.388028264709879, "grad_norm": 1.7532614469528198, "learning_rate": 0.0005766578339448295, "loss": 3.4871, "step": 49865 }, { "epoch": 3.388367984780541, "grad_norm": 2.205948829650879, "learning_rate": 0.0005766153689359967, "loss": 3.5223, "step": 49870 }, { "epoch": 3.3887077048512024, "grad_norm": 2.0812244415283203, "learning_rate": 0.000576572903927164, "loss": 3.6456, "step": 49875 }, { "epoch": 3.3890474249218645, "grad_norm": 1.952055811882019, "learning_rate": 0.0005765304389183314, "loss": 3.5135, "step": 49880 }, { "epoch": 3.389387144992526, "grad_norm": 2.4456098079681396, "learning_rate": 0.0005764879739094986, "loss": 3.3759, "step": 49885 }, { "epoch": 3.3897268650631878, "grad_norm": 2.3227145671844482, "learning_rate": 0.0005764455089006658, "loss": 3.3248, "step": 49890 }, { "epoch": 3.39006658513385, "grad_norm": 1.873034954071045, "learning_rate": 0.0005764030438918332, "loss": 3.542, "step": 49895 }, { "epoch": 3.3904063052045115, "grad_norm": 2.333099842071533, 
"learning_rate": 0.0005763605788830004, "loss": 3.6742, "step": 49900 }, { "epoch": 3.390746025275173, "grad_norm": 2.2457447052001953, "learning_rate": 0.0005763181138741676, "loss": 3.4224, "step": 49905 }, { "epoch": 3.391085745345835, "grad_norm": 1.9893593788146973, "learning_rate": 0.0005762756488653351, "loss": 3.4106, "step": 49910 }, { "epoch": 3.391425465416497, "grad_norm": 2.331895112991333, "learning_rate": 0.0005762331838565023, "loss": 3.352, "step": 49915 }, { "epoch": 3.3917651854871584, "grad_norm": 2.339909076690674, "learning_rate": 0.0005761907188476695, "loss": 3.2942, "step": 49920 }, { "epoch": 3.3921049055578205, "grad_norm": 2.154444932937622, "learning_rate": 0.0005761482538388368, "loss": 3.5472, "step": 49925 }, { "epoch": 3.392444625628482, "grad_norm": 1.9177024364471436, "learning_rate": 0.0005761057888300041, "loss": 3.4435, "step": 49930 }, { "epoch": 3.3927843456991438, "grad_norm": 2.1303296089172363, "learning_rate": 0.0005760633238211713, "loss": 3.6801, "step": 49935 }, { "epoch": 3.393124065769806, "grad_norm": 2.7427453994750977, "learning_rate": 0.0005760208588123386, "loss": 3.4281, "step": 49940 }, { "epoch": 3.3934637858404675, "grad_norm": 1.9766734838485718, "learning_rate": 0.000575978393803506, "loss": 3.5524, "step": 49945 }, { "epoch": 3.393803505911129, "grad_norm": 1.7412222623825073, "learning_rate": 0.0005759359287946732, "loss": 3.4082, "step": 49950 }, { "epoch": 3.394143225981791, "grad_norm": 1.5165077447891235, "learning_rate": 0.0005758934637858405, "loss": 3.5877, "step": 49955 }, { "epoch": 3.394482946052453, "grad_norm": 1.6437585353851318, "learning_rate": 0.0005758509987770078, "loss": 3.4965, "step": 49960 }, { "epoch": 3.3948226661231145, "grad_norm": 1.7158472537994385, "learning_rate": 0.000575808533768175, "loss": 3.5175, "step": 49965 }, { "epoch": 3.3951623861937765, "grad_norm": 1.6667547225952148, "learning_rate": 0.0005757660687593423, "loss": 3.3432, "step": 49970 }, { "epoch": 
3.395502106264438, "grad_norm": 1.5104236602783203, "learning_rate": 0.0005757236037505095, "loss": 3.5011, "step": 49975 }, { "epoch": 3.3958418263351, "grad_norm": 2.1447880268096924, "learning_rate": 0.0005756811387416769, "loss": 3.5194, "step": 49980 }, { "epoch": 3.396181546405762, "grad_norm": 1.644206166267395, "learning_rate": 0.0005756386737328442, "loss": 3.4028, "step": 49985 }, { "epoch": 3.3965212664764235, "grad_norm": 1.697945475578308, "learning_rate": 0.0005755962087240114, "loss": 3.5333, "step": 49990 }, { "epoch": 3.396860986547085, "grad_norm": 1.6152082681655884, "learning_rate": 0.0005755537437151787, "loss": 3.324, "step": 49995 }, { "epoch": 3.397200706617747, "grad_norm": 1.699716329574585, "learning_rate": 0.000575511278706346, "loss": 3.5213, "step": 50000 }, { "epoch": 3.397540426688409, "grad_norm": 1.9274173974990845, "learning_rate": 0.0005754688136975132, "loss": 3.5032, "step": 50005 }, { "epoch": 3.3978801467590705, "grad_norm": 2.161248207092285, "learning_rate": 0.0005754263486886804, "loss": 3.6543, "step": 50010 }, { "epoch": 3.3982198668297325, "grad_norm": 1.8394718170166016, "learning_rate": 0.0005753838836798479, "loss": 3.575, "step": 50015 }, { "epoch": 3.398559586900394, "grad_norm": 1.8651001453399658, "learning_rate": 0.0005753414186710151, "loss": 3.3396, "step": 50020 }, { "epoch": 3.398899306971056, "grad_norm": 2.2249755859375, "learning_rate": 0.0005752989536621823, "loss": 3.3991, "step": 50025 }, { "epoch": 3.3992390270417174, "grad_norm": 1.537339687347412, "learning_rate": 0.0005752564886533497, "loss": 3.3927, "step": 50030 }, { "epoch": 3.3995787471123795, "grad_norm": 1.8215932846069336, "learning_rate": 0.0005752140236445169, "loss": 3.5036, "step": 50035 }, { "epoch": 3.399918467183041, "grad_norm": 1.7600674629211426, "learning_rate": 0.0005751715586356841, "loss": 3.6653, "step": 50040 }, { "epoch": 3.4002581872537028, "grad_norm": 2.2920284271240234, "learning_rate": 0.0005751290936268515, "loss": 
3.3788, "step": 50045 }, { "epoch": 3.400597907324365, "grad_norm": 1.7816781997680664, "learning_rate": 0.0005750866286180188, "loss": 3.455, "step": 50050 }, { "epoch": 3.4009376273950265, "grad_norm": 2.0395350456237793, "learning_rate": 0.000575044163609186, "loss": 3.4816, "step": 50055 }, { "epoch": 3.401277347465688, "grad_norm": 1.9060719013214111, "learning_rate": 0.0005750016986003534, "loss": 3.2952, "step": 50060 }, { "epoch": 3.40161706753635, "grad_norm": 1.8800588846206665, "learning_rate": 0.0005749592335915206, "loss": 3.5694, "step": 50065 }, { "epoch": 3.401956787607012, "grad_norm": 1.8732824325561523, "learning_rate": 0.0005749167685826879, "loss": 3.4264, "step": 50070 }, { "epoch": 3.4022965076776734, "grad_norm": 2.2993788719177246, "learning_rate": 0.0005748743035738551, "loss": 3.5528, "step": 50075 }, { "epoch": 3.4026362277483355, "grad_norm": 1.5891200304031372, "learning_rate": 0.0005748318385650224, "loss": 3.1561, "step": 50080 }, { "epoch": 3.402975947818997, "grad_norm": 1.6828067302703857, "learning_rate": 0.0005747893735561898, "loss": 3.5795, "step": 50085 }, { "epoch": 3.403315667889659, "grad_norm": 1.6472727060317993, "learning_rate": 0.000574746908547357, "loss": 3.3748, "step": 50090 }, { "epoch": 3.403655387960321, "grad_norm": 2.3344480991363525, "learning_rate": 0.0005747044435385243, "loss": 3.6266, "step": 50095 }, { "epoch": 3.4039951080309825, "grad_norm": 1.8339158296585083, "learning_rate": 0.0005746619785296916, "loss": 3.3805, "step": 50100 }, { "epoch": 3.404334828101644, "grad_norm": 2.24326229095459, "learning_rate": 0.0005746195135208588, "loss": 3.4759, "step": 50105 }, { "epoch": 3.404674548172306, "grad_norm": 1.933663249015808, "learning_rate": 0.000574577048512026, "loss": 3.5281, "step": 50110 }, { "epoch": 3.405014268242968, "grad_norm": 2.1570096015930176, "learning_rate": 0.0005745345835031934, "loss": 3.5697, "step": 50115 }, { "epoch": 3.4053539883136295, "grad_norm": 1.7578321695327759, 
"learning_rate": 0.0005744921184943607, "loss": 3.6375, "step": 50120 }, { "epoch": 3.4056937083842915, "grad_norm": 2.242614984512329, "learning_rate": 0.0005744496534855279, "loss": 3.5551, "step": 50125 }, { "epoch": 3.406033428454953, "grad_norm": 1.6742323637008667, "learning_rate": 0.0005744071884766953, "loss": 3.6062, "step": 50130 }, { "epoch": 3.406373148525615, "grad_norm": 1.778421401977539, "learning_rate": 0.0005743647234678625, "loss": 3.3477, "step": 50135 }, { "epoch": 3.4067128685962764, "grad_norm": 1.7463288307189941, "learning_rate": 0.0005743222584590297, "loss": 3.5492, "step": 50140 }, { "epoch": 3.4070525886669385, "grad_norm": 1.8485682010650635, "learning_rate": 0.0005742797934501971, "loss": 3.4369, "step": 50145 }, { "epoch": 3.4073923087376, "grad_norm": 2.128166437149048, "learning_rate": 0.0005742373284413643, "loss": 3.4119, "step": 50150 }, { "epoch": 3.4077320288082618, "grad_norm": 1.9802231788635254, "learning_rate": 0.0005741948634325316, "loss": 3.5814, "step": 50155 }, { "epoch": 3.408071748878924, "grad_norm": 1.8571150302886963, "learning_rate": 0.000574152398423699, "loss": 3.4302, "step": 50160 }, { "epoch": 3.4084114689495855, "grad_norm": 2.007990837097168, "learning_rate": 0.0005741099334148662, "loss": 3.6784, "step": 50165 }, { "epoch": 3.408751189020247, "grad_norm": 1.8863131999969482, "learning_rate": 0.0005740674684060334, "loss": 3.5038, "step": 50170 }, { "epoch": 3.409090909090909, "grad_norm": 1.8607031106948853, "learning_rate": 0.0005740250033972007, "loss": 3.3589, "step": 50175 }, { "epoch": 3.409430629161571, "grad_norm": 1.7159827947616577, "learning_rate": 0.000573982538388368, "loss": 3.4225, "step": 50180 }, { "epoch": 3.4097703492322324, "grad_norm": 2.6051034927368164, "learning_rate": 0.0005739400733795352, "loss": 3.4558, "step": 50185 }, { "epoch": 3.4101100693028945, "grad_norm": 2.0911715030670166, "learning_rate": 0.0005738976083707026, "loss": 3.4322, "step": 50190 }, { "epoch": 
3.410449789373556, "grad_norm": 1.8393439054489136, "learning_rate": 0.0005738551433618699, "loss": 3.1961, "step": 50195 }, { "epoch": 3.4107895094442178, "grad_norm": 3.0791115760803223, "learning_rate": 0.0005738126783530371, "loss": 3.6154, "step": 50200 }, { "epoch": 3.41112922951488, "grad_norm": 2.4286563396453857, "learning_rate": 0.0005737702133442044, "loss": 3.5781, "step": 50205 }, { "epoch": 3.4114689495855415, "grad_norm": 1.6909178495407104, "learning_rate": 0.0005737277483353716, "loss": 3.2023, "step": 50210 }, { "epoch": 3.411808669656203, "grad_norm": 1.7838259935379028, "learning_rate": 0.0005736852833265389, "loss": 3.5801, "step": 50215 }, { "epoch": 3.412148389726865, "grad_norm": 1.593294620513916, "learning_rate": 0.0005736428183177062, "loss": 3.4115, "step": 50220 }, { "epoch": 3.412488109797527, "grad_norm": 2.4117801189422607, "learning_rate": 0.0005736003533088735, "loss": 3.3039, "step": 50225 }, { "epoch": 3.4128278298681884, "grad_norm": 2.8864824771881104, "learning_rate": 0.0005735578883000408, "loss": 3.3955, "step": 50230 }, { "epoch": 3.4131675499388505, "grad_norm": 1.9253531694412231, "learning_rate": 0.0005735154232912081, "loss": 3.1714, "step": 50235 }, { "epoch": 3.413507270009512, "grad_norm": 1.604238510131836, "learning_rate": 0.0005734729582823753, "loss": 3.4738, "step": 50240 }, { "epoch": 3.413846990080174, "grad_norm": 1.9421863555908203, "learning_rate": 0.0005734304932735426, "loss": 3.2676, "step": 50245 }, { "epoch": 3.414186710150836, "grad_norm": 1.7813549041748047, "learning_rate": 0.0005733880282647099, "loss": 3.6012, "step": 50250 }, { "epoch": 3.4145264302214975, "grad_norm": 1.7647643089294434, "learning_rate": 0.0005733455632558771, "loss": 3.2911, "step": 50255 }, { "epoch": 3.414866150292159, "grad_norm": 1.9346832036972046, "learning_rate": 0.0005733030982470444, "loss": 3.6015, "step": 50260 }, { "epoch": 3.415205870362821, "grad_norm": 1.9102439880371094, "learning_rate": 0.0005732606332382118, 
"loss": 3.633, "step": 50265 }, { "epoch": 3.415545590433483, "grad_norm": 2.0448873043060303, "learning_rate": 0.000573218168229379, "loss": 3.5539, "step": 50270 }, { "epoch": 3.4158853105041445, "grad_norm": 2.024092197418213, "learning_rate": 0.0005731757032205462, "loss": 3.4169, "step": 50275 }, { "epoch": 3.4162250305748065, "grad_norm": 1.6181548833847046, "learning_rate": 0.0005731332382117136, "loss": 3.6358, "step": 50280 }, { "epoch": 3.416564750645468, "grad_norm": 1.9569207429885864, "learning_rate": 0.0005730907732028808, "loss": 3.4443, "step": 50285 }, { "epoch": 3.41690447071613, "grad_norm": 1.5837208032608032, "learning_rate": 0.000573048308194048, "loss": 3.5921, "step": 50290 }, { "epoch": 3.417244190786792, "grad_norm": 1.7498940229415894, "learning_rate": 0.0005730058431852155, "loss": 3.4237, "step": 50295 }, { "epoch": 3.4175839108574535, "grad_norm": 1.7716060876846313, "learning_rate": 0.0005729633781763827, "loss": 3.3897, "step": 50300 }, { "epoch": 3.417923630928115, "grad_norm": 1.5955809354782104, "learning_rate": 0.0005729209131675499, "loss": 3.2928, "step": 50305 }, { "epoch": 3.418263350998777, "grad_norm": 1.8564586639404297, "learning_rate": 0.0005728784481587172, "loss": 3.4378, "step": 50310 }, { "epoch": 3.418603071069439, "grad_norm": 1.9531959295272827, "learning_rate": 0.0005728359831498845, "loss": 3.3118, "step": 50315 }, { "epoch": 3.4189427911401005, "grad_norm": 1.9289170503616333, "learning_rate": 0.0005727935181410517, "loss": 3.4503, "step": 50320 }, { "epoch": 3.4192825112107625, "grad_norm": 1.876869797706604, "learning_rate": 0.0005727510531322191, "loss": 3.4419, "step": 50325 }, { "epoch": 3.419622231281424, "grad_norm": 2.104480028152466, "learning_rate": 0.0005727085881233864, "loss": 3.6597, "step": 50330 }, { "epoch": 3.419961951352086, "grad_norm": 1.7303661108016968, "learning_rate": 0.0005726661231145536, "loss": 3.3478, "step": 50335 }, { "epoch": 3.420301671422748, "grad_norm": 2.2306830883026123, 
"learning_rate": 0.0005726236581057209, "loss": 3.7127, "step": 50340 }, { "epoch": 3.4206413914934095, "grad_norm": 2.0671274662017822, "learning_rate": 0.0005725811930968882, "loss": 3.4773, "step": 50345 }, { "epoch": 3.420981111564071, "grad_norm": 1.878821849822998, "learning_rate": 0.0005725387280880554, "loss": 3.5303, "step": 50350 }, { "epoch": 3.421320831634733, "grad_norm": 1.9958022832870483, "learning_rate": 0.0005724962630792227, "loss": 3.4687, "step": 50355 }, { "epoch": 3.421660551705395, "grad_norm": 2.1407454013824463, "learning_rate": 0.00057245379807039, "loss": 3.3393, "step": 50360 }, { "epoch": 3.4220002717760565, "grad_norm": 1.5343798398971558, "learning_rate": 0.0005724113330615573, "loss": 3.3659, "step": 50365 }, { "epoch": 3.422339991846718, "grad_norm": 1.8655325174331665, "learning_rate": 0.0005723688680527246, "loss": 3.5011, "step": 50370 }, { "epoch": 3.42267971191738, "grad_norm": 2.1244046688079834, "learning_rate": 0.0005723264030438918, "loss": 3.5248, "step": 50375 }, { "epoch": 3.423019431988042, "grad_norm": 2.154606819152832, "learning_rate": 0.0005722839380350591, "loss": 3.3294, "step": 50380 }, { "epoch": 3.4233591520587034, "grad_norm": 2.1521050930023193, "learning_rate": 0.0005722414730262264, "loss": 3.6277, "step": 50385 }, { "epoch": 3.4236988721293655, "grad_norm": 1.7792236804962158, "learning_rate": 0.0005721990080173936, "loss": 3.2966, "step": 50390 }, { "epoch": 3.424038592200027, "grad_norm": 1.5516672134399414, "learning_rate": 0.000572156543008561, "loss": 3.4174, "step": 50395 }, { "epoch": 3.424378312270689, "grad_norm": 1.7614659070968628, "learning_rate": 0.0005721140779997283, "loss": 3.4067, "step": 50400 }, { "epoch": 3.424718032341351, "grad_norm": 1.8918920755386353, "learning_rate": 0.0005720716129908955, "loss": 3.3145, "step": 50405 }, { "epoch": 3.4250577524120125, "grad_norm": 2.091484308242798, "learning_rate": 0.0005720291479820629, "loss": 3.4773, "step": 50410 }, { "epoch": 
3.425397472482674, "grad_norm": 2.04561185836792, "learning_rate": 0.0005719866829732301, "loss": 3.3102, "step": 50415 }, { "epoch": 3.425737192553336, "grad_norm": 1.8157917261123657, "learning_rate": 0.0005719442179643973, "loss": 3.4465, "step": 50420 }, { "epoch": 3.426076912623998, "grad_norm": 1.906151533126831, "learning_rate": 0.0005719017529555646, "loss": 3.5436, "step": 50425 }, { "epoch": 3.4264166326946595, "grad_norm": 1.7150171995162964, "learning_rate": 0.000571859287946732, "loss": 3.4928, "step": 50430 }, { "epoch": 3.4267563527653215, "grad_norm": 1.5990688800811768, "learning_rate": 0.0005718168229378992, "loss": 3.2713, "step": 50435 }, { "epoch": 3.427096072835983, "grad_norm": 2.1458394527435303, "learning_rate": 0.0005717743579290665, "loss": 3.3293, "step": 50440 }, { "epoch": 3.427435792906645, "grad_norm": 1.5716561079025269, "learning_rate": 0.0005717318929202338, "loss": 3.4324, "step": 50445 }, { "epoch": 3.427775512977307, "grad_norm": 1.9222768545150757, "learning_rate": 0.000571689427911401, "loss": 3.2841, "step": 50450 }, { "epoch": 3.4281152330479685, "grad_norm": 1.6767876148223877, "learning_rate": 0.0005716469629025683, "loss": 3.4563, "step": 50455 }, { "epoch": 3.42845495311863, "grad_norm": 2.09551739692688, "learning_rate": 0.0005716044978937355, "loss": 3.5897, "step": 50460 }, { "epoch": 3.428794673189292, "grad_norm": 2.1509101390838623, "learning_rate": 0.0005715620328849029, "loss": 3.2622, "step": 50465 }, { "epoch": 3.429134393259954, "grad_norm": 1.6475797891616821, "learning_rate": 0.0005715195678760702, "loss": 3.467, "step": 50470 }, { "epoch": 3.4294741133306155, "grad_norm": 1.76125967502594, "learning_rate": 0.0005714771028672374, "loss": 3.3983, "step": 50475 }, { "epoch": 3.429813833401277, "grad_norm": 1.7800369262695312, "learning_rate": 0.0005714346378584047, "loss": 3.5663, "step": 50480 }, { "epoch": 3.430153553471939, "grad_norm": 1.8979827165603638, "learning_rate": 0.000571392172849572, "loss": 
3.5694, "step": 50485 }, { "epoch": 3.430493273542601, "grad_norm": 1.3507866859436035, "learning_rate": 0.0005713497078407392, "loss": 3.3891, "step": 50490 }, { "epoch": 3.4308329936132624, "grad_norm": 2.2149291038513184, "learning_rate": 0.0005713072428319064, "loss": 3.3148, "step": 50495 }, { "epoch": 3.4311727136839245, "grad_norm": 1.8230637311935425, "learning_rate": 0.0005712647778230739, "loss": 3.5983, "step": 50500 }, { "epoch": 3.431512433754586, "grad_norm": 1.5664583444595337, "learning_rate": 0.0005712223128142411, "loss": 3.4086, "step": 50505 }, { "epoch": 3.4318521538252478, "grad_norm": 1.9650788307189941, "learning_rate": 0.0005711798478054083, "loss": 3.2557, "step": 50510 }, { "epoch": 3.43219187389591, "grad_norm": 2.570908546447754, "learning_rate": 0.0005711373827965757, "loss": 3.5289, "step": 50515 }, { "epoch": 3.4325315939665715, "grad_norm": 2.283437967300415, "learning_rate": 0.0005710949177877429, "loss": 3.4906, "step": 50520 }, { "epoch": 3.432871314037233, "grad_norm": 1.9161657094955444, "learning_rate": 0.0005710524527789101, "loss": 3.1823, "step": 50525 }, { "epoch": 3.433211034107895, "grad_norm": 1.747236967086792, "learning_rate": 0.0005710099877700775, "loss": 3.5077, "step": 50530 }, { "epoch": 3.433550754178557, "grad_norm": 2.23516583442688, "learning_rate": 0.0005709675227612448, "loss": 3.2786, "step": 50535 }, { "epoch": 3.4338904742492184, "grad_norm": 1.9871057271957397, "learning_rate": 0.000570925057752412, "loss": 3.4748, "step": 50540 }, { "epoch": 3.4342301943198805, "grad_norm": 1.8576210737228394, "learning_rate": 0.0005708825927435794, "loss": 3.4587, "step": 50545 }, { "epoch": 3.434569914390542, "grad_norm": 1.9274260997772217, "learning_rate": 0.0005708401277347466, "loss": 3.5134, "step": 50550 }, { "epoch": 3.434909634461204, "grad_norm": 1.7894905805587769, "learning_rate": 0.0005707976627259138, "loss": 3.3994, "step": 50555 }, { "epoch": 3.435249354531866, "grad_norm": 2.0913760662078857, 
"learning_rate": 0.0005707551977170811, "loss": 3.6719, "step": 50560 }, { "epoch": 3.4355890746025275, "grad_norm": 1.7670429944992065, "learning_rate": 0.0005707127327082484, "loss": 3.4307, "step": 50565 }, { "epoch": 3.435928794673189, "grad_norm": 1.5656447410583496, "learning_rate": 0.0005706702676994157, "loss": 3.3826, "step": 50570 }, { "epoch": 3.436268514743851, "grad_norm": 1.7139209508895874, "learning_rate": 0.000570627802690583, "loss": 3.3224, "step": 50575 }, { "epoch": 3.436608234814513, "grad_norm": 2.158568859100342, "learning_rate": 0.0005705853376817503, "loss": 3.5445, "step": 50580 }, { "epoch": 3.4369479548851745, "grad_norm": 2.4625179767608643, "learning_rate": 0.0005705428726729175, "loss": 3.6854, "step": 50585 }, { "epoch": 3.4372876749558365, "grad_norm": 2.3531391620635986, "learning_rate": 0.0005705004076640848, "loss": 3.4922, "step": 50590 }, { "epoch": 3.437627395026498, "grad_norm": 2.1843950748443604, "learning_rate": 0.000570457942655252, "loss": 3.6549, "step": 50595 }, { "epoch": 3.43796711509716, "grad_norm": 1.5481890439987183, "learning_rate": 0.0005704154776464193, "loss": 3.5804, "step": 50600 }, { "epoch": 3.438306835167822, "grad_norm": 1.5113122463226318, "learning_rate": 0.0005703730126375867, "loss": 3.4866, "step": 50605 }, { "epoch": 3.4386465552384835, "grad_norm": 1.9328553676605225, "learning_rate": 0.0005703305476287539, "loss": 3.4377, "step": 50610 }, { "epoch": 3.438986275309145, "grad_norm": 1.7552860975265503, "learning_rate": 0.0005702880826199212, "loss": 3.1403, "step": 50615 }, { "epoch": 3.439325995379807, "grad_norm": 2.348480463027954, "learning_rate": 0.0005702456176110885, "loss": 3.577, "step": 50620 }, { "epoch": 3.439665715450469, "grad_norm": 1.9851948022842407, "learning_rate": 0.0005702031526022557, "loss": 3.4651, "step": 50625 }, { "epoch": 3.4400054355211305, "grad_norm": 1.8739817142486572, "learning_rate": 0.000570160687593423, "loss": 3.6306, "step": 50630 }, { "epoch": 
3.4403451555917925, "grad_norm": 2.1254944801330566, "learning_rate": 0.0005701182225845903, "loss": 3.3927, "step": 50635 }, { "epoch": 3.440684875662454, "grad_norm": 1.7314409017562866, "learning_rate": 0.0005700757575757576, "loss": 3.5148, "step": 50640 }, { "epoch": 3.441024595733116, "grad_norm": 2.304077386856079, "learning_rate": 0.0005700332925669249, "loss": 3.6824, "step": 50645 }, { "epoch": 3.441364315803778, "grad_norm": 1.7200849056243896, "learning_rate": 0.0005699908275580922, "loss": 3.4872, "step": 50650 }, { "epoch": 3.4417040358744395, "grad_norm": 2.692547559738159, "learning_rate": 0.0005699483625492594, "loss": 3.6284, "step": 50655 }, { "epoch": 3.442043755945101, "grad_norm": 1.626230239868164, "learning_rate": 0.0005699058975404266, "loss": 3.4731, "step": 50660 }, { "epoch": 3.442383476015763, "grad_norm": 1.3427497148513794, "learning_rate": 0.000569863432531594, "loss": 3.5812, "step": 50665 }, { "epoch": 3.442723196086425, "grad_norm": 1.8710161447525024, "learning_rate": 0.0005698209675227612, "loss": 3.4419, "step": 50670 }, { "epoch": 3.4430629161570865, "grad_norm": 1.9942619800567627, "learning_rate": 0.0005697785025139285, "loss": 3.3764, "step": 50675 }, { "epoch": 3.4434026362277486, "grad_norm": 1.7818019390106201, "learning_rate": 0.0005697360375050959, "loss": 3.554, "step": 50680 }, { "epoch": 3.44374235629841, "grad_norm": 1.9108713865280151, "learning_rate": 0.0005696935724962631, "loss": 3.6075, "step": 50685 }, { "epoch": 3.444082076369072, "grad_norm": 2.282614231109619, "learning_rate": 0.0005696511074874303, "loss": 3.6082, "step": 50690 }, { "epoch": 3.444421796439734, "grad_norm": 1.8968225717544556, "learning_rate": 0.0005696086424785977, "loss": 3.5337, "step": 50695 }, { "epoch": 3.4447615165103955, "grad_norm": 1.3748499155044556, "learning_rate": 0.0005695661774697649, "loss": 3.385, "step": 50700 }, { "epoch": 3.445101236581057, "grad_norm": 1.9294074773788452, "learning_rate": 0.0005695237124609321, 
"loss": 3.4313, "step": 50705 }, { "epoch": 3.4454409566517192, "grad_norm": 1.851098656654358, "learning_rate": 0.0005694812474520995, "loss": 3.5458, "step": 50710 }, { "epoch": 3.445780676722381, "grad_norm": 2.3079471588134766, "learning_rate": 0.0005694387824432668, "loss": 3.6505, "step": 50715 }, { "epoch": 3.4461203967930425, "grad_norm": 1.9756916761398315, "learning_rate": 0.000569396317434434, "loss": 3.549, "step": 50720 }, { "epoch": 3.446460116863704, "grad_norm": 1.6828978061676025, "learning_rate": 0.0005693538524256013, "loss": 3.6484, "step": 50725 }, { "epoch": 3.446799836934366, "grad_norm": 1.904600977897644, "learning_rate": 0.0005693113874167686, "loss": 3.775, "step": 50730 }, { "epoch": 3.447139557005028, "grad_norm": 2.420520782470703, "learning_rate": 0.0005692689224079358, "loss": 3.2436, "step": 50735 }, { "epoch": 3.4474792770756895, "grad_norm": 1.7193049192428589, "learning_rate": 0.0005692264573991031, "loss": 3.4434, "step": 50740 }, { "epoch": 3.4478189971463515, "grad_norm": 1.6240252256393433, "learning_rate": 0.0005691839923902705, "loss": 3.2233, "step": 50745 }, { "epoch": 3.448158717217013, "grad_norm": 2.4093897342681885, "learning_rate": 0.0005691415273814378, "loss": 3.4371, "step": 50750 }, { "epoch": 3.448498437287675, "grad_norm": 1.7581946849822998, "learning_rate": 0.000569099062372605, "loss": 3.4959, "step": 50755 }, { "epoch": 3.448838157358337, "grad_norm": 2.1138179302215576, "learning_rate": 0.0005690565973637722, "loss": 3.1967, "step": 50760 }, { "epoch": 3.4491778774289985, "grad_norm": 1.7763513326644897, "learning_rate": 0.0005690141323549396, "loss": 3.1204, "step": 50765 }, { "epoch": 3.44951759749966, "grad_norm": 2.1499686241149902, "learning_rate": 0.0005689716673461068, "loss": 3.4952, "step": 50770 }, { "epoch": 3.449857317570322, "grad_norm": 2.1232502460479736, "learning_rate": 0.000568929202337274, "loss": 3.2587, "step": 50775 }, { "epoch": 3.450197037640984, "grad_norm": 1.6010558605194092, 
"learning_rate": 0.0005688867373284415, "loss": 3.3391, "step": 50780 }, { "epoch": 3.4505367577116455, "grad_norm": 1.7719324827194214, "learning_rate": 0.0005688442723196087, "loss": 3.4028, "step": 50785 }, { "epoch": 3.4508764777823075, "grad_norm": 2.0675957202911377, "learning_rate": 0.0005688018073107759, "loss": 3.7329, "step": 50790 }, { "epoch": 3.451216197852969, "grad_norm": 2.7564709186553955, "learning_rate": 0.0005687593423019433, "loss": 3.4148, "step": 50795 }, { "epoch": 3.451555917923631, "grad_norm": 1.7039406299591064, "learning_rate": 0.0005687168772931105, "loss": 3.4867, "step": 50800 }, { "epoch": 3.451895637994293, "grad_norm": 1.9518440961837769, "learning_rate": 0.0005686744122842777, "loss": 3.535, "step": 50805 }, { "epoch": 3.4522353580649545, "grad_norm": 1.8485604524612427, "learning_rate": 0.0005686319472754451, "loss": 3.4506, "step": 50810 }, { "epoch": 3.452575078135616, "grad_norm": 1.4171884059906006, "learning_rate": 0.0005685894822666124, "loss": 3.5324, "step": 50815 }, { "epoch": 3.452914798206278, "grad_norm": 1.807242512702942, "learning_rate": 0.0005685470172577796, "loss": 3.3426, "step": 50820 }, { "epoch": 3.45325451827694, "grad_norm": 1.5525455474853516, "learning_rate": 0.0005685045522489469, "loss": 3.3971, "step": 50825 }, { "epoch": 3.4535942383476015, "grad_norm": 2.057805299758911, "learning_rate": 0.0005684620872401142, "loss": 3.3993, "step": 50830 }, { "epoch": 3.453933958418263, "grad_norm": 2.09201717376709, "learning_rate": 0.0005684196222312814, "loss": 3.3589, "step": 50835 }, { "epoch": 3.454273678488925, "grad_norm": 1.766451358795166, "learning_rate": 0.0005683771572224487, "loss": 3.4249, "step": 50840 }, { "epoch": 3.454613398559587, "grad_norm": 1.6907037496566772, "learning_rate": 0.0005683346922136161, "loss": 3.4388, "step": 50845 }, { "epoch": 3.4549531186302485, "grad_norm": 1.7916990518569946, "learning_rate": 0.0005682922272047833, "loss": 3.5279, "step": 50850 }, { "epoch": 
3.4552928387009105, "grad_norm": 2.2918765544891357, "learning_rate": 0.0005682497621959506, "loss": 3.655, "step": 50855 }, { "epoch": 3.455632558771572, "grad_norm": 1.7478888034820557, "learning_rate": 0.0005682072971871178, "loss": 3.3526, "step": 50860 }, { "epoch": 3.455972278842234, "grad_norm": 2.1956732273101807, "learning_rate": 0.0005681648321782851, "loss": 3.4161, "step": 50865 }, { "epoch": 3.456311998912896, "grad_norm": 2.1089236736297607, "learning_rate": 0.0005681223671694524, "loss": 3.5154, "step": 50870 }, { "epoch": 3.4566517189835575, "grad_norm": 2.5865657329559326, "learning_rate": 0.0005680799021606196, "loss": 3.4544, "step": 50875 }, { "epoch": 3.456991439054219, "grad_norm": 1.815216302871704, "learning_rate": 0.000568037437151787, "loss": 3.4703, "step": 50880 }, { "epoch": 3.457331159124881, "grad_norm": 2.190901517868042, "learning_rate": 0.0005679949721429543, "loss": 3.3796, "step": 50885 }, { "epoch": 3.457670879195543, "grad_norm": 1.7789579629898071, "learning_rate": 0.0005679525071341215, "loss": 3.4864, "step": 50890 }, { "epoch": 3.4580105992662045, "grad_norm": 2.5370571613311768, "learning_rate": 0.0005679100421252887, "loss": 3.295, "step": 50895 }, { "epoch": 3.4583503193368665, "grad_norm": 1.9377717971801758, "learning_rate": 0.0005678675771164561, "loss": 3.5455, "step": 50900 }, { "epoch": 3.458690039407528, "grad_norm": 1.8743077516555786, "learning_rate": 0.0005678251121076233, "loss": 3.5206, "step": 50905 }, { "epoch": 3.45902975947819, "grad_norm": 2.221021890640259, "learning_rate": 0.0005677826470987905, "loss": 3.5532, "step": 50910 }, { "epoch": 3.459369479548852, "grad_norm": 2.063241958618164, "learning_rate": 0.000567740182089958, "loss": 3.591, "step": 50915 }, { "epoch": 3.4597091996195135, "grad_norm": 2.1675732135772705, "learning_rate": 0.0005676977170811252, "loss": 3.3121, "step": 50920 }, { "epoch": 3.460048919690175, "grad_norm": 1.9727340936660767, "learning_rate": 0.0005676552520722924, "loss": 
3.2523, "step": 50925 }, { "epoch": 3.460388639760837, "grad_norm": 1.662295937538147, "learning_rate": 0.0005676127870634598, "loss": 3.1819, "step": 50930 }, { "epoch": 3.460728359831499, "grad_norm": 2.1552560329437256, "learning_rate": 0.000567570322054627, "loss": 3.6295, "step": 50935 }, { "epoch": 3.4610680799021605, "grad_norm": 2.031942844390869, "learning_rate": 0.0005675278570457942, "loss": 3.4788, "step": 50940 }, { "epoch": 3.4614077999728226, "grad_norm": 1.8413223028182983, "learning_rate": 0.0005674853920369615, "loss": 3.5715, "step": 50945 }, { "epoch": 3.461747520043484, "grad_norm": 2.0094354152679443, "learning_rate": 0.0005674429270281289, "loss": 3.5276, "step": 50950 }, { "epoch": 3.462087240114146, "grad_norm": 1.8856335878372192, "learning_rate": 0.0005674004620192961, "loss": 3.4151, "step": 50955 }, { "epoch": 3.462426960184808, "grad_norm": 1.645314335823059, "learning_rate": 0.0005673579970104634, "loss": 3.6583, "step": 50960 }, { "epoch": 3.4627666802554695, "grad_norm": 1.9797707796096802, "learning_rate": 0.0005673155320016307, "loss": 3.433, "step": 50965 }, { "epoch": 3.463106400326131, "grad_norm": 1.9496124982833862, "learning_rate": 0.0005672730669927979, "loss": 3.4784, "step": 50970 }, { "epoch": 3.4634461203967932, "grad_norm": 2.340040683746338, "learning_rate": 0.0005672306019839652, "loss": 3.595, "step": 50975 }, { "epoch": 3.463785840467455, "grad_norm": 2.0014498233795166, "learning_rate": 0.0005671881369751325, "loss": 3.5118, "step": 50980 }, { "epoch": 3.4641255605381165, "grad_norm": 1.7764394283294678, "learning_rate": 0.0005671456719662998, "loss": 3.3147, "step": 50985 }, { "epoch": 3.4644652806087786, "grad_norm": 2.047830820083618, "learning_rate": 0.0005671032069574671, "loss": 3.2313, "step": 50990 }, { "epoch": 3.46480500067944, "grad_norm": 1.8854376077651978, "learning_rate": 0.0005670607419486343, "loss": 3.5553, "step": 50995 }, { "epoch": 3.465144720750102, "grad_norm": 1.35625159740448, 
"learning_rate": 0.0005670182769398016, "loss": 3.6055, "step": 51000 }, { "epoch": 3.465484440820764, "grad_norm": 1.5939171314239502, "learning_rate": 0.0005669758119309689, "loss": 3.7878, "step": 51005 }, { "epoch": 3.4658241608914255, "grad_norm": 1.926578164100647, "learning_rate": 0.0005669333469221361, "loss": 3.3584, "step": 51010 }, { "epoch": 3.466163880962087, "grad_norm": 2.1163437366485596, "learning_rate": 0.0005668908819133034, "loss": 3.3927, "step": 51015 }, { "epoch": 3.4665036010327492, "grad_norm": 2.031254529953003, "learning_rate": 0.0005668484169044708, "loss": 3.3566, "step": 51020 }, { "epoch": 3.466843321103411, "grad_norm": 2.2981908321380615, "learning_rate": 0.000566805951895638, "loss": 3.5143, "step": 51025 }, { "epoch": 3.4671830411740725, "grad_norm": 1.6489938497543335, "learning_rate": 0.0005667634868868053, "loss": 3.4854, "step": 51030 }, { "epoch": 3.4675227612447346, "grad_norm": 1.9332232475280762, "learning_rate": 0.0005667210218779726, "loss": 3.5961, "step": 51035 }, { "epoch": 3.467862481315396, "grad_norm": 1.738968014717102, "learning_rate": 0.0005666785568691398, "loss": 3.6088, "step": 51040 }, { "epoch": 3.468202201386058, "grad_norm": 1.8829797506332397, "learning_rate": 0.000566636091860307, "loss": 3.406, "step": 51045 }, { "epoch": 3.46854192145672, "grad_norm": 1.7274125814437866, "learning_rate": 0.0005665936268514744, "loss": 3.6237, "step": 51050 }, { "epoch": 3.4688816415273815, "grad_norm": 1.7630010843276978, "learning_rate": 0.0005665511618426417, "loss": 3.5416, "step": 51055 }, { "epoch": 3.469221361598043, "grad_norm": 2.2979683876037598, "learning_rate": 0.0005665086968338089, "loss": 3.3069, "step": 51060 }, { "epoch": 3.469561081668705, "grad_norm": 1.9126908779144287, "learning_rate": 0.0005664662318249763, "loss": 3.43, "step": 51065 }, { "epoch": 3.469900801739367, "grad_norm": 2.0735442638397217, "learning_rate": 0.0005664237668161435, "loss": 3.265, "step": 51070 }, { "epoch": 
3.4702405218100285, "grad_norm": 1.9194142818450928, "learning_rate": 0.0005663813018073107, "loss": 3.283, "step": 51075 }, { "epoch": 3.47058024188069, "grad_norm": 2.701779365539551, "learning_rate": 0.0005663388367984781, "loss": 3.516, "step": 51080 }, { "epoch": 3.470919961951352, "grad_norm": 2.3209636211395264, "learning_rate": 0.0005662963717896453, "loss": 3.4525, "step": 51085 }, { "epoch": 3.471259682022014, "grad_norm": 2.150028944015503, "learning_rate": 0.0005662539067808127, "loss": 3.723, "step": 51090 }, { "epoch": 3.4715994020926755, "grad_norm": 1.4700651168823242, "learning_rate": 0.00056621144177198, "loss": 3.4377, "step": 51095 }, { "epoch": 3.4719391221633376, "grad_norm": 1.996562123298645, "learning_rate": 0.0005661689767631472, "loss": 3.5504, "step": 51100 }, { "epoch": 3.472278842233999, "grad_norm": 1.8055143356323242, "learning_rate": 0.0005661265117543145, "loss": 3.3507, "step": 51105 }, { "epoch": 3.472618562304661, "grad_norm": 1.8015713691711426, "learning_rate": 0.0005660840467454817, "loss": 3.3926, "step": 51110 }, { "epoch": 3.472958282375323, "grad_norm": 1.5791528224945068, "learning_rate": 0.000566041581736649, "loss": 3.516, "step": 51115 }, { "epoch": 3.4732980024459845, "grad_norm": 1.9961060285568237, "learning_rate": 0.0005659991167278163, "loss": 3.551, "step": 51120 }, { "epoch": 3.473637722516646, "grad_norm": 2.214905261993408, "learning_rate": 0.0005659566517189836, "loss": 3.6313, "step": 51125 }, { "epoch": 3.4739774425873082, "grad_norm": 2.282393455505371, "learning_rate": 0.0005659141867101509, "loss": 3.5082, "step": 51130 }, { "epoch": 3.47431716265797, "grad_norm": 1.7563806772232056, "learning_rate": 0.0005658717217013182, "loss": 3.4317, "step": 51135 }, { "epoch": 3.4746568827286315, "grad_norm": 2.2926816940307617, "learning_rate": 0.0005658292566924854, "loss": 3.2951, "step": 51140 }, { "epoch": 3.4749966027992936, "grad_norm": 1.765072226524353, "learning_rate": 0.0005657867916836526, "loss": 
3.5106, "step": 51145 }, { "epoch": 3.475336322869955, "grad_norm": 1.4838457107543945, "learning_rate": 0.00056574432667482, "loss": 3.5464, "step": 51150 }, { "epoch": 3.475676042940617, "grad_norm": 2.447964668273926, "learning_rate": 0.0005657018616659872, "loss": 3.3823, "step": 51155 }, { "epoch": 3.4760157630112785, "grad_norm": 1.6898807287216187, "learning_rate": 0.0005656593966571545, "loss": 3.4972, "step": 51160 }, { "epoch": 3.4763554830819405, "grad_norm": 1.6925603151321411, "learning_rate": 0.0005656169316483219, "loss": 3.3514, "step": 51165 }, { "epoch": 3.476695203152602, "grad_norm": 1.6570905447006226, "learning_rate": 0.0005655744666394891, "loss": 3.2755, "step": 51170 }, { "epoch": 3.477034923223264, "grad_norm": 1.674778699874878, "learning_rate": 0.0005655320016306563, "loss": 3.2585, "step": 51175 }, { "epoch": 3.477374643293926, "grad_norm": 2.178972005844116, "learning_rate": 0.0005654895366218237, "loss": 3.5987, "step": 51180 }, { "epoch": 3.4777143633645875, "grad_norm": 2.010876417160034, "learning_rate": 0.0005654470716129909, "loss": 3.7529, "step": 51185 }, { "epoch": 3.478054083435249, "grad_norm": 1.3661320209503174, "learning_rate": 0.0005654046066041581, "loss": 3.5163, "step": 51190 }, { "epoch": 3.478393803505911, "grad_norm": 1.8481292724609375, "learning_rate": 0.0005653621415953256, "loss": 3.5593, "step": 51195 }, { "epoch": 3.478733523576573, "grad_norm": 1.835841417312622, "learning_rate": 0.0005653196765864928, "loss": 3.2381, "step": 51200 }, { "epoch": 3.4790732436472345, "grad_norm": 1.622836709022522, "learning_rate": 0.00056527721157766, "loss": 3.2893, "step": 51205 }, { "epoch": 3.4794129637178965, "grad_norm": 2.2724931240081787, "learning_rate": 0.0005652347465688273, "loss": 3.2144, "step": 51210 }, { "epoch": 3.479752683788558, "grad_norm": 1.9805634021759033, "learning_rate": 0.0005651922815599946, "loss": 3.5804, "step": 51215 }, { "epoch": 3.48009240385922, "grad_norm": 2.0856666564941406, 
"learning_rate": 0.0005651498165511618, "loss": 3.3246, "step": 51220 }, { "epoch": 3.480432123929882, "grad_norm": 1.8920788764953613, "learning_rate": 0.0005651073515423291, "loss": 3.4819, "step": 51225 }, { "epoch": 3.4807718440005435, "grad_norm": 2.7568650245666504, "learning_rate": 0.0005650648865334965, "loss": 3.6157, "step": 51230 }, { "epoch": 3.481111564071205, "grad_norm": 2.010067939758301, "learning_rate": 0.0005650224215246637, "loss": 3.5553, "step": 51235 }, { "epoch": 3.481451284141867, "grad_norm": 1.6495715379714966, "learning_rate": 0.000564979956515831, "loss": 3.3709, "step": 51240 }, { "epoch": 3.481791004212529, "grad_norm": 1.8998080492019653, "learning_rate": 0.0005649374915069982, "loss": 3.2509, "step": 51245 }, { "epoch": 3.4821307242831905, "grad_norm": 2.0142056941986084, "learning_rate": 0.0005648950264981655, "loss": 3.3614, "step": 51250 }, { "epoch": 3.4824704443538526, "grad_norm": 1.6845731735229492, "learning_rate": 0.0005648525614893328, "loss": 3.6734, "step": 51255 }, { "epoch": 3.482810164424514, "grad_norm": 2.13271164894104, "learning_rate": 0.0005648100964805, "loss": 3.3696, "step": 51260 }, { "epoch": 3.483149884495176, "grad_norm": 1.7071186304092407, "learning_rate": 0.0005647676314716674, "loss": 3.4198, "step": 51265 }, { "epoch": 3.483489604565838, "grad_norm": 1.6416147947311401, "learning_rate": 0.0005647251664628347, "loss": 3.6635, "step": 51270 }, { "epoch": 3.4838293246364995, "grad_norm": 2.2057065963745117, "learning_rate": 0.0005646827014540019, "loss": 3.2448, "step": 51275 }, { "epoch": 3.484169044707161, "grad_norm": 1.7079933881759644, "learning_rate": 0.0005646402364451692, "loss": 3.6776, "step": 51280 }, { "epoch": 3.4845087647778232, "grad_norm": 1.5787914991378784, "learning_rate": 0.0005645977714363365, "loss": 3.4406, "step": 51285 }, { "epoch": 3.484848484848485, "grad_norm": 2.100236415863037, "learning_rate": 0.0005645553064275037, "loss": 3.3622, "step": 51290 }, { "epoch": 
3.4851882049191465, "grad_norm": 1.935909628868103, "learning_rate": 0.0005645128414186709, "loss": 3.4422, "step": 51295 }, { "epoch": 3.4855279249898086, "grad_norm": 1.589951992034912, "learning_rate": 0.0005644703764098384, "loss": 3.3448, "step": 51300 }, { "epoch": 3.48586764506047, "grad_norm": 1.633399486541748, "learning_rate": 0.0005644279114010056, "loss": 3.6535, "step": 51305 }, { "epoch": 3.486207365131132, "grad_norm": 1.9692575931549072, "learning_rate": 0.0005643854463921728, "loss": 3.3669, "step": 51310 }, { "epoch": 3.486547085201794, "grad_norm": 1.8384559154510498, "learning_rate": 0.0005643429813833402, "loss": 3.5139, "step": 51315 }, { "epoch": 3.4868868052724555, "grad_norm": 1.9662935733795166, "learning_rate": 0.0005643005163745074, "loss": 3.2061, "step": 51320 }, { "epoch": 3.487226525343117, "grad_norm": 1.9868521690368652, "learning_rate": 0.0005642580513656746, "loss": 3.5933, "step": 51325 }, { "epoch": 3.4875662454137792, "grad_norm": 1.8238680362701416, "learning_rate": 0.000564215586356842, "loss": 3.7599, "step": 51330 }, { "epoch": 3.487905965484441, "grad_norm": 1.6559512615203857, "learning_rate": 0.0005641731213480093, "loss": 3.6233, "step": 51335 }, { "epoch": 3.4882456855551025, "grad_norm": 1.6456646919250488, "learning_rate": 0.0005641306563391765, "loss": 3.4147, "step": 51340 }, { "epoch": 3.4885854056257646, "grad_norm": 1.6249643564224243, "learning_rate": 0.0005640881913303438, "loss": 3.6076, "step": 51345 }, { "epoch": 3.488925125696426, "grad_norm": 1.7242951393127441, "learning_rate": 0.0005640457263215111, "loss": 3.5246, "step": 51350 }, { "epoch": 3.489264845767088, "grad_norm": 1.592475414276123, "learning_rate": 0.0005640032613126783, "loss": 3.36, "step": 51355 }, { "epoch": 3.48960456583775, "grad_norm": 2.291947603225708, "learning_rate": 0.0005639607963038456, "loss": 3.1605, "step": 51360 }, { "epoch": 3.4899442859084115, "grad_norm": 2.0115954875946045, "learning_rate": 0.0005639183312950129, 
"loss": 3.3689, "step": 51365 }, { "epoch": 3.490284005979073, "grad_norm": 2.033202648162842, "learning_rate": 0.0005638758662861802, "loss": 3.5286, "step": 51370 }, { "epoch": 3.4906237260497353, "grad_norm": 1.6315147876739502, "learning_rate": 0.0005638334012773475, "loss": 3.5966, "step": 51375 }, { "epoch": 3.490963446120397, "grad_norm": 1.6498416662216187, "learning_rate": 0.0005637909362685148, "loss": 3.4393, "step": 51380 }, { "epoch": 3.4913031661910585, "grad_norm": 1.8581453561782837, "learning_rate": 0.000563748471259682, "loss": 3.4009, "step": 51385 }, { "epoch": 3.4916428862617206, "grad_norm": 2.143939971923828, "learning_rate": 0.0005637060062508493, "loss": 3.2239, "step": 51390 }, { "epoch": 3.491982606332382, "grad_norm": 1.739531397819519, "learning_rate": 0.0005636635412420165, "loss": 3.543, "step": 51395 }, { "epoch": 3.492322326403044, "grad_norm": 1.8879441022872925, "learning_rate": 0.0005636210762331838, "loss": 3.4659, "step": 51400 }, { "epoch": 3.4926620464737055, "grad_norm": 1.7893099784851074, "learning_rate": 0.0005635786112243512, "loss": 3.4032, "step": 51405 }, { "epoch": 3.4930017665443676, "grad_norm": 2.288109540939331, "learning_rate": 0.0005635361462155184, "loss": 3.6358, "step": 51410 }, { "epoch": 3.493341486615029, "grad_norm": 1.7919960021972656, "learning_rate": 0.0005634936812066857, "loss": 3.3806, "step": 51415 }, { "epoch": 3.493681206685691, "grad_norm": 1.857527732849121, "learning_rate": 0.000563451216197853, "loss": 3.8759, "step": 51420 }, { "epoch": 3.494020926756353, "grad_norm": 1.7018814086914062, "learning_rate": 0.0005634087511890202, "loss": 3.5079, "step": 51425 }, { "epoch": 3.4943606468270145, "grad_norm": 1.949379801750183, "learning_rate": 0.0005633662861801876, "loss": 3.4719, "step": 51430 }, { "epoch": 3.494700366897676, "grad_norm": 1.625848650932312, "learning_rate": 0.0005633238211713549, "loss": 3.608, "step": 51435 }, { "epoch": 3.4950400869683382, "grad_norm": 1.675010323524475, 
"learning_rate": 0.0005632813561625221, "loss": 3.3106, "step": 51440 }, { "epoch": 3.495379807039, "grad_norm": 1.9498777389526367, "learning_rate": 0.0005632388911536894, "loss": 3.3458, "step": 51445 }, { "epoch": 3.4957195271096615, "grad_norm": 2.347348928451538, "learning_rate": 0.0005631964261448567, "loss": 3.3385, "step": 51450 }, { "epoch": 3.4960592471803236, "grad_norm": 2.6716253757476807, "learning_rate": 0.0005631539611360239, "loss": 3.461, "step": 51455 }, { "epoch": 3.496398967250985, "grad_norm": 1.558258056640625, "learning_rate": 0.0005631114961271912, "loss": 3.5523, "step": 51460 }, { "epoch": 3.496738687321647, "grad_norm": 1.7064250707626343, "learning_rate": 0.0005630690311183585, "loss": 3.3237, "step": 51465 }, { "epoch": 3.497078407392309, "grad_norm": 1.9179221391677856, "learning_rate": 0.0005630265661095258, "loss": 3.4198, "step": 51470 }, { "epoch": 3.4974181274629705, "grad_norm": 1.8802661895751953, "learning_rate": 0.0005629841011006931, "loss": 3.257, "step": 51475 }, { "epoch": 3.497757847533632, "grad_norm": 1.8639100790023804, "learning_rate": 0.0005629416360918604, "loss": 3.6597, "step": 51480 }, { "epoch": 3.4980975676042942, "grad_norm": 1.8341141939163208, "learning_rate": 0.0005628991710830276, "loss": 3.2234, "step": 51485 }, { "epoch": 3.498437287674956, "grad_norm": 1.5809470415115356, "learning_rate": 0.0005628567060741949, "loss": 3.4116, "step": 51490 }, { "epoch": 3.4987770077456175, "grad_norm": 1.8410992622375488, "learning_rate": 0.0005628142410653621, "loss": 3.4054, "step": 51495 }, { "epoch": 3.499116727816279, "grad_norm": 1.635978102684021, "learning_rate": 0.0005627717760565294, "loss": 3.3728, "step": 51500 }, { "epoch": 3.499456447886941, "grad_norm": 2.426227569580078, "learning_rate": 0.0005627293110476968, "loss": 3.5017, "step": 51505 }, { "epoch": 3.499796167957603, "grad_norm": 1.6066495180130005, "learning_rate": 0.000562686846038864, "loss": 3.5671, "step": 51510 }, { "epoch": 
3.5001358880282645, "grad_norm": 2.1498892307281494, "learning_rate": 0.0005626443810300313, "loss": 3.3258, "step": 51515 }, { "epoch": 3.5004756080989265, "grad_norm": 1.719695806503296, "learning_rate": 0.0005626019160211986, "loss": 3.2083, "step": 51520 }, { "epoch": 3.500815328169588, "grad_norm": 1.872487187385559, "learning_rate": 0.0005625594510123658, "loss": 3.3593, "step": 51525 }, { "epoch": 3.50115504824025, "grad_norm": 1.8177851438522339, "learning_rate": 0.000562516986003533, "loss": 3.466, "step": 51530 }, { "epoch": 3.501494768310912, "grad_norm": 1.655215859413147, "learning_rate": 0.0005624745209947004, "loss": 3.2467, "step": 51535 }, { "epoch": 3.5018344883815735, "grad_norm": 1.7935537099838257, "learning_rate": 0.0005624320559858677, "loss": 3.4056, "step": 51540 }, { "epoch": 3.502174208452235, "grad_norm": 2.2290515899658203, "learning_rate": 0.0005623895909770349, "loss": 3.344, "step": 51545 }, { "epoch": 3.5025139285228972, "grad_norm": 1.8825781345367432, "learning_rate": 0.0005623471259682023, "loss": 3.5103, "step": 51550 }, { "epoch": 3.502853648593559, "grad_norm": 1.804178237915039, "learning_rate": 0.0005623046609593695, "loss": 3.3339, "step": 51555 }, { "epoch": 3.5031933686642205, "grad_norm": 1.7001041173934937, "learning_rate": 0.0005622621959505367, "loss": 3.3352, "step": 51560 }, { "epoch": 3.5035330887348826, "grad_norm": 1.7387140989303589, "learning_rate": 0.0005622197309417041, "loss": 3.5933, "step": 51565 }, { "epoch": 3.503872808805544, "grad_norm": 1.64702308177948, "learning_rate": 0.0005621772659328713, "loss": 3.1681, "step": 51570 }, { "epoch": 3.504212528876206, "grad_norm": 2.0261785984039307, "learning_rate": 0.0005621348009240386, "loss": 3.7411, "step": 51575 }, { "epoch": 3.504552248946868, "grad_norm": 1.9939336776733398, "learning_rate": 0.000562092335915206, "loss": 3.3919, "step": 51580 }, { "epoch": 3.5048919690175295, "grad_norm": 1.6968985795974731, "learning_rate": 0.0005620498709063732, "loss": 
3.3747, "step": 51585 }, { "epoch": 3.505231689088191, "grad_norm": 1.696456789970398, "learning_rate": 0.0005620074058975404, "loss": 3.2683, "step": 51590 }, { "epoch": 3.5055714091588532, "grad_norm": 2.23848557472229, "learning_rate": 0.0005619649408887077, "loss": 3.7177, "step": 51595 }, { "epoch": 3.505911129229515, "grad_norm": 1.9560644626617432, "learning_rate": 0.000561922475879875, "loss": 3.1508, "step": 51600 }, { "epoch": 3.5062508493001765, "grad_norm": 1.6741759777069092, "learning_rate": 0.0005618800108710422, "loss": 3.1443, "step": 51605 }, { "epoch": 3.5065905693708386, "grad_norm": 1.7980127334594727, "learning_rate": 0.0005618375458622096, "loss": 3.2618, "step": 51610 }, { "epoch": 3.5069302894415, "grad_norm": 1.8461209535598755, "learning_rate": 0.0005617950808533769, "loss": 3.1215, "step": 51615 }, { "epoch": 3.507270009512162, "grad_norm": 1.6035442352294922, "learning_rate": 0.0005617526158445441, "loss": 3.5514, "step": 51620 }, { "epoch": 3.507609729582824, "grad_norm": 2.1823954582214355, "learning_rate": 0.0005617101508357114, "loss": 3.7041, "step": 51625 }, { "epoch": 3.5079494496534855, "grad_norm": 2.252168655395508, "learning_rate": 0.0005616676858268786, "loss": 3.4927, "step": 51630 }, { "epoch": 3.508289169724147, "grad_norm": 1.8013311624526978, "learning_rate": 0.0005616252208180459, "loss": 3.3756, "step": 51635 }, { "epoch": 3.5086288897948092, "grad_norm": 2.283372402191162, "learning_rate": 0.0005615827558092132, "loss": 3.6924, "step": 51640 }, { "epoch": 3.508968609865471, "grad_norm": 2.291586399078369, "learning_rate": 0.0005615402908003805, "loss": 3.4448, "step": 51645 }, { "epoch": 3.5093083299361325, "grad_norm": 1.801373839378357, "learning_rate": 0.0005614978257915478, "loss": 3.6124, "step": 51650 }, { "epoch": 3.5096480500067946, "grad_norm": 1.7409220933914185, "learning_rate": 0.0005614553607827151, "loss": 3.2653, "step": 51655 }, { "epoch": 3.509987770077456, "grad_norm": 1.7821309566497803, 
"learning_rate": 0.0005614128957738823, "loss": 3.3843, "step": 51660 }, { "epoch": 3.510327490148118, "grad_norm": 1.4784996509552002, "learning_rate": 0.0005613704307650496, "loss": 3.2452, "step": 51665 }, { "epoch": 3.51066721021878, "grad_norm": 1.6693813800811768, "learning_rate": 0.0005613279657562169, "loss": 3.5184, "step": 51670 }, { "epoch": 3.5110069302894416, "grad_norm": 1.8186990022659302, "learning_rate": 0.0005612855007473841, "loss": 3.4786, "step": 51675 }, { "epoch": 3.511346650360103, "grad_norm": 1.6676100492477417, "learning_rate": 0.0005612430357385514, "loss": 3.4561, "step": 51680 }, { "epoch": 3.5116863704307653, "grad_norm": 1.4139245748519897, "learning_rate": 0.0005612005707297188, "loss": 3.4684, "step": 51685 }, { "epoch": 3.512026090501427, "grad_norm": 1.7861557006835938, "learning_rate": 0.000561158105720886, "loss": 3.2374, "step": 51690 }, { "epoch": 3.5123658105720885, "grad_norm": 1.6454936265945435, "learning_rate": 0.0005611156407120532, "loss": 3.5996, "step": 51695 }, { "epoch": 3.5127055306427506, "grad_norm": 2.1890199184417725, "learning_rate": 0.0005610731757032206, "loss": 3.617, "step": 51700 }, { "epoch": 3.5130452507134122, "grad_norm": 1.9571752548217773, "learning_rate": 0.0005610307106943878, "loss": 3.2797, "step": 51705 }, { "epoch": 3.513384970784074, "grad_norm": 1.5436302423477173, "learning_rate": 0.000560988245685555, "loss": 3.5732, "step": 51710 }, { "epoch": 3.513724690854736, "grad_norm": 1.8529459238052368, "learning_rate": 0.0005609457806767225, "loss": 3.1989, "step": 51715 }, { "epoch": 3.5140644109253976, "grad_norm": 2.0910089015960693, "learning_rate": 0.0005609033156678897, "loss": 3.481, "step": 51720 }, { "epoch": 3.514404130996059, "grad_norm": 1.6955515146255493, "learning_rate": 0.0005608608506590569, "loss": 3.2292, "step": 51725 }, { "epoch": 3.5147438510667213, "grad_norm": 2.031996965408325, "learning_rate": 0.0005608183856502242, "loss": 3.2609, "step": 51730 }, { "epoch": 
3.515083571137383, "grad_norm": 1.6871217489242554, "learning_rate": 0.0005607759206413915, "loss": 3.4287, "step": 51735 }, { "epoch": 3.5154232912080445, "grad_norm": 2.2753305435180664, "learning_rate": 0.0005607334556325587, "loss": 3.5002, "step": 51740 }, { "epoch": 3.5157630112787066, "grad_norm": 2.6725447177886963, "learning_rate": 0.000560690990623726, "loss": 3.5295, "step": 51745 }, { "epoch": 3.5161027313493682, "grad_norm": 2.0233006477355957, "learning_rate": 0.0005606485256148934, "loss": 3.7381, "step": 51750 }, { "epoch": 3.51644245142003, "grad_norm": 1.7122670412063599, "learning_rate": 0.0005606060606060606, "loss": 3.501, "step": 51755 }, { "epoch": 3.516782171490692, "grad_norm": 1.8593800067901611, "learning_rate": 0.0005605635955972279, "loss": 3.3816, "step": 51760 }, { "epoch": 3.5171218915613536, "grad_norm": 1.7341623306274414, "learning_rate": 0.0005605211305883952, "loss": 3.3935, "step": 51765 }, { "epoch": 3.517461611632015, "grad_norm": 2.1405091285705566, "learning_rate": 0.0005604786655795624, "loss": 3.4167, "step": 51770 }, { "epoch": 3.517801331702677, "grad_norm": 2.179018497467041, "learning_rate": 0.0005604362005707297, "loss": 3.4589, "step": 51775 }, { "epoch": 3.518141051773339, "grad_norm": 2.1271495819091797, "learning_rate": 0.0005603937355618969, "loss": 3.4884, "step": 51780 }, { "epoch": 3.5184807718440005, "grad_norm": 2.194150686264038, "learning_rate": 0.0005603512705530644, "loss": 3.323, "step": 51785 }, { "epoch": 3.518820491914662, "grad_norm": 1.8203932046890259, "learning_rate": 0.0005603088055442316, "loss": 3.6213, "step": 51790 }, { "epoch": 3.5191602119853242, "grad_norm": 2.2320570945739746, "learning_rate": 0.0005602663405353988, "loss": 3.4452, "step": 51795 }, { "epoch": 3.519499932055986, "grad_norm": 1.9792693853378296, "learning_rate": 0.0005602238755265662, "loss": 3.3104, "step": 51800 }, { "epoch": 3.5198396521266475, "grad_norm": 1.8853317499160767, "learning_rate": 0.0005601814105177334, 
"loss": 3.618, "step": 51805 }, { "epoch": 3.520179372197309, "grad_norm": 1.5262572765350342, "learning_rate": 0.0005601389455089006, "loss": 3.6279, "step": 51810 }, { "epoch": 3.520519092267971, "grad_norm": 1.7948814630508423, "learning_rate": 0.000560096480500068, "loss": 3.452, "step": 51815 }, { "epoch": 3.520858812338633, "grad_norm": 1.8591432571411133, "learning_rate": 0.0005600540154912353, "loss": 3.2536, "step": 51820 }, { "epoch": 3.5211985324092945, "grad_norm": 1.7465609312057495, "learning_rate": 0.0005600115504824025, "loss": 3.332, "step": 51825 }, { "epoch": 3.5215382524799566, "grad_norm": 1.9076948165893555, "learning_rate": 0.0005599690854735699, "loss": 3.4384, "step": 51830 }, { "epoch": 3.521877972550618, "grad_norm": 1.613897442817688, "learning_rate": 0.0005599266204647371, "loss": 3.6467, "step": 51835 }, { "epoch": 3.52221769262128, "grad_norm": 2.5788042545318604, "learning_rate": 0.0005598841554559043, "loss": 3.3261, "step": 51840 }, { "epoch": 3.522557412691942, "grad_norm": 1.9983140230178833, "learning_rate": 0.0005598416904470716, "loss": 3.3562, "step": 51845 }, { "epoch": 3.5228971327626035, "grad_norm": 2.0339584350585938, "learning_rate": 0.0005597992254382389, "loss": 3.5105, "step": 51850 }, { "epoch": 3.523236852833265, "grad_norm": 2.013946056365967, "learning_rate": 0.0005597567604294062, "loss": 3.4361, "step": 51855 }, { "epoch": 3.5235765729039272, "grad_norm": 1.9847900867462158, "learning_rate": 0.0005597142954205735, "loss": 3.586, "step": 51860 }, { "epoch": 3.523916292974589, "grad_norm": 1.6572020053863525, "learning_rate": 0.0005596718304117408, "loss": 3.3329, "step": 51865 }, { "epoch": 3.5242560130452505, "grad_norm": 1.6896247863769531, "learning_rate": 0.000559629365402908, "loss": 3.3628, "step": 51870 }, { "epoch": 3.5245957331159126, "grad_norm": 1.6763297319412231, "learning_rate": 0.0005595869003940753, "loss": 3.5459, "step": 51875 }, { "epoch": 3.524935453186574, "grad_norm": 2.0575766563415527, 
"learning_rate": 0.0005595444353852425, "loss": 3.2801, "step": 51880 }, { "epoch": 3.525275173257236, "grad_norm": 1.6660428047180176, "learning_rate": 0.0005595019703764098, "loss": 3.1341, "step": 51885 }, { "epoch": 3.525614893327898, "grad_norm": 1.5710084438323975, "learning_rate": 0.0005594595053675772, "loss": 3.5185, "step": 51890 }, { "epoch": 3.5259546133985595, "grad_norm": 1.846036434173584, "learning_rate": 0.0005594170403587444, "loss": 3.5107, "step": 51895 }, { "epoch": 3.526294333469221, "grad_norm": 1.7053557634353638, "learning_rate": 0.0005593745753499117, "loss": 3.3941, "step": 51900 }, { "epoch": 3.5266340535398832, "grad_norm": 1.971868634223938, "learning_rate": 0.000559332110341079, "loss": 3.4963, "step": 51905 }, { "epoch": 3.526973773610545, "grad_norm": 1.9818956851959229, "learning_rate": 0.0005592896453322462, "loss": 3.5018, "step": 51910 }, { "epoch": 3.5273134936812065, "grad_norm": 2.228419542312622, "learning_rate": 0.0005592471803234134, "loss": 3.5718, "step": 51915 }, { "epoch": 3.5276532137518686, "grad_norm": 1.8063019514083862, "learning_rate": 0.0005592047153145808, "loss": 3.1017, "step": 51920 }, { "epoch": 3.52799293382253, "grad_norm": 2.185992479324341, "learning_rate": 0.0005591622503057481, "loss": 3.5545, "step": 51925 }, { "epoch": 3.528332653893192, "grad_norm": 1.7658573389053345, "learning_rate": 0.0005591197852969153, "loss": 3.613, "step": 51930 }, { "epoch": 3.528672373963854, "grad_norm": 1.8401049375534058, "learning_rate": 0.0005590773202880827, "loss": 3.3086, "step": 51935 }, { "epoch": 3.5290120940345155, "grad_norm": 2.500962734222412, "learning_rate": 0.0005590348552792499, "loss": 3.4755, "step": 51940 }, { "epoch": 3.529351814105177, "grad_norm": 2.078378438949585, "learning_rate": 0.0005589923902704171, "loss": 3.3342, "step": 51945 }, { "epoch": 3.5296915341758393, "grad_norm": 1.890795111656189, "learning_rate": 0.0005589499252615845, "loss": 3.2948, "step": 51950 }, { "epoch": 
3.530031254246501, "grad_norm": 1.8753224611282349, "learning_rate": 0.0005589074602527517, "loss": 3.431, "step": 51955 }, { "epoch": 3.5303709743171625, "grad_norm": 2.049924850463867, "learning_rate": 0.000558864995243919, "loss": 3.4269, "step": 51960 }, { "epoch": 3.5307106943878246, "grad_norm": 1.9376109838485718, "learning_rate": 0.0005588225302350864, "loss": 3.3035, "step": 51965 }, { "epoch": 3.531050414458486, "grad_norm": 1.5722074508666992, "learning_rate": 0.0005587800652262536, "loss": 3.6627, "step": 51970 }, { "epoch": 3.531390134529148, "grad_norm": 1.7273832559585571, "learning_rate": 0.0005587376002174208, "loss": 3.0738, "step": 51975 }, { "epoch": 3.53172985459981, "grad_norm": 1.7738440036773682, "learning_rate": 0.0005586951352085881, "loss": 3.1756, "step": 51980 }, { "epoch": 3.5320695746704716, "grad_norm": 1.8924591541290283, "learning_rate": 0.0005586526701997554, "loss": 3.54, "step": 51985 }, { "epoch": 3.532409294741133, "grad_norm": 1.863511085510254, "learning_rate": 0.0005586102051909226, "loss": 3.3957, "step": 51990 }, { "epoch": 3.5327490148117953, "grad_norm": 2.245655059814453, "learning_rate": 0.00055856774018209, "loss": 3.5061, "step": 51995 }, { "epoch": 3.533088734882457, "grad_norm": 2.3522064685821533, "learning_rate": 0.0005585252751732573, "loss": 3.5201, "step": 52000 }, { "epoch": 3.5334284549531185, "grad_norm": 1.8578294515609741, "learning_rate": 0.0005584828101644245, "loss": 3.68, "step": 52005 }, { "epoch": 3.5337681750237806, "grad_norm": 1.8966641426086426, "learning_rate": 0.0005584403451555918, "loss": 3.2245, "step": 52010 }, { "epoch": 3.5341078950944422, "grad_norm": 2.11234450340271, "learning_rate": 0.000558397880146759, "loss": 3.5224, "step": 52015 }, { "epoch": 3.534447615165104, "grad_norm": 1.7524162530899048, "learning_rate": 0.0005583554151379263, "loss": 3.3342, "step": 52020 }, { "epoch": 3.534787335235766, "grad_norm": 1.4723888635635376, "learning_rate": 0.0005583129501290937, "loss": 
3.4355, "step": 52025 }, { "epoch": 3.5351270553064276, "grad_norm": 1.7445749044418335, "learning_rate": 0.0005582704851202609, "loss": 3.5482, "step": 52030 }, { "epoch": 3.535466775377089, "grad_norm": 2.291203737258911, "learning_rate": 0.0005582280201114282, "loss": 3.6068, "step": 52035 }, { "epoch": 3.5358064954477513, "grad_norm": 2.358865976333618, "learning_rate": 0.0005581855551025955, "loss": 3.4036, "step": 52040 }, { "epoch": 3.536146215518413, "grad_norm": 1.8407379388809204, "learning_rate": 0.0005581430900937627, "loss": 3.5384, "step": 52045 }, { "epoch": 3.5364859355890745, "grad_norm": 2.0277247428894043, "learning_rate": 0.00055810062508493, "loss": 3.5622, "step": 52050 }, { "epoch": 3.5368256556597366, "grad_norm": 2.0964014530181885, "learning_rate": 0.0005580581600760973, "loss": 3.5603, "step": 52055 }, { "epoch": 3.5371653757303982, "grad_norm": 2.2041962146759033, "learning_rate": 0.0005580156950672646, "loss": 3.4659, "step": 52060 }, { "epoch": 3.53750509580106, "grad_norm": 2.067497730255127, "learning_rate": 0.0005579732300584319, "loss": 3.3433, "step": 52065 }, { "epoch": 3.537844815871722, "grad_norm": 2.425023317337036, "learning_rate": 0.0005579307650495992, "loss": 3.305, "step": 52070 }, { "epoch": 3.5381845359423836, "grad_norm": 1.8892920017242432, "learning_rate": 0.0005578883000407664, "loss": 3.5383, "step": 52075 }, { "epoch": 3.538524256013045, "grad_norm": 1.8721908330917358, "learning_rate": 0.0005578458350319336, "loss": 3.4977, "step": 52080 }, { "epoch": 3.5388639760837073, "grad_norm": 2.304945945739746, "learning_rate": 0.000557803370023101, "loss": 3.3509, "step": 52085 }, { "epoch": 3.539203696154369, "grad_norm": 2.028104782104492, "learning_rate": 0.0005577609050142682, "loss": 3.364, "step": 52090 }, { "epoch": 3.5395434162250305, "grad_norm": 1.8725193738937378, "learning_rate": 0.0005577184400054355, "loss": 3.7012, "step": 52095 }, { "epoch": 3.5398831362956926, "grad_norm": 1.6662746667861938, 
"learning_rate": 0.0005576759749966029, "loss": 3.4005, "step": 52100 }, { "epoch": 3.5402228563663543, "grad_norm": 1.4151115417480469, "learning_rate": 0.0005576335099877701, "loss": 3.593, "step": 52105 }, { "epoch": 3.540562576437016, "grad_norm": 1.6227933168411255, "learning_rate": 0.0005575910449789373, "loss": 3.2332, "step": 52110 }, { "epoch": 3.5409022965076775, "grad_norm": 1.6749593019485474, "learning_rate": 0.0005575485799701047, "loss": 3.6394, "step": 52115 }, { "epoch": 3.5412420165783396, "grad_norm": 1.8655990362167358, "learning_rate": 0.0005575061149612719, "loss": 3.3029, "step": 52120 }, { "epoch": 3.541581736649001, "grad_norm": 1.7478363513946533, "learning_rate": 0.0005574721429542058, "loss": 3.3613, "step": 52125 }, { "epoch": 3.541921456719663, "grad_norm": 2.0376813411712646, "learning_rate": 0.0005574381709471395, "loss": 3.5948, "step": 52130 }, { "epoch": 3.542261176790325, "grad_norm": 2.0681400299072266, "learning_rate": 0.0005573957059383068, "loss": 3.5515, "step": 52135 }, { "epoch": 3.5426008968609866, "grad_norm": 1.7833133935928345, "learning_rate": 0.0005573532409294742, "loss": 3.3493, "step": 52140 }, { "epoch": 3.542940616931648, "grad_norm": 1.611725926399231, "learning_rate": 0.0005573107759206414, "loss": 3.5068, "step": 52145 }, { "epoch": 3.54328033700231, "grad_norm": 1.5021198987960815, "learning_rate": 0.0005572683109118086, "loss": 3.4024, "step": 52150 }, { "epoch": 3.543620057072972, "grad_norm": 1.9036853313446045, "learning_rate": 0.000557225845902976, "loss": 3.518, "step": 52155 }, { "epoch": 3.5439597771436335, "grad_norm": 2.0233616828918457, "learning_rate": 0.0005571833808941432, "loss": 3.7266, "step": 52160 }, { "epoch": 3.544299497214295, "grad_norm": 1.9346129894256592, "learning_rate": 0.0005571409158853104, "loss": 3.4111, "step": 52165 }, { "epoch": 3.5446392172849572, "grad_norm": 2.1523923873901367, "learning_rate": 0.0005570984508764778, "loss": 3.3031, "step": 52170 }, { "epoch": 
3.544978937355619, "grad_norm": 2.0214669704437256, "learning_rate": 0.0005570559858676451, "loss": 3.2442, "step": 52175 }, { "epoch": 3.5453186574262805, "grad_norm": 2.1475830078125, "learning_rate": 0.0005570135208588123, "loss": 3.2924, "step": 52180 }, { "epoch": 3.5456583774969426, "grad_norm": 1.711842656135559, "learning_rate": 0.0005569710558499797, "loss": 3.1462, "step": 52185 }, { "epoch": 3.545998097567604, "grad_norm": 1.5595680475234985, "learning_rate": 0.0005569285908411469, "loss": 3.439, "step": 52190 }, { "epoch": 3.546337817638266, "grad_norm": 1.7752677202224731, "learning_rate": 0.0005568861258323142, "loss": 3.2147, "step": 52195 }, { "epoch": 3.546677537708928, "grad_norm": 2.6288702487945557, "learning_rate": 0.0005568436608234814, "loss": 3.3813, "step": 52200 }, { "epoch": 3.5470172577795895, "grad_norm": 2.3990464210510254, "learning_rate": 0.0005568011958146487, "loss": 3.3419, "step": 52205 }, { "epoch": 3.547356977850251, "grad_norm": 1.8372174501419067, "learning_rate": 0.0005567587308058161, "loss": 3.4961, "step": 52210 }, { "epoch": 3.5476966979209132, "grad_norm": 1.9505499601364136, "learning_rate": 0.0005567162657969833, "loss": 3.4863, "step": 52215 }, { "epoch": 3.548036417991575, "grad_norm": 1.7367585897445679, "learning_rate": 0.0005566738007881506, "loss": 3.46, "step": 52220 }, { "epoch": 3.5483761380622365, "grad_norm": 1.8657593727111816, "learning_rate": 0.0005566313357793179, "loss": 3.3786, "step": 52225 }, { "epoch": 3.5487158581328986, "grad_norm": 2.089179515838623, "learning_rate": 0.0005565888707704851, "loss": 3.5617, "step": 52230 }, { "epoch": 3.54905557820356, "grad_norm": 1.5298203229904175, "learning_rate": 0.0005565464057616524, "loss": 3.4693, "step": 52235 }, { "epoch": 3.549395298274222, "grad_norm": 1.8641834259033203, "learning_rate": 0.0005565039407528197, "loss": 3.4106, "step": 52240 }, { "epoch": 3.549735018344884, "grad_norm": 2.231863260269165, "learning_rate": 0.000556461475743987, "loss": 
3.5988, "step": 52245 }, { "epoch": 3.5500747384155455, "grad_norm": 1.7046600580215454, "learning_rate": 0.0005564190107351542, "loss": 3.363, "step": 52250 }, { "epoch": 3.550414458486207, "grad_norm": 1.6025866270065308, "learning_rate": 0.0005563765457263216, "loss": 3.3462, "step": 52255 }, { "epoch": 3.5507541785568693, "grad_norm": 2.289307117462158, "learning_rate": 0.0005563340807174888, "loss": 3.4008, "step": 52260 }, { "epoch": 3.551093898627531, "grad_norm": 1.7680013179779053, "learning_rate": 0.000556291615708656, "loss": 3.3424, "step": 52265 }, { "epoch": 3.5514336186981925, "grad_norm": 1.6992343664169312, "learning_rate": 0.0005562491506998234, "loss": 3.3089, "step": 52270 }, { "epoch": 3.5517733387688546, "grad_norm": 1.464585781097412, "learning_rate": 0.0005562066856909906, "loss": 3.5719, "step": 52275 }, { "epoch": 3.5521130588395162, "grad_norm": 2.268465280532837, "learning_rate": 0.0005561642206821579, "loss": 3.5386, "step": 52280 }, { "epoch": 3.552452778910178, "grad_norm": 2.058347463607788, "learning_rate": 0.0005561217556733253, "loss": 3.5331, "step": 52285 }, { "epoch": 3.55279249898084, "grad_norm": 1.8119367361068726, "learning_rate": 0.0005560792906644925, "loss": 3.5248, "step": 52290 }, { "epoch": 3.5531322190515016, "grad_norm": 2.217541217803955, "learning_rate": 0.0005560368256556597, "loss": 3.7588, "step": 52295 }, { "epoch": 3.553471939122163, "grad_norm": 1.7616069316864014, "learning_rate": 0.000555994360646827, "loss": 3.2441, "step": 52300 }, { "epoch": 3.5538116591928253, "grad_norm": 1.8872971534729004, "learning_rate": 0.0005559518956379943, "loss": 3.6205, "step": 52305 }, { "epoch": 3.554151379263487, "grad_norm": 2.0111923217773438, "learning_rate": 0.0005559094306291615, "loss": 3.3713, "step": 52310 }, { "epoch": 3.5544910993341485, "grad_norm": 1.6598223447799683, "learning_rate": 0.0005558669656203289, "loss": 3.4593, "step": 52315 }, { "epoch": 3.5548308194048106, "grad_norm": 1.8710285425186157, 
"learning_rate": 0.0005558245006114962, "loss": 3.4838, "step": 52320 }, { "epoch": 3.5551705394754722, "grad_norm": 1.5005764961242676, "learning_rate": 0.0005557820356026634, "loss": 3.5421, "step": 52325 }, { "epoch": 3.555510259546134, "grad_norm": 1.8042360544204712, "learning_rate": 0.0005557395705938307, "loss": 3.4838, "step": 52330 }, { "epoch": 3.555849979616796, "grad_norm": 2.2949821949005127, "learning_rate": 0.000555697105584998, "loss": 3.7143, "step": 52335 }, { "epoch": 3.5561896996874576, "grad_norm": 1.7418149709701538, "learning_rate": 0.0005556546405761652, "loss": 3.2105, "step": 52340 }, { "epoch": 3.556529419758119, "grad_norm": 2.54166841506958, "learning_rate": 0.0005556121755673325, "loss": 3.2502, "step": 52345 }, { "epoch": 3.5568691398287813, "grad_norm": 1.698671817779541, "learning_rate": 0.0005555697105584998, "loss": 3.3003, "step": 52350 }, { "epoch": 3.557208859899443, "grad_norm": 2.255687952041626, "learning_rate": 0.0005555272455496671, "loss": 3.3954, "step": 52355 }, { "epoch": 3.5575485799701045, "grad_norm": 1.6386967897415161, "learning_rate": 0.0005554847805408344, "loss": 3.5291, "step": 52360 }, { "epoch": 3.5578883000407666, "grad_norm": 1.7990524768829346, "learning_rate": 0.0005554423155320016, "loss": 3.3574, "step": 52365 }, { "epoch": 3.5582280201114282, "grad_norm": 1.9530600309371948, "learning_rate": 0.0005553998505231689, "loss": 3.3997, "step": 52370 }, { "epoch": 3.55856774018209, "grad_norm": 1.7075629234313965, "learning_rate": 0.0005553573855143362, "loss": 3.2856, "step": 52375 }, { "epoch": 3.558907460252752, "grad_norm": 1.745565414428711, "learning_rate": 0.0005553149205055034, "loss": 3.423, "step": 52380 }, { "epoch": 3.5592471803234136, "grad_norm": 1.5675907135009766, "learning_rate": 0.0005552724554966708, "loss": 3.3027, "step": 52385 }, { "epoch": 3.559586900394075, "grad_norm": 2.2970595359802246, "learning_rate": 0.0005552299904878381, "loss": 3.3743, "step": 52390 }, { "epoch": 
3.5599266204647373, "grad_norm": 2.974139451980591, "learning_rate": 0.0005551875254790053, "loss": 3.2877, "step": 52395 }, { "epoch": 3.560266340535399, "grad_norm": 2.142066240310669, "learning_rate": 0.0005551450604701725, "loss": 3.3495, "step": 52400 }, { "epoch": 3.5606060606060606, "grad_norm": 2.0365500450134277, "learning_rate": 0.0005551025954613399, "loss": 3.3742, "step": 52405 }, { "epoch": 3.5609457806767226, "grad_norm": 1.57725989818573, "learning_rate": 0.0005550601304525071, "loss": 3.5243, "step": 52410 }, { "epoch": 3.5612855007473843, "grad_norm": 1.8563833236694336, "learning_rate": 0.0005550176654436743, "loss": 3.5046, "step": 52415 }, { "epoch": 3.561625220818046, "grad_norm": 2.008147716522217, "learning_rate": 0.0005549752004348418, "loss": 3.4848, "step": 52420 }, { "epoch": 3.561964940888708, "grad_norm": 1.677194356918335, "learning_rate": 0.000554932735426009, "loss": 3.5294, "step": 52425 }, { "epoch": 3.5623046609593696, "grad_norm": 1.7905510663986206, "learning_rate": 0.0005548902704171762, "loss": 3.0979, "step": 52430 }, { "epoch": 3.5626443810300312, "grad_norm": 1.9130877256393433, "learning_rate": 0.0005548478054083436, "loss": 3.6385, "step": 52435 }, { "epoch": 3.5629841011006933, "grad_norm": 2.1342642307281494, "learning_rate": 0.0005548053403995108, "loss": 3.272, "step": 52440 }, { "epoch": 3.563323821171355, "grad_norm": 2.2400147914886475, "learning_rate": 0.000554762875390678, "loss": 3.4388, "step": 52445 }, { "epoch": 3.5636635412420166, "grad_norm": 2.4125823974609375, "learning_rate": 0.0005547204103818455, "loss": 3.3878, "step": 52450 }, { "epoch": 3.564003261312678, "grad_norm": 2.70462703704834, "learning_rate": 0.0005546779453730127, "loss": 3.399, "step": 52455 }, { "epoch": 3.5643429813833403, "grad_norm": 1.938941240310669, "learning_rate": 0.0005546354803641799, "loss": 3.5444, "step": 52460 }, { "epoch": 3.564682701454002, "grad_norm": 1.8517647981643677, "learning_rate": 0.0005545930153553472, "loss": 
3.3101, "step": 52465 }, { "epoch": 3.5650224215246635, "grad_norm": 1.9679268598556519, "learning_rate": 0.0005545505503465145, "loss": 3.3499, "step": 52470 }, { "epoch": 3.5653621415953256, "grad_norm": 2.0679264068603516, "learning_rate": 0.0005545080853376817, "loss": 3.4771, "step": 52475 }, { "epoch": 3.5657018616659872, "grad_norm": 2.3786652088165283, "learning_rate": 0.000554465620328849, "loss": 3.633, "step": 52480 }, { "epoch": 3.566041581736649, "grad_norm": 1.8627630472183228, "learning_rate": 0.0005544231553200164, "loss": 3.5593, "step": 52485 }, { "epoch": 3.5663813018073105, "grad_norm": 2.0454702377319336, "learning_rate": 0.0005543806903111836, "loss": 3.513, "step": 52490 }, { "epoch": 3.5667210218779726, "grad_norm": 1.8590103387832642, "learning_rate": 0.0005543382253023509, "loss": 3.7055, "step": 52495 }, { "epoch": 3.567060741948634, "grad_norm": 1.555958867073059, "learning_rate": 0.0005542957602935181, "loss": 3.2875, "step": 52500 }, { "epoch": 3.567400462019296, "grad_norm": 1.7650487422943115, "learning_rate": 0.0005542532952846854, "loss": 3.2782, "step": 52505 }, { "epoch": 3.567740182089958, "grad_norm": 1.393337607383728, "learning_rate": 0.0005542108302758527, "loss": 3.2607, "step": 52510 }, { "epoch": 3.5680799021606195, "grad_norm": 1.8808149099349976, "learning_rate": 0.0005541683652670199, "loss": 3.1564, "step": 52515 }, { "epoch": 3.568419622231281, "grad_norm": 1.8344056606292725, "learning_rate": 0.0005541259002581873, "loss": 3.5249, "step": 52520 }, { "epoch": 3.5687593423019432, "grad_norm": 2.0116519927978516, "learning_rate": 0.0005540834352493546, "loss": 3.7712, "step": 52525 }, { "epoch": 3.569099062372605, "grad_norm": 1.7268234491348267, "learning_rate": 0.0005540409702405218, "loss": 3.4186, "step": 52530 }, { "epoch": 3.5694387824432665, "grad_norm": 2.1797382831573486, "learning_rate": 0.0005539985052316892, "loss": 3.1493, "step": 52535 }, { "epoch": 3.5697785025139286, "grad_norm": 1.9512903690338135, 
"learning_rate": 0.0005539560402228564, "loss": 3.6599, "step": 52540 }, { "epoch": 3.57011822258459, "grad_norm": 2.229318857192993, "learning_rate": 0.0005539135752140236, "loss": 3.4583, "step": 52545 }, { "epoch": 3.570457942655252, "grad_norm": 2.082435131072998, "learning_rate": 0.0005538711102051909, "loss": 3.5287, "step": 52550 }, { "epoch": 3.570797662725914, "grad_norm": 2.050251007080078, "learning_rate": 0.0005538286451963583, "loss": 3.5007, "step": 52555 }, { "epoch": 3.5711373827965756, "grad_norm": 2.0973973274230957, "learning_rate": 0.0005537861801875255, "loss": 3.4097, "step": 52560 }, { "epoch": 3.571477102867237, "grad_norm": 1.7500768899917603, "learning_rate": 0.0005537437151786928, "loss": 3.3434, "step": 52565 }, { "epoch": 3.5718168229378993, "grad_norm": 2.1193082332611084, "learning_rate": 0.0005537012501698601, "loss": 3.5213, "step": 52570 }, { "epoch": 3.572156543008561, "grad_norm": 1.9414790868759155, "learning_rate": 0.0005536587851610273, "loss": 3.6615, "step": 52575 }, { "epoch": 3.5724962630792225, "grad_norm": 2.067239999771118, "learning_rate": 0.0005536163201521946, "loss": 3.4914, "step": 52580 }, { "epoch": 3.5728359831498846, "grad_norm": 2.02341890335083, "learning_rate": 0.0005535738551433619, "loss": 3.7665, "step": 52585 }, { "epoch": 3.5731757032205462, "grad_norm": 1.975623607635498, "learning_rate": 0.0005535313901345292, "loss": 3.6311, "step": 52590 }, { "epoch": 3.573515423291208, "grad_norm": 2.3367905616760254, "learning_rate": 0.0005534889251256965, "loss": 3.4582, "step": 52595 }, { "epoch": 3.57385514336187, "grad_norm": 1.6768022775650024, "learning_rate": 0.0005534464601168637, "loss": 3.3838, "step": 52600 }, { "epoch": 3.5741948634325316, "grad_norm": 1.3328009843826294, "learning_rate": 0.000553403995108031, "loss": 3.3091, "step": 52605 }, { "epoch": 3.574534583503193, "grad_norm": 2.6363794803619385, "learning_rate": 0.0005533615300991983, "loss": 3.6932, "step": 52610 }, { "epoch": 
3.5748743035738553, "grad_norm": 2.5157668590545654, "learning_rate": 0.0005533190650903655, "loss": 3.4852, "step": 52615 }, { "epoch": 3.575214023644517, "grad_norm": 1.724245309829712, "learning_rate": 0.0005532766000815328, "loss": 3.4443, "step": 52620 }, { "epoch": 3.5755537437151785, "grad_norm": 1.8269842863082886, "learning_rate": 0.0005532341350727002, "loss": 3.4825, "step": 52625 }, { "epoch": 3.5758934637858406, "grad_norm": 2.1542463302612305, "learning_rate": 0.0005531916700638674, "loss": 3.5596, "step": 52630 }, { "epoch": 3.5762331838565022, "grad_norm": 2.142245054244995, "learning_rate": 0.0005531492050550347, "loss": 3.2894, "step": 52635 }, { "epoch": 3.576572903927164, "grad_norm": 2.3783023357391357, "learning_rate": 0.000553106740046202, "loss": 3.3101, "step": 52640 }, { "epoch": 3.576912623997826, "grad_norm": 1.8271338939666748, "learning_rate": 0.0005530642750373692, "loss": 3.3253, "step": 52645 }, { "epoch": 3.5772523440684876, "grad_norm": 1.9148919582366943, "learning_rate": 0.0005530218100285364, "loss": 3.7976, "step": 52650 }, { "epoch": 3.577592064139149, "grad_norm": 1.9036058187484741, "learning_rate": 0.0005529793450197038, "loss": 3.3544, "step": 52655 }, { "epoch": 3.5779317842098113, "grad_norm": 1.8700560331344604, "learning_rate": 0.0005529368800108711, "loss": 3.3594, "step": 52660 }, { "epoch": 3.578271504280473, "grad_norm": 1.711146593093872, "learning_rate": 0.0005528944150020383, "loss": 3.3195, "step": 52665 }, { "epoch": 3.5786112243511345, "grad_norm": 1.8058383464813232, "learning_rate": 0.0005528519499932057, "loss": 3.4222, "step": 52670 }, { "epoch": 3.5789509444217966, "grad_norm": 2.0077192783355713, "learning_rate": 0.0005528094849843729, "loss": 3.4314, "step": 52675 }, { "epoch": 3.5792906644924583, "grad_norm": 1.439239740371704, "learning_rate": 0.0005527670199755401, "loss": 3.5304, "step": 52680 }, { "epoch": 3.57963038456312, "grad_norm": 1.777100920677185, "learning_rate": 0.0005527245549667075, 
"loss": 3.5678, "step": 52685 }, { "epoch": 3.579970104633782, "grad_norm": 1.8739118576049805, "learning_rate": 0.0005526820899578747, "loss": 3.5682, "step": 52690 }, { "epoch": 3.5803098247044436, "grad_norm": 2.220618963241577, "learning_rate": 0.000552639624949042, "loss": 3.2921, "step": 52695 }, { "epoch": 3.580649544775105, "grad_norm": 1.8446080684661865, "learning_rate": 0.0005525971599402093, "loss": 3.7096, "step": 52700 }, { "epoch": 3.5809892648457673, "grad_norm": 1.872796654701233, "learning_rate": 0.0005525546949313766, "loss": 3.4556, "step": 52705 }, { "epoch": 3.581328984916429, "grad_norm": 1.7332967519760132, "learning_rate": 0.0005525122299225438, "loss": 3.4227, "step": 52710 }, { "epoch": 3.5816687049870906, "grad_norm": 1.5592119693756104, "learning_rate": 0.0005524697649137111, "loss": 3.4912, "step": 52715 }, { "epoch": 3.5820084250577526, "grad_norm": 2.218228578567505, "learning_rate": 0.0005524272999048784, "loss": 3.5646, "step": 52720 }, { "epoch": 3.5823481451284143, "grad_norm": 1.9286831617355347, "learning_rate": 0.0005523848348960456, "loss": 3.4809, "step": 52725 }, { "epoch": 3.582687865199076, "grad_norm": 2.068800926208496, "learning_rate": 0.000552342369887213, "loss": 3.3887, "step": 52730 }, { "epoch": 3.583027585269738, "grad_norm": 1.6519434452056885, "learning_rate": 0.0005522999048783803, "loss": 3.4686, "step": 52735 }, { "epoch": 3.5833673053403996, "grad_norm": 2.4680354595184326, "learning_rate": 0.0005522574398695475, "loss": 3.4248, "step": 52740 }, { "epoch": 3.5837070254110612, "grad_norm": 1.534803867340088, "learning_rate": 0.0005522149748607148, "loss": 3.1251, "step": 52745 }, { "epoch": 3.5840467454817233, "grad_norm": 1.5807781219482422, "learning_rate": 0.000552172509851882, "loss": 3.4061, "step": 52750 }, { "epoch": 3.584386465552385, "grad_norm": 1.8521102666854858, "learning_rate": 0.0005521300448430493, "loss": 3.2988, "step": 52755 }, { "epoch": 3.5847261856230466, "grad_norm": 
2.2452144622802734, "learning_rate": 0.0005520875798342166, "loss": 3.6595, "step": 52760 }, { "epoch": 3.5850659056937086, "grad_norm": 2.1656692028045654, "learning_rate": 0.0005520451148253839, "loss": 3.462, "step": 52765 }, { "epoch": 3.5854056257643703, "grad_norm": 1.5556365251541138, "learning_rate": 0.0005520026498165512, "loss": 3.4742, "step": 52770 }, { "epoch": 3.585745345835032, "grad_norm": 1.7501078844070435, "learning_rate": 0.0005519601848077185, "loss": 3.4656, "step": 52775 }, { "epoch": 3.586085065905694, "grad_norm": 1.386871099472046, "learning_rate": 0.0005519177197988857, "loss": 3.3994, "step": 52780 }, { "epoch": 3.5864247859763556, "grad_norm": 2.062199831008911, "learning_rate": 0.0005518752547900529, "loss": 3.183, "step": 52785 }, { "epoch": 3.5867645060470172, "grad_norm": 1.7077778577804565, "learning_rate": 0.0005518327897812203, "loss": 3.6208, "step": 52790 }, { "epoch": 3.587104226117679, "grad_norm": 1.9446319341659546, "learning_rate": 0.0005517903247723875, "loss": 3.7011, "step": 52795 }, { "epoch": 3.587443946188341, "grad_norm": 1.4945656061172485, "learning_rate": 0.0005517478597635548, "loss": 3.703, "step": 52800 }, { "epoch": 3.5877836662590026, "grad_norm": 2.101854085922241, "learning_rate": 0.0005517053947547222, "loss": 3.4513, "step": 52805 }, { "epoch": 3.588123386329664, "grad_norm": 2.0931918621063232, "learning_rate": 0.0005516629297458894, "loss": 3.7526, "step": 52810 }, { "epoch": 3.5884631064003263, "grad_norm": 2.133587598800659, "learning_rate": 0.0005516204647370566, "loss": 3.5831, "step": 52815 }, { "epoch": 3.588802826470988, "grad_norm": 2.5074667930603027, "learning_rate": 0.000551577999728224, "loss": 3.427, "step": 52820 }, { "epoch": 3.5891425465416495, "grad_norm": 2.589832067489624, "learning_rate": 0.0005515355347193912, "loss": 3.5024, "step": 52825 }, { "epoch": 3.589482266612311, "grad_norm": 1.8808672428131104, "learning_rate": 0.0005514930697105584, "loss": 3.5742, "step": 52830 }, { 
"epoch": 3.5898219866829733, "grad_norm": 2.0472607612609863, "learning_rate": 0.0005514506047017259, "loss": 3.5298, "step": 52835 }, { "epoch": 3.590161706753635, "grad_norm": 1.5496289730072021, "learning_rate": 0.0005514081396928931, "loss": 3.6438, "step": 52840 }, { "epoch": 3.5905014268242965, "grad_norm": 1.8670960664749146, "learning_rate": 0.0005513656746840603, "loss": 3.4254, "step": 52845 }, { "epoch": 3.5908411468949586, "grad_norm": 1.8440130949020386, "learning_rate": 0.0005513232096752276, "loss": 3.483, "step": 52850 }, { "epoch": 3.59118086696562, "grad_norm": 2.5794565677642822, "learning_rate": 0.0005512807446663949, "loss": 3.5382, "step": 52855 }, { "epoch": 3.591520587036282, "grad_norm": 1.5751556158065796, "learning_rate": 0.0005512382796575621, "loss": 3.3527, "step": 52860 }, { "epoch": 3.591860307106944, "grad_norm": 2.0799593925476074, "learning_rate": 0.0005511958146487294, "loss": 3.4817, "step": 52865 }, { "epoch": 3.5922000271776056, "grad_norm": 2.326000928878784, "learning_rate": 0.0005511533496398968, "loss": 3.4902, "step": 52870 }, { "epoch": 3.592539747248267, "grad_norm": 1.7688058614730835, "learning_rate": 0.0005511108846310641, "loss": 3.1965, "step": 52875 }, { "epoch": 3.5928794673189293, "grad_norm": 1.8868188858032227, "learning_rate": 0.0005510684196222313, "loss": 3.4224, "step": 52880 }, { "epoch": 3.593219187389591, "grad_norm": 1.7931393384933472, "learning_rate": 0.0005510259546133985, "loss": 3.4946, "step": 52885 }, { "epoch": 3.5935589074602525, "grad_norm": 1.4935193061828613, "learning_rate": 0.0005509834896045659, "loss": 3.2822, "step": 52890 }, { "epoch": 3.5938986275309146, "grad_norm": 1.6135388612747192, "learning_rate": 0.0005509410245957331, "loss": 3.4099, "step": 52895 }, { "epoch": 3.5942383476015762, "grad_norm": 1.742673397064209, "learning_rate": 0.0005508985595869003, "loss": 3.2992, "step": 52900 }, { "epoch": 3.594578067672238, "grad_norm": 1.898037314414978, "learning_rate": 
0.0005508560945780678, "loss": 3.4629, "step": 52905 }, { "epoch": 3.5949177877429, "grad_norm": 2.049947500228882, "learning_rate": 0.000550813629569235, "loss": 3.5928, "step": 52910 }, { "epoch": 3.5952575078135616, "grad_norm": 1.9593477249145508, "learning_rate": 0.0005507711645604022, "loss": 3.7411, "step": 52915 }, { "epoch": 3.595597227884223, "grad_norm": 1.5305238962173462, "learning_rate": 0.0005507286995515696, "loss": 3.4681, "step": 52920 }, { "epoch": 3.5959369479548853, "grad_norm": 1.6784719228744507, "learning_rate": 0.0005506862345427368, "loss": 2.9925, "step": 52925 }, { "epoch": 3.596276668025547, "grad_norm": 2.0775225162506104, "learning_rate": 0.000550643769533904, "loss": 3.4278, "step": 52930 }, { "epoch": 3.5966163880962085, "grad_norm": 1.7408491373062134, "learning_rate": 0.0005506013045250713, "loss": 3.2527, "step": 52935 }, { "epoch": 3.5969561081668706, "grad_norm": 1.9254573583602905, "learning_rate": 0.0005505588395162387, "loss": 3.2851, "step": 52940 }, { "epoch": 3.5972958282375322, "grad_norm": 1.904138207435608, "learning_rate": 0.0005505163745074059, "loss": 3.2536, "step": 52945 }, { "epoch": 3.597635548308194, "grad_norm": 1.967831015586853, "learning_rate": 0.0005504739094985732, "loss": 3.535, "step": 52950 }, { "epoch": 3.597975268378856, "grad_norm": 2.154146909713745, "learning_rate": 0.0005504314444897405, "loss": 3.4433, "step": 52955 }, { "epoch": 3.5983149884495176, "grad_norm": 1.9314614534378052, "learning_rate": 0.0005503889794809077, "loss": 3.3895, "step": 52960 }, { "epoch": 3.598654708520179, "grad_norm": 1.613595962524414, "learning_rate": 0.000550346514472075, "loss": 3.4405, "step": 52965 }, { "epoch": 3.5989944285908413, "grad_norm": 1.8953102827072144, "learning_rate": 0.0005503040494632423, "loss": 3.5023, "step": 52970 }, { "epoch": 3.599334148661503, "grad_norm": 1.4728856086730957, "learning_rate": 0.0005502615844544096, "loss": 3.622, "step": 52975 }, { "epoch": 3.5996738687321646, "grad_norm": 
1.8940640687942505, "learning_rate": 0.0005502191194455769, "loss": 3.435, "step": 52980 }, { "epoch": 3.6000135888028266, "grad_norm": 2.4099597930908203, "learning_rate": 0.0005501766544367441, "loss": 3.2219, "step": 52985 }, { "epoch": 3.6003533088734883, "grad_norm": 1.8786498308181763, "learning_rate": 0.0005501341894279114, "loss": 3.4697, "step": 52990 }, { "epoch": 3.60069302894415, "grad_norm": 1.7531368732452393, "learning_rate": 0.0005500917244190787, "loss": 3.6484, "step": 52995 }, { "epoch": 3.601032749014812, "grad_norm": 1.5819377899169922, "learning_rate": 0.0005500492594102459, "loss": 3.3075, "step": 53000 }, { "epoch": 3.6013724690854736, "grad_norm": 1.3944185972213745, "learning_rate": 0.0005500067944014132, "loss": 3.4183, "step": 53005 }, { "epoch": 3.6017121891561352, "grad_norm": 1.8376531600952148, "learning_rate": 0.0005499643293925806, "loss": 3.3523, "step": 53010 }, { "epoch": 3.6020519092267973, "grad_norm": 1.7831653356552124, "learning_rate": 0.0005499218643837478, "loss": 3.3782, "step": 53015 }, { "epoch": 3.602391629297459, "grad_norm": 1.8364688158035278, "learning_rate": 0.0005498793993749151, "loss": 3.377, "step": 53020 }, { "epoch": 3.6027313493681206, "grad_norm": 2.1655073165893555, "learning_rate": 0.0005498369343660824, "loss": 3.3407, "step": 53025 }, { "epoch": 3.6030710694387826, "grad_norm": 1.9766308069229126, "learning_rate": 0.0005497944693572496, "loss": 3.3573, "step": 53030 }, { "epoch": 3.6034107895094443, "grad_norm": 1.6355592012405396, "learning_rate": 0.0005497520043484168, "loss": 3.3761, "step": 53035 }, { "epoch": 3.603750509580106, "grad_norm": 1.9879252910614014, "learning_rate": 0.0005497095393395843, "loss": 3.3225, "step": 53040 }, { "epoch": 3.604090229650768, "grad_norm": 2.0176854133605957, "learning_rate": 0.0005496670743307515, "loss": 3.395, "step": 53045 }, { "epoch": 3.6044299497214296, "grad_norm": 2.1952919960021973, "learning_rate": 0.0005496246093219187, "loss": 3.3804, "step": 53050 
}, { "epoch": 3.6047696697920912, "grad_norm": 1.5101243257522583, "learning_rate": 0.0005495821443130861, "loss": 3.4712, "step": 53055 }, { "epoch": 3.6051093898627533, "grad_norm": 1.729300856590271, "learning_rate": 0.0005495396793042533, "loss": 3.516, "step": 53060 }, { "epoch": 3.605449109933415, "grad_norm": 2.126330614089966, "learning_rate": 0.0005494972142954205, "loss": 3.4726, "step": 53065 }, { "epoch": 3.6057888300040766, "grad_norm": 1.2944904565811157, "learning_rate": 0.0005494547492865879, "loss": 3.4474, "step": 53070 }, { "epoch": 3.6061285500747386, "grad_norm": 2.254160165786743, "learning_rate": 0.0005494122842777552, "loss": 3.4097, "step": 53075 }, { "epoch": 3.6064682701454003, "grad_norm": 1.881441354751587, "learning_rate": 0.0005493698192689224, "loss": 3.427, "step": 53080 }, { "epoch": 3.606807990216062, "grad_norm": 1.96063232421875, "learning_rate": 0.0005493273542600897, "loss": 3.2562, "step": 53085 }, { "epoch": 3.607147710286724, "grad_norm": 1.9577888250350952, "learning_rate": 0.000549284889251257, "loss": 3.3134, "step": 53090 }, { "epoch": 3.6074874303573856, "grad_norm": 1.4115610122680664, "learning_rate": 0.0005492424242424242, "loss": 3.3686, "step": 53095 }, { "epoch": 3.6078271504280472, "grad_norm": 1.7200084924697876, "learning_rate": 0.0005491999592335915, "loss": 3.5302, "step": 53100 }, { "epoch": 3.6081668704987093, "grad_norm": 1.5367457866668701, "learning_rate": 0.0005491574942247588, "loss": 3.5039, "step": 53105 }, { "epoch": 3.608506590569371, "grad_norm": 1.8037422895431519, "learning_rate": 0.0005491150292159261, "loss": 3.2051, "step": 53110 }, { "epoch": 3.6088463106400326, "grad_norm": 1.7725355625152588, "learning_rate": 0.0005490725642070934, "loss": 3.4988, "step": 53115 }, { "epoch": 3.6091860307106947, "grad_norm": 1.975118637084961, "learning_rate": 0.0005490300991982607, "loss": 3.5714, "step": 53120 }, { "epoch": 3.6095257507813563, "grad_norm": 2.2091453075408936, "learning_rate": 
0.0005489876341894279, "loss": 3.63, "step": 53125 }, { "epoch": 3.609865470852018, "grad_norm": 2.1977477073669434, "learning_rate": 0.0005489451691805952, "loss": 3.6192, "step": 53130 }, { "epoch": 3.6102051909226796, "grad_norm": 1.8513799905776978, "learning_rate": 0.0005489027041717624, "loss": 3.688, "step": 53135 }, { "epoch": 3.6105449109933416, "grad_norm": 1.9214882850646973, "learning_rate": 0.0005488602391629297, "loss": 3.3137, "step": 53140 }, { "epoch": 3.6108846310640033, "grad_norm": 1.958956003189087, "learning_rate": 0.0005488177741540971, "loss": 3.333, "step": 53145 }, { "epoch": 3.611224351134665, "grad_norm": 1.9458521604537964, "learning_rate": 0.0005487753091452643, "loss": 3.5832, "step": 53150 }, { "epoch": 3.611564071205327, "grad_norm": 1.8535946607589722, "learning_rate": 0.0005487328441364316, "loss": 3.4209, "step": 53155 }, { "epoch": 3.6119037912759886, "grad_norm": 2.1469056606292725, "learning_rate": 0.0005486903791275989, "loss": 3.3265, "step": 53160 }, { "epoch": 3.6122435113466502, "grad_norm": 2.039844512939453, "learning_rate": 0.0005486479141187661, "loss": 3.6022, "step": 53165 }, { "epoch": 3.612583231417312, "grad_norm": 1.4110878705978394, "learning_rate": 0.0005486054491099333, "loss": 3.3065, "step": 53170 }, { "epoch": 3.612922951487974, "grad_norm": 2.1442294120788574, "learning_rate": 0.0005485629841011007, "loss": 3.3121, "step": 53175 }, { "epoch": 3.6132626715586356, "grad_norm": 1.869667887687683, "learning_rate": 0.000548520519092268, "loss": 3.4238, "step": 53180 }, { "epoch": 3.613602391629297, "grad_norm": 2.4349114894866943, "learning_rate": 0.0005484780540834352, "loss": 3.2551, "step": 53185 }, { "epoch": 3.6139421116999593, "grad_norm": 2.488600492477417, "learning_rate": 0.0005484355890746026, "loss": 3.4922, "step": 53190 }, { "epoch": 3.614281831770621, "grad_norm": 1.8827769756317139, "learning_rate": 0.0005483931240657698, "loss": 3.5, "step": 53195 }, { "epoch": 3.6146215518412825, "grad_norm": 
2.3601186275482178, "learning_rate": 0.000548350659056937, "loss": 3.5323, "step": 53200 }, { "epoch": 3.6149612719119446, "grad_norm": 2.2068824768066406, "learning_rate": 0.0005483081940481044, "loss": 3.3143, "step": 53205 }, { "epoch": 3.6153009919826062, "grad_norm": 1.8890464305877686, "learning_rate": 0.0005482657290392716, "loss": 3.6629, "step": 53210 }, { "epoch": 3.615640712053268, "grad_norm": 2.215579032897949, "learning_rate": 0.000548223264030439, "loss": 3.7277, "step": 53215 }, { "epoch": 3.61598043212393, "grad_norm": 1.7892365455627441, "learning_rate": 0.0005481807990216063, "loss": 3.4489, "step": 53220 }, { "epoch": 3.6163201521945916, "grad_norm": 1.8626689910888672, "learning_rate": 0.0005481383340127735, "loss": 3.2626, "step": 53225 }, { "epoch": 3.616659872265253, "grad_norm": 1.7100642919540405, "learning_rate": 0.0005480958690039408, "loss": 3.4355, "step": 53230 }, { "epoch": 3.6169995923359153, "grad_norm": 1.7011083364486694, "learning_rate": 0.000548053403995108, "loss": 3.4984, "step": 53235 }, { "epoch": 3.617339312406577, "grad_norm": 2.0638468265533447, "learning_rate": 0.0005480109389862753, "loss": 3.5889, "step": 53240 }, { "epoch": 3.6176790324772385, "grad_norm": 2.791163206100464, "learning_rate": 0.0005479684739774426, "loss": 3.3698, "step": 53245 }, { "epoch": 3.6180187525479006, "grad_norm": 2.2863097190856934, "learning_rate": 0.0005479260089686099, "loss": 3.5052, "step": 53250 }, { "epoch": 3.6183584726185622, "grad_norm": 2.2264060974121094, "learning_rate": 0.0005478835439597772, "loss": 3.5303, "step": 53255 }, { "epoch": 3.618698192689224, "grad_norm": 1.8294191360473633, "learning_rate": 0.0005478410789509445, "loss": 3.424, "step": 53260 }, { "epoch": 3.619037912759886, "grad_norm": 1.879436731338501, "learning_rate": 0.0005477986139421117, "loss": 3.4231, "step": 53265 }, { "epoch": 3.6193776328305476, "grad_norm": 2.3295443058013916, "learning_rate": 0.000547756148933279, "loss": 3.2715, "step": 53270 }, { 
"epoch": 3.619717352901209, "grad_norm": 1.753043532371521, "learning_rate": 0.0005477136839244463, "loss": 3.3584, "step": 53275 }, { "epoch": 3.6200570729718713, "grad_norm": 1.9067312479019165, "learning_rate": 0.0005476712189156135, "loss": 3.6577, "step": 53280 }, { "epoch": 3.620396793042533, "grad_norm": 2.069767951965332, "learning_rate": 0.0005476287539067808, "loss": 3.3425, "step": 53285 }, { "epoch": 3.6207365131131946, "grad_norm": 1.6434533596038818, "learning_rate": 0.0005475862888979482, "loss": 3.4261, "step": 53290 }, { "epoch": 3.6210762331838566, "grad_norm": 2.1749870777130127, "learning_rate": 0.0005475438238891154, "loss": 3.3595, "step": 53295 }, { "epoch": 3.6214159532545183, "grad_norm": 1.9224485158920288, "learning_rate": 0.0005475013588802826, "loss": 3.4631, "step": 53300 }, { "epoch": 3.62175567332518, "grad_norm": 1.9714959859848022, "learning_rate": 0.00054745889387145, "loss": 3.4981, "step": 53305 }, { "epoch": 3.622095393395842, "grad_norm": 1.8908677101135254, "learning_rate": 0.0005474164288626172, "loss": 3.4748, "step": 53310 }, { "epoch": 3.6224351134665036, "grad_norm": 2.2043087482452393, "learning_rate": 0.0005473739638537844, "loss": 3.4606, "step": 53315 }, { "epoch": 3.6227748335371652, "grad_norm": 2.5422885417938232, "learning_rate": 0.0005473314988449519, "loss": 3.1635, "step": 53320 }, { "epoch": 3.6231145536078273, "grad_norm": 1.6218810081481934, "learning_rate": 0.0005472890338361191, "loss": 3.4275, "step": 53325 }, { "epoch": 3.623454273678489, "grad_norm": 2.0730843544006348, "learning_rate": 0.0005472465688272863, "loss": 3.5238, "step": 53330 }, { "epoch": 3.6237939937491506, "grad_norm": 1.8861808776855469, "learning_rate": 0.0005472041038184536, "loss": 3.3966, "step": 53335 }, { "epoch": 3.6241337138198126, "grad_norm": 1.8581992387771606, "learning_rate": 0.0005471616388096209, "loss": 3.5678, "step": 53340 }, { "epoch": 3.6244734338904743, "grad_norm": 2.2424509525299072, "learning_rate": 
0.0005471191738007881, "loss": 3.2668, "step": 53345 }, { "epoch": 3.624813153961136, "grad_norm": 1.9102420806884766, "learning_rate": 0.0005470767087919554, "loss": 3.1841, "step": 53350 }, { "epoch": 3.625152874031798, "grad_norm": 1.5751029253005981, "learning_rate": 0.0005470342437831228, "loss": 3.4081, "step": 53355 }, { "epoch": 3.6254925941024596, "grad_norm": 1.9948062896728516, "learning_rate": 0.00054699177877429, "loss": 3.6546, "step": 53360 }, { "epoch": 3.6258323141731212, "grad_norm": 1.4717158079147339, "learning_rate": 0.0005469493137654573, "loss": 3.293, "step": 53365 }, { "epoch": 3.6261720342437833, "grad_norm": 1.6855881214141846, "learning_rate": 0.0005469068487566246, "loss": 3.5408, "step": 53370 }, { "epoch": 3.626511754314445, "grad_norm": 1.8941079378128052, "learning_rate": 0.0005468643837477918, "loss": 3.2529, "step": 53375 }, { "epoch": 3.6268514743851066, "grad_norm": 2.153730869293213, "learning_rate": 0.0005468219187389591, "loss": 3.6869, "step": 53380 }, { "epoch": 3.6271911944557687, "grad_norm": 1.4439630508422852, "learning_rate": 0.0005467794537301263, "loss": 3.5555, "step": 53385 }, { "epoch": 3.6275309145264303, "grad_norm": 2.13800048828125, "learning_rate": 0.0005467369887212937, "loss": 3.3459, "step": 53390 }, { "epoch": 3.627870634597092, "grad_norm": 2.1924571990966797, "learning_rate": 0.000546694523712461, "loss": 3.3856, "step": 53395 }, { "epoch": 3.628210354667754, "grad_norm": 2.0612361431121826, "learning_rate": 0.0005466520587036282, "loss": 3.3355, "step": 53400 }, { "epoch": 3.6285500747384156, "grad_norm": 2.1632096767425537, "learning_rate": 0.0005466095936947955, "loss": 3.5198, "step": 53405 }, { "epoch": 3.6288897948090773, "grad_norm": 1.8716487884521484, "learning_rate": 0.0005465671286859628, "loss": 3.7059, "step": 53410 }, { "epoch": 3.6292295148797393, "grad_norm": 1.8378849029541016, "learning_rate": 0.00054652466367713, "loss": 3.2645, "step": 53415 }, { "epoch": 3.629569234950401, 
"grad_norm": 1.7468448877334595, "learning_rate": 0.0005464821986682972, "loss": 3.2773, "step": 53420 }, { "epoch": 3.6299089550210626, "grad_norm": 1.6194730997085571, "learning_rate": 0.0005464397336594647, "loss": 3.5171, "step": 53425 }, { "epoch": 3.6302486750917247, "grad_norm": 2.1489098072052, "learning_rate": 0.0005463972686506319, "loss": 3.5252, "step": 53430 }, { "epoch": 3.6305883951623863, "grad_norm": 1.8192102909088135, "learning_rate": 0.0005463548036417991, "loss": 3.4674, "step": 53435 }, { "epoch": 3.630928115233048, "grad_norm": 2.1002745628356934, "learning_rate": 0.0005463123386329665, "loss": 3.59, "step": 53440 }, { "epoch": 3.63126783530371, "grad_norm": 1.745073914527893, "learning_rate": 0.0005462698736241337, "loss": 3.3462, "step": 53445 }, { "epoch": 3.6316075553743716, "grad_norm": 1.4709906578063965, "learning_rate": 0.0005462274086153009, "loss": 3.4842, "step": 53450 }, { "epoch": 3.6319472754450333, "grad_norm": 1.6148898601531982, "learning_rate": 0.0005461849436064683, "loss": 3.5281, "step": 53455 }, { "epoch": 3.6322869955156953, "grad_norm": 2.496788740158081, "learning_rate": 0.0005461424785976356, "loss": 3.232, "step": 53460 }, { "epoch": 3.632626715586357, "grad_norm": 1.930327296257019, "learning_rate": 0.0005461000135888028, "loss": 3.4633, "step": 53465 }, { "epoch": 3.6329664356570186, "grad_norm": 1.5352085828781128, "learning_rate": 0.0005460575485799702, "loss": 3.421, "step": 53470 }, { "epoch": 3.6333061557276802, "grad_norm": 2.1009037494659424, "learning_rate": 0.0005460150835711374, "loss": 3.447, "step": 53475 }, { "epoch": 3.6336458757983423, "grad_norm": 2.1993072032928467, "learning_rate": 0.0005459726185623046, "loss": 3.4382, "step": 53480 }, { "epoch": 3.633985595869004, "grad_norm": 1.8158808946609497, "learning_rate": 0.0005459301535534719, "loss": 3.5049, "step": 53485 }, { "epoch": 3.6343253159396656, "grad_norm": 2.2504963874816895, "learning_rate": 0.0005458876885446392, "loss": 3.4404, "step": 
53490 }, { "epoch": 3.6346650360103276, "grad_norm": 1.876746654510498, "learning_rate": 0.0005458452235358065, "loss": 3.5138, "step": 53495 }, { "epoch": 3.6350047560809893, "grad_norm": 1.3994519710540771, "learning_rate": 0.0005458027585269738, "loss": 3.4789, "step": 53500 }, { "epoch": 3.635344476151651, "grad_norm": 1.9338151216506958, "learning_rate": 0.0005457602935181411, "loss": 3.365, "step": 53505 }, { "epoch": 3.635684196222313, "grad_norm": 1.5603091716766357, "learning_rate": 0.0005457178285093083, "loss": 3.4502, "step": 53510 }, { "epoch": 3.6360239162929746, "grad_norm": 1.9084718227386475, "learning_rate": 0.0005456753635004756, "loss": 3.4355, "step": 53515 }, { "epoch": 3.6363636363636362, "grad_norm": 1.5945464372634888, "learning_rate": 0.0005456328984916428, "loss": 3.6556, "step": 53520 }, { "epoch": 3.636703356434298, "grad_norm": 3.331761121749878, "learning_rate": 0.0005455904334828101, "loss": 3.2889, "step": 53525 }, { "epoch": 3.63704307650496, "grad_norm": 1.8996981382369995, "learning_rate": 0.0005455479684739775, "loss": 3.7192, "step": 53530 }, { "epoch": 3.6373827965756216, "grad_norm": 1.5773521661758423, "learning_rate": 0.0005455055034651447, "loss": 3.5751, "step": 53535 }, { "epoch": 3.637722516646283, "grad_norm": 2.2859930992126465, "learning_rate": 0.000545463038456312, "loss": 3.5774, "step": 53540 }, { "epoch": 3.6380622367169453, "grad_norm": 1.5912057161331177, "learning_rate": 0.0005454205734474793, "loss": 3.4758, "step": 53545 }, { "epoch": 3.638401956787607, "grad_norm": 1.9591286182403564, "learning_rate": 0.0005453781084386465, "loss": 3.5488, "step": 53550 }, { "epoch": 3.6387416768582685, "grad_norm": 1.8424758911132812, "learning_rate": 0.0005453356434298139, "loss": 3.7024, "step": 53555 }, { "epoch": 3.6390813969289306, "grad_norm": 2.182664394378662, "learning_rate": 0.0005452931784209811, "loss": 3.2085, "step": 53560 }, { "epoch": 3.6394211169995923, "grad_norm": 1.9774582386016846, "learning_rate": 
0.0005452507134121484, "loss": 3.539, "step": 53565 }, { "epoch": 3.639760837070254, "grad_norm": 1.9388662576675415, "learning_rate": 0.0005452082484033158, "loss": 3.4217, "step": 53570 }, { "epoch": 3.640100557140916, "grad_norm": 2.1377108097076416, "learning_rate": 0.000545165783394483, "loss": 3.6915, "step": 53575 }, { "epoch": 3.6404402772115776, "grad_norm": 1.6715341806411743, "learning_rate": 0.0005451233183856502, "loss": 3.5265, "step": 53580 }, { "epoch": 3.6407799972822392, "grad_norm": 1.612004280090332, "learning_rate": 0.0005450808533768175, "loss": 3.2388, "step": 53585 }, { "epoch": 3.6411197173529013, "grad_norm": 1.9101146459579468, "learning_rate": 0.0005450383883679848, "loss": 3.7092, "step": 53590 }, { "epoch": 3.641459437423563, "grad_norm": 2.138204574584961, "learning_rate": 0.000544995923359152, "loss": 3.3634, "step": 53595 }, { "epoch": 3.6417991574942246, "grad_norm": 2.2646117210388184, "learning_rate": 0.0005449534583503194, "loss": 3.553, "step": 53600 }, { "epoch": 3.6421388775648866, "grad_norm": 1.638167381286621, "learning_rate": 0.0005449109933414867, "loss": 3.4603, "step": 53605 }, { "epoch": 3.6424785976355483, "grad_norm": 1.8690898418426514, "learning_rate": 0.0005448685283326539, "loss": 3.4, "step": 53610 }, { "epoch": 3.64281831770621, "grad_norm": 2.013385057449341, "learning_rate": 0.0005448260633238212, "loss": 3.5491, "step": 53615 }, { "epoch": 3.643158037776872, "grad_norm": 2.027219772338867, "learning_rate": 0.0005447835983149884, "loss": 3.494, "step": 53620 }, { "epoch": 3.6434977578475336, "grad_norm": 1.8726073503494263, "learning_rate": 0.0005447411333061557, "loss": 3.6232, "step": 53625 }, { "epoch": 3.6438374779181952, "grad_norm": 1.9094053506851196, "learning_rate": 0.0005446986682973231, "loss": 3.6631, "step": 53630 }, { "epoch": 3.6441771979888573, "grad_norm": 1.9433270692825317, "learning_rate": 0.0005446562032884903, "loss": 3.2924, "step": 53635 }, { "epoch": 3.644516918059519, "grad_norm": 
1.9990684986114502, "learning_rate": 0.0005446137382796576, "loss": 3.6242, "step": 53640 }, { "epoch": 3.6448566381301806, "grad_norm": 1.9924262762069702, "learning_rate": 0.0005445712732708249, "loss": 3.2718, "step": 53645 }, { "epoch": 3.6451963582008426, "grad_norm": 1.7953916788101196, "learning_rate": 0.0005445288082619921, "loss": 3.4266, "step": 53650 }, { "epoch": 3.6455360782715043, "grad_norm": 2.1739730834960938, "learning_rate": 0.0005444863432531594, "loss": 3.4747, "step": 53655 }, { "epoch": 3.645875798342166, "grad_norm": 2.1459126472473145, "learning_rate": 0.0005444438782443267, "loss": 3.5991, "step": 53660 }, { "epoch": 3.646215518412828, "grad_norm": 1.7039035558700562, "learning_rate": 0.000544401413235494, "loss": 3.4495, "step": 53665 }, { "epoch": 3.6465552384834896, "grad_norm": 1.8222616910934448, "learning_rate": 0.0005443589482266612, "loss": 3.5342, "step": 53670 }, { "epoch": 3.6468949585541512, "grad_norm": 2.4193167686462402, "learning_rate": 0.0005443164832178286, "loss": 3.3948, "step": 53675 }, { "epoch": 3.6472346786248133, "grad_norm": 1.9119058847427368, "learning_rate": 0.0005442740182089958, "loss": 3.3357, "step": 53680 }, { "epoch": 3.647574398695475, "grad_norm": 1.6431210041046143, "learning_rate": 0.000544231553200163, "loss": 3.5011, "step": 53685 }, { "epoch": 3.6479141187661366, "grad_norm": 1.8385605812072754, "learning_rate": 0.0005441890881913304, "loss": 3.478, "step": 53690 }, { "epoch": 3.6482538388367987, "grad_norm": 2.0022435188293457, "learning_rate": 0.0005441466231824976, "loss": 3.48, "step": 53695 }, { "epoch": 3.6485935589074603, "grad_norm": 1.8014887571334839, "learning_rate": 0.0005441041581736649, "loss": 3.3972, "step": 53700 }, { "epoch": 3.648933278978122, "grad_norm": 2.5363433361053467, "learning_rate": 0.0005440616931648323, "loss": 3.519, "step": 53705 }, { "epoch": 3.649272999048784, "grad_norm": 2.0361733436584473, "learning_rate": 0.0005440192281559995, "loss": 3.5712, "step": 53710 }, 
{ "epoch": 3.6496127191194456, "grad_norm": 1.5144215822219849, "learning_rate": 0.0005439767631471667, "loss": 3.6448, "step": 53715 }, { "epoch": 3.6499524391901073, "grad_norm": 1.8494157791137695, "learning_rate": 0.000543934298138334, "loss": 3.5192, "step": 53720 }, { "epoch": 3.6502921592607693, "grad_norm": 1.7853107452392578, "learning_rate": 0.0005438918331295013, "loss": 3.3808, "step": 53725 }, { "epoch": 3.650631879331431, "grad_norm": 2.5315847396850586, "learning_rate": 0.0005438493681206685, "loss": 3.5524, "step": 53730 }, { "epoch": 3.6509715994020926, "grad_norm": 2.1005985736846924, "learning_rate": 0.0005438069031118359, "loss": 3.4007, "step": 53735 }, { "epoch": 3.6513113194727547, "grad_norm": 2.146141767501831, "learning_rate": 0.0005437644381030032, "loss": 3.3008, "step": 53740 }, { "epoch": 3.6516510395434163, "grad_norm": 2.17653751373291, "learning_rate": 0.0005437219730941704, "loss": 3.2157, "step": 53745 }, { "epoch": 3.651990759614078, "grad_norm": 1.5294163227081299, "learning_rate": 0.0005436795080853377, "loss": 3.4818, "step": 53750 }, { "epoch": 3.65233047968474, "grad_norm": 1.5559446811676025, "learning_rate": 0.000543637043076505, "loss": 3.6222, "step": 53755 }, { "epoch": 3.6526701997554016, "grad_norm": 2.00982928276062, "learning_rate": 0.0005435945780676722, "loss": 3.3871, "step": 53760 }, { "epoch": 3.6530099198260633, "grad_norm": 1.8932557106018066, "learning_rate": 0.0005435521130588395, "loss": 3.431, "step": 53765 }, { "epoch": 3.6533496398967253, "grad_norm": 2.2350473403930664, "learning_rate": 0.0005435096480500068, "loss": 3.3146, "step": 53770 }, { "epoch": 3.653689359967387, "grad_norm": 1.5207518339157104, "learning_rate": 0.0005434671830411741, "loss": 3.5358, "step": 53775 }, { "epoch": 3.6540290800380486, "grad_norm": 1.8194490671157837, "learning_rate": 0.0005434247180323414, "loss": 3.482, "step": 53780 }, { "epoch": 3.6543688001087107, "grad_norm": 1.689945936203003, "learning_rate": 
0.0005433822530235086, "loss": 3.4156, "step": 53785 }, { "epoch": 3.6547085201793723, "grad_norm": 2.2138662338256836, "learning_rate": 0.0005433397880146759, "loss": 3.4955, "step": 53790 }, { "epoch": 3.655048240250034, "grad_norm": 1.839550495147705, "learning_rate": 0.0005432973230058432, "loss": 3.4775, "step": 53795 }, { "epoch": 3.655387960320696, "grad_norm": 1.7429893016815186, "learning_rate": 0.0005432548579970104, "loss": 3.4515, "step": 53800 }, { "epoch": 3.6557276803913576, "grad_norm": 2.6166317462921143, "learning_rate": 0.0005432123929881778, "loss": 3.5973, "step": 53805 }, { "epoch": 3.6560674004620193, "grad_norm": 2.170365333557129, "learning_rate": 0.0005431699279793451, "loss": 3.3784, "step": 53810 }, { "epoch": 3.656407120532681, "grad_norm": 1.6436301469802856, "learning_rate": 0.0005431274629705123, "loss": 3.5666, "step": 53815 }, { "epoch": 3.656746840603343, "grad_norm": 1.7555209398269653, "learning_rate": 0.0005430849979616795, "loss": 3.6118, "step": 53820 }, { "epoch": 3.6570865606740046, "grad_norm": 2.5461063385009766, "learning_rate": 0.0005430425329528469, "loss": 3.563, "step": 53825 }, { "epoch": 3.6574262807446662, "grad_norm": 2.3068039417266846, "learning_rate": 0.0005430000679440141, "loss": 3.2609, "step": 53830 }, { "epoch": 3.6577660008153283, "grad_norm": 1.9939697980880737, "learning_rate": 0.0005429576029351813, "loss": 3.0431, "step": 53835 }, { "epoch": 3.65810572088599, "grad_norm": 2.541900873184204, "learning_rate": 0.0005429151379263488, "loss": 3.1928, "step": 53840 }, { "epoch": 3.6584454409566516, "grad_norm": 2.3987720012664795, "learning_rate": 0.000542872672917516, "loss": 3.4092, "step": 53845 }, { "epoch": 3.6587851610273137, "grad_norm": 1.5865113735198975, "learning_rate": 0.0005428302079086832, "loss": 3.5335, "step": 53850 }, { "epoch": 3.6591248810979753, "grad_norm": 1.990910530090332, "learning_rate": 0.0005427877428998506, "loss": 3.479, "step": 53855 }, { "epoch": 3.659464601168637, 
"grad_norm": 1.891648769378662, "learning_rate": 0.0005427452778910178, "loss": 3.4418, "step": 53860 }, { "epoch": 3.6598043212392986, "grad_norm": 1.547203779220581, "learning_rate": 0.000542702812882185, "loss": 3.197, "step": 53865 }, { "epoch": 3.6601440413099606, "grad_norm": 1.624226450920105, "learning_rate": 0.0005426603478733523, "loss": 3.3415, "step": 53870 }, { "epoch": 3.6604837613806223, "grad_norm": 1.550350546836853, "learning_rate": 0.0005426178828645197, "loss": 3.386, "step": 53875 }, { "epoch": 3.660823481451284, "grad_norm": 1.5987650156021118, "learning_rate": 0.0005425754178556869, "loss": 3.3693, "step": 53880 }, { "epoch": 3.661163201521946, "grad_norm": 1.6752468347549438, "learning_rate": 0.0005425329528468542, "loss": 3.2528, "step": 53885 }, { "epoch": 3.6615029215926076, "grad_norm": 1.759910225868225, "learning_rate": 0.0005424904878380215, "loss": 3.4217, "step": 53890 }, { "epoch": 3.6618426416632692, "grad_norm": 2.025421619415283, "learning_rate": 0.0005424480228291888, "loss": 3.5871, "step": 53895 }, { "epoch": 3.6621823617339313, "grad_norm": 1.835355281829834, "learning_rate": 0.000542405557820356, "loss": 3.3948, "step": 53900 }, { "epoch": 3.662522081804593, "grad_norm": 1.5701571702957153, "learning_rate": 0.0005423630928115232, "loss": 3.5524, "step": 53905 }, { "epoch": 3.6628618018752546, "grad_norm": 1.826490044593811, "learning_rate": 0.0005423206278026907, "loss": 3.4597, "step": 53910 }, { "epoch": 3.6632015219459166, "grad_norm": 2.0471792221069336, "learning_rate": 0.0005422781627938579, "loss": 3.3183, "step": 53915 }, { "epoch": 3.6635412420165783, "grad_norm": 1.9112790822982788, "learning_rate": 0.0005422356977850251, "loss": 3.4854, "step": 53920 }, { "epoch": 3.66388096208724, "grad_norm": 1.901716947555542, "learning_rate": 0.0005421932327761925, "loss": 3.5558, "step": 53925 }, { "epoch": 3.664220682157902, "grad_norm": 1.6255884170532227, "learning_rate": 0.0005421507677673597, "loss": 3.5982, "step": 
53930 }, { "epoch": 3.6645604022285636, "grad_norm": 1.7617815732955933, "learning_rate": 0.0005421083027585269, "loss": 3.5027, "step": 53935 }, { "epoch": 3.6649001222992252, "grad_norm": 1.5772619247436523, "learning_rate": 0.0005420658377496943, "loss": 3.4322, "step": 53940 }, { "epoch": 3.6652398423698873, "grad_norm": 1.9720221757888794, "learning_rate": 0.0005420233727408616, "loss": 3.3025, "step": 53945 }, { "epoch": 3.665579562440549, "grad_norm": 1.956626057624817, "learning_rate": 0.0005419809077320288, "loss": 3.3417, "step": 53950 }, { "epoch": 3.6659192825112106, "grad_norm": 2.179570198059082, "learning_rate": 0.0005419384427231962, "loss": 3.6651, "step": 53955 }, { "epoch": 3.6662590025818727, "grad_norm": 2.003660202026367, "learning_rate": 0.0005418959777143634, "loss": 3.4611, "step": 53960 }, { "epoch": 3.6665987226525343, "grad_norm": 2.1508402824401855, "learning_rate": 0.0005418535127055306, "loss": 3.4855, "step": 53965 }, { "epoch": 3.666938442723196, "grad_norm": 2.51267409324646, "learning_rate": 0.0005418110476966979, "loss": 3.5196, "step": 53970 }, { "epoch": 3.667278162793858, "grad_norm": 1.993864893913269, "learning_rate": 0.0005417685826878652, "loss": 3.8334, "step": 53975 }, { "epoch": 3.6676178828645196, "grad_norm": 1.57415771484375, "learning_rate": 0.0005417261176790325, "loss": 3.5765, "step": 53980 }, { "epoch": 3.6679576029351812, "grad_norm": 1.8218576908111572, "learning_rate": 0.0005416836526701998, "loss": 3.5659, "step": 53985 }, { "epoch": 3.6682973230058433, "grad_norm": 2.0407400131225586, "learning_rate": 0.0005416411876613671, "loss": 3.1639, "step": 53990 }, { "epoch": 3.668637043076505, "grad_norm": 2.0537617206573486, "learning_rate": 0.0005415987226525343, "loss": 3.5236, "step": 53995 }, { "epoch": 3.6689767631471666, "grad_norm": 2.297276258468628, "learning_rate": 0.0005415562576437016, "loss": 3.5523, "step": 54000 }, { "epoch": 3.6693164832178287, "grad_norm": 1.7159141302108765, "learning_rate": 
0.0005415137926348689, "loss": 3.5195, "step": 54005 }, { "epoch": 3.6696562032884903, "grad_norm": 1.5001583099365234, "learning_rate": 0.0005414713276260361, "loss": 3.3901, "step": 54010 }, { "epoch": 3.669995923359152, "grad_norm": 1.5518927574157715, "learning_rate": 0.0005414288626172035, "loss": 3.4743, "step": 54015 }, { "epoch": 3.670335643429814, "grad_norm": 1.5088250637054443, "learning_rate": 0.0005413863976083707, "loss": 3.4553, "step": 54020 }, { "epoch": 3.6706753635004756, "grad_norm": 1.9603084325790405, "learning_rate": 0.000541343932599538, "loss": 3.4544, "step": 54025 }, { "epoch": 3.6710150835711373, "grad_norm": 1.7692769765853882, "learning_rate": 0.0005413014675907053, "loss": 3.5166, "step": 54030 }, { "epoch": 3.6713548036417993, "grad_norm": 1.7936420440673828, "learning_rate": 0.0005412590025818725, "loss": 3.405, "step": 54035 }, { "epoch": 3.671694523712461, "grad_norm": 1.540076494216919, "learning_rate": 0.0005412165375730398, "loss": 3.706, "step": 54040 }, { "epoch": 3.6720342437831226, "grad_norm": 1.8455249071121216, "learning_rate": 0.0005411740725642071, "loss": 3.5202, "step": 54045 }, { "epoch": 3.6723739638537847, "grad_norm": 1.6159545183181763, "learning_rate": 0.0005411316075553744, "loss": 3.5121, "step": 54050 }, { "epoch": 3.6727136839244463, "grad_norm": 1.956270694732666, "learning_rate": 0.0005410891425465417, "loss": 3.4882, "step": 54055 }, { "epoch": 3.673053403995108, "grad_norm": 2.0444283485412598, "learning_rate": 0.000541046677537709, "loss": 3.5892, "step": 54060 }, { "epoch": 3.67339312406577, "grad_norm": 1.7389181852340698, "learning_rate": 0.0005410042125288762, "loss": 3.3405, "step": 54065 }, { "epoch": 3.6737328441364316, "grad_norm": 2.184706211090088, "learning_rate": 0.0005409617475200434, "loss": 3.5881, "step": 54070 }, { "epoch": 3.6740725642070933, "grad_norm": 1.7405126094818115, "learning_rate": 0.0005409192825112108, "loss": 3.7307, "step": 54075 }, { "epoch": 3.6744122842777553, 
"grad_norm": 1.6866602897644043, "learning_rate": 0.000540876817502378, "loss": 3.6218, "step": 54080 }, { "epoch": 3.674752004348417, "grad_norm": 2.0568020343780518, "learning_rate": 0.0005408343524935453, "loss": 3.6789, "step": 54085 }, { "epoch": 3.6750917244190786, "grad_norm": 1.4044793844223022, "learning_rate": 0.0005407918874847127, "loss": 3.4633, "step": 54090 }, { "epoch": 3.6754314444897407, "grad_norm": 1.7487481832504272, "learning_rate": 0.0005407494224758799, "loss": 3.0852, "step": 54095 }, { "epoch": 3.6757711645604023, "grad_norm": 1.7966734170913696, "learning_rate": 0.0005407069574670471, "loss": 3.5633, "step": 54100 }, { "epoch": 3.676110884631064, "grad_norm": 1.9551242589950562, "learning_rate": 0.0005406644924582145, "loss": 3.4128, "step": 54105 }, { "epoch": 3.676450604701726, "grad_norm": 2.2116870880126953, "learning_rate": 0.0005406220274493817, "loss": 3.4875, "step": 54110 }, { "epoch": 3.6767903247723877, "grad_norm": 1.9670182466506958, "learning_rate": 0.0005405795624405489, "loss": 3.4328, "step": 54115 }, { "epoch": 3.6771300448430493, "grad_norm": 2.151655673980713, "learning_rate": 0.0005405370974317163, "loss": 3.464, "step": 54120 }, { "epoch": 3.6774697649137114, "grad_norm": 1.8549625873565674, "learning_rate": 0.0005404946324228836, "loss": 3.3899, "step": 54125 }, { "epoch": 3.677809484984373, "grad_norm": 1.6319129467010498, "learning_rate": 0.0005404521674140508, "loss": 3.292, "step": 54130 }, { "epoch": 3.6781492050550346, "grad_norm": 1.936620831489563, "learning_rate": 0.0005404097024052181, "loss": 3.6115, "step": 54135 }, { "epoch": 3.6784889251256967, "grad_norm": 2.2574989795684814, "learning_rate": 0.0005403672373963854, "loss": 3.563, "step": 54140 }, { "epoch": 3.6788286451963583, "grad_norm": 1.7752981185913086, "learning_rate": 0.0005403247723875526, "loss": 3.5744, "step": 54145 }, { "epoch": 3.67916836526702, "grad_norm": 1.5993852615356445, "learning_rate": 0.00054028230737872, "loss": 3.4492, 
"step": 54150 }, { "epoch": 3.6795080853376816, "grad_norm": 1.8479286432266235, "learning_rate": 0.0005402398423698873, "loss": 3.4425, "step": 54155 }, { "epoch": 3.6798478054083437, "grad_norm": 1.6671843528747559, "learning_rate": 0.0005401973773610545, "loss": 3.5183, "step": 54160 }, { "epoch": 3.6801875254790053, "grad_norm": 2.4013736248016357, "learning_rate": 0.0005401549123522218, "loss": 3.5872, "step": 54165 }, { "epoch": 3.680527245549667, "grad_norm": 1.9871336221694946, "learning_rate": 0.000540112447343389, "loss": 3.1355, "step": 54170 }, { "epoch": 3.680866965620329, "grad_norm": 1.9447956085205078, "learning_rate": 0.0005400699823345563, "loss": 3.7131, "step": 54175 }, { "epoch": 3.6812066856909906, "grad_norm": 1.486487865447998, "learning_rate": 0.0005400275173257236, "loss": 3.2332, "step": 54180 }, { "epoch": 3.6815464057616523, "grad_norm": 1.9012190103530884, "learning_rate": 0.0005399850523168909, "loss": 3.4556, "step": 54185 }, { "epoch": 3.6818861258323143, "grad_norm": 1.5879663228988647, "learning_rate": 0.0005399425873080582, "loss": 3.3767, "step": 54190 }, { "epoch": 3.682225845902976, "grad_norm": 1.3820147514343262, "learning_rate": 0.0005399001222992255, "loss": 3.2713, "step": 54195 }, { "epoch": 3.6825655659736376, "grad_norm": 1.8751254081726074, "learning_rate": 0.0005398576572903927, "loss": 3.5244, "step": 54200 }, { "epoch": 3.6829052860442992, "grad_norm": 1.87424635887146, "learning_rate": 0.0005398151922815599, "loss": 3.3987, "step": 54205 }, { "epoch": 3.6832450061149613, "grad_norm": 2.09529185295105, "learning_rate": 0.0005397727272727273, "loss": 3.5673, "step": 54210 }, { "epoch": 3.683584726185623, "grad_norm": 1.612015962600708, "learning_rate": 0.0005397302622638945, "loss": 3.4119, "step": 54215 }, { "epoch": 3.6839244462562846, "grad_norm": 1.6168537139892578, "learning_rate": 0.0005396877972550618, "loss": 3.379, "step": 54220 }, { "epoch": 3.6842641663269466, "grad_norm": 2.2885754108428955, 
"learning_rate": 0.0005396453322462292, "loss": 3.3158, "step": 54225 }, { "epoch": 3.6846038863976083, "grad_norm": 1.9794656038284302, "learning_rate": 0.0005396028672373964, "loss": 3.5146, "step": 54230 }, { "epoch": 3.68494360646827, "grad_norm": 1.5698736906051636, "learning_rate": 0.0005395604022285637, "loss": 3.4911, "step": 54235 }, { "epoch": 3.685283326538932, "grad_norm": 1.8862860202789307, "learning_rate": 0.000539517937219731, "loss": 3.1103, "step": 54240 }, { "epoch": 3.6856230466095936, "grad_norm": 2.1658756732940674, "learning_rate": 0.0005394754722108982, "loss": 3.5896, "step": 54245 }, { "epoch": 3.6859627666802552, "grad_norm": 2.0543875694274902, "learning_rate": 0.0005394330072020655, "loss": 3.59, "step": 54250 }, { "epoch": 3.6863024867509173, "grad_norm": 2.151986598968506, "learning_rate": 0.0005393905421932329, "loss": 3.6119, "step": 54255 }, { "epoch": 3.686642206821579, "grad_norm": 1.5295363664627075, "learning_rate": 0.0005393480771844001, "loss": 3.2084, "step": 54260 }, { "epoch": 3.6869819268922406, "grad_norm": 1.3463952541351318, "learning_rate": 0.0005393056121755674, "loss": 3.6139, "step": 54265 }, { "epoch": 3.6873216469629027, "grad_norm": 1.6326818466186523, "learning_rate": 0.0005392631471667346, "loss": 3.4921, "step": 54270 }, { "epoch": 3.6876613670335643, "grad_norm": 1.7177001237869263, "learning_rate": 0.0005392206821579019, "loss": 3.4306, "step": 54275 }, { "epoch": 3.688001087104226, "grad_norm": 1.941763162612915, "learning_rate": 0.0005391782171490692, "loss": 3.6289, "step": 54280 }, { "epoch": 3.688340807174888, "grad_norm": 1.5715619325637817, "learning_rate": 0.0005391357521402364, "loss": 3.4233, "step": 54285 }, { "epoch": 3.6886805272455496, "grad_norm": 1.6123746633529663, "learning_rate": 0.0005390932871314038, "loss": 3.3838, "step": 54290 }, { "epoch": 3.6890202473162113, "grad_norm": 1.5273535251617432, "learning_rate": 0.0005390508221225711, "loss": 3.415, "step": 54295 }, { "epoch": 
3.6893599673868733, "grad_norm": 2.121662139892578, "learning_rate": 0.0005390083571137383, "loss": 3.474, "step": 54300 }, { "epoch": 3.689699687457535, "grad_norm": 2.162774085998535, "learning_rate": 0.0005389658921049055, "loss": 3.5854, "step": 54305 }, { "epoch": 3.6900394075281966, "grad_norm": 1.7826855182647705, "learning_rate": 0.0005389234270960729, "loss": 3.1478, "step": 54310 }, { "epoch": 3.6903791275988587, "grad_norm": 1.8566758632659912, "learning_rate": 0.0005388809620872401, "loss": 3.5631, "step": 54315 }, { "epoch": 3.6907188476695203, "grad_norm": 1.5559028387069702, "learning_rate": 0.0005388384970784073, "loss": 3.6794, "step": 54320 }, { "epoch": 3.691058567740182, "grad_norm": 1.8592290878295898, "learning_rate": 0.0005387960320695748, "loss": 3.4336, "step": 54325 }, { "epoch": 3.691398287810844, "grad_norm": 1.4469977617263794, "learning_rate": 0.000538753567060742, "loss": 3.3289, "step": 54330 }, { "epoch": 3.6917380078815056, "grad_norm": 1.9707086086273193, "learning_rate": 0.0005387111020519092, "loss": 3.4927, "step": 54335 }, { "epoch": 3.6920777279521673, "grad_norm": 1.8547186851501465, "learning_rate": 0.0005386686370430766, "loss": 3.2339, "step": 54340 }, { "epoch": 3.6924174480228293, "grad_norm": 1.7410579919815063, "learning_rate": 0.0005386261720342438, "loss": 3.3226, "step": 54345 }, { "epoch": 3.692757168093491, "grad_norm": 1.5785223245620728, "learning_rate": 0.000538583707025411, "loss": 3.614, "step": 54350 }, { "epoch": 3.6930968881641526, "grad_norm": 1.6335039138793945, "learning_rate": 0.0005385412420165783, "loss": 3.5996, "step": 54355 }, { "epoch": 3.6934366082348147, "grad_norm": 1.4906717538833618, "learning_rate": 0.0005384987770077457, "loss": 3.4559, "step": 54360 }, { "epoch": 3.6937763283054763, "grad_norm": 1.39930260181427, "learning_rate": 0.0005384563119989129, "loss": 3.3882, "step": 54365 }, { "epoch": 3.694116048376138, "grad_norm": 1.7136521339416504, "learning_rate": 0.0005384138469900802, 
"loss": 3.4007, "step": 54370 }, { "epoch": 3.6944557684468, "grad_norm": 2.5364606380462646, "learning_rate": 0.0005383713819812475, "loss": 3.5516, "step": 54375 }, { "epoch": 3.6947954885174616, "grad_norm": 1.8839082717895508, "learning_rate": 0.0005383289169724147, "loss": 3.1732, "step": 54380 }, { "epoch": 3.6951352085881233, "grad_norm": 2.184008836746216, "learning_rate": 0.000538286451963582, "loss": 3.4116, "step": 54385 }, { "epoch": 3.6954749286587854, "grad_norm": 2.1872544288635254, "learning_rate": 0.0005382439869547493, "loss": 3.4026, "step": 54390 }, { "epoch": 3.695814648729447, "grad_norm": 1.833688497543335, "learning_rate": 0.0005382015219459166, "loss": 3.3116, "step": 54395 }, { "epoch": 3.6961543688001086, "grad_norm": 2.3800532817840576, "learning_rate": 0.0005381590569370839, "loss": 3.3487, "step": 54400 }, { "epoch": 3.6964940888707707, "grad_norm": 1.7154514789581299, "learning_rate": 0.0005381165919282511, "loss": 3.5605, "step": 54405 }, { "epoch": 3.6968338089414323, "grad_norm": 1.6959989070892334, "learning_rate": 0.0005380741269194184, "loss": 3.4011, "step": 54410 }, { "epoch": 3.697173529012094, "grad_norm": 1.941423773765564, "learning_rate": 0.0005380316619105857, "loss": 3.3378, "step": 54415 }, { "epoch": 3.697513249082756, "grad_norm": 2.154273509979248, "learning_rate": 0.0005379891969017529, "loss": 3.5487, "step": 54420 }, { "epoch": 3.6978529691534177, "grad_norm": 1.7022929191589355, "learning_rate": 0.0005379467318929202, "loss": 3.2845, "step": 54425 }, { "epoch": 3.6981926892240793, "grad_norm": 1.849098563194275, "learning_rate": 0.0005379042668840876, "loss": 3.3791, "step": 54430 }, { "epoch": 3.6985324092947414, "grad_norm": 1.8551573753356934, "learning_rate": 0.0005378618018752548, "loss": 3.4365, "step": 54435 }, { "epoch": 3.698872129365403, "grad_norm": 1.939499855041504, "learning_rate": 0.000537819336866422, "loss": 3.419, "step": 54440 }, { "epoch": 3.6992118494360646, "grad_norm": 1.947914719581604, 
"learning_rate": 0.0005377768718575894, "loss": 3.5578, "step": 54445 }, { "epoch": 3.6995515695067267, "grad_norm": 1.796814203262329, "learning_rate": 0.0005377344068487566, "loss": 3.24, "step": 54450 }, { "epoch": 3.6998912895773883, "grad_norm": 2.157236337661743, "learning_rate": 0.0005376919418399238, "loss": 3.59, "step": 54455 }, { "epoch": 3.70023100964805, "grad_norm": 1.8105074167251587, "learning_rate": 0.0005376494768310912, "loss": 3.4454, "step": 54460 }, { "epoch": 3.700570729718712, "grad_norm": 1.7728534936904907, "learning_rate": 0.0005376070118222585, "loss": 3.4743, "step": 54465 }, { "epoch": 3.7009104497893737, "grad_norm": 1.6344404220581055, "learning_rate": 0.0005375645468134257, "loss": 3.515, "step": 54470 }, { "epoch": 3.7012501698600353, "grad_norm": 1.5263609886169434, "learning_rate": 0.0005375220818045931, "loss": 3.4527, "step": 54475 }, { "epoch": 3.7015898899306974, "grad_norm": 1.9821267127990723, "learning_rate": 0.0005374796167957603, "loss": 3.5137, "step": 54480 }, { "epoch": 3.701929610001359, "grad_norm": 1.5194021463394165, "learning_rate": 0.0005374371517869275, "loss": 3.5287, "step": 54485 }, { "epoch": 3.7022693300720206, "grad_norm": 1.8539392948150635, "learning_rate": 0.0005373946867780949, "loss": 3.5209, "step": 54490 }, { "epoch": 3.7026090501426823, "grad_norm": 1.8368165493011475, "learning_rate": 0.0005373522217692621, "loss": 3.398, "step": 54495 }, { "epoch": 3.7029487702133443, "grad_norm": 1.9656931161880493, "learning_rate": 0.0005373097567604294, "loss": 3.4772, "step": 54500 }, { "epoch": 3.703288490284006, "grad_norm": 2.071417808532715, "learning_rate": 0.0005372672917515967, "loss": 3.3094, "step": 54505 }, { "epoch": 3.7036282103546676, "grad_norm": 2.3606784343719482, "learning_rate": 0.000537224826742764, "loss": 3.5562, "step": 54510 }, { "epoch": 3.7039679304253297, "grad_norm": 1.6315256357192993, "learning_rate": 0.0005371823617339312, "loss": 3.1422, "step": 54515 }, { "epoch": 
3.7043076504959913, "grad_norm": 2.185640335083008, "learning_rate": 0.0005371398967250985, "loss": 3.3113, "step": 54520 }, { "epoch": 3.704647370566653, "grad_norm": 1.6338634490966797, "learning_rate": 0.0005370974317162658, "loss": 3.618, "step": 54525 }, { "epoch": 3.704987090637315, "grad_norm": 1.69529390335083, "learning_rate": 0.000537054966707433, "loss": 3.5085, "step": 54530 }, { "epoch": 3.7053268107079766, "grad_norm": 1.6686667203903198, "learning_rate": 0.0005370125016986004, "loss": 3.416, "step": 54535 }, { "epoch": 3.7056665307786383, "grad_norm": 1.8303894996643066, "learning_rate": 0.0005369700366897677, "loss": 3.506, "step": 54540 }, { "epoch": 3.7060062508493, "grad_norm": 1.519055724143982, "learning_rate": 0.0005369275716809349, "loss": 3.5221, "step": 54545 }, { "epoch": 3.706345970919962, "grad_norm": 2.154876232147217, "learning_rate": 0.0005368851066721022, "loss": 3.4404, "step": 54550 }, { "epoch": 3.7066856909906236, "grad_norm": 1.9603818655014038, "learning_rate": 0.0005368426416632694, "loss": 3.277, "step": 54555 }, { "epoch": 3.7070254110612852, "grad_norm": 1.8086951971054077, "learning_rate": 0.0005368001766544367, "loss": 3.3706, "step": 54560 }, { "epoch": 3.7073651311319473, "grad_norm": 1.6507807970046997, "learning_rate": 0.000536757711645604, "loss": 3.6722, "step": 54565 }, { "epoch": 3.707704851202609, "grad_norm": 2.088239908218384, "learning_rate": 0.0005367152466367713, "loss": 3.2988, "step": 54570 }, { "epoch": 3.7080445712732706, "grad_norm": 1.6458840370178223, "learning_rate": 0.0005366727816279387, "loss": 3.4276, "step": 54575 }, { "epoch": 3.7083842913439327, "grad_norm": 1.7444394826889038, "learning_rate": 0.0005366303166191059, "loss": 3.4638, "step": 54580 }, { "epoch": 3.7087240114145943, "grad_norm": 2.0322587490081787, "learning_rate": 0.0005365878516102731, "loss": 3.3233, "step": 54585 }, { "epoch": 3.709063731485256, "grad_norm": 2.214991331100464, "learning_rate": 0.0005365453866014405, "loss": 
3.8426, "step": 54590 }, { "epoch": 3.709403451555918, "grad_norm": 1.7499955892562866, "learning_rate": 0.0005365029215926077, "loss": 3.5385, "step": 54595 }, { "epoch": 3.7097431716265796, "grad_norm": 1.9025768041610718, "learning_rate": 0.0005364604565837749, "loss": 3.4074, "step": 54600 }, { "epoch": 3.7100828916972413, "grad_norm": 1.8167027235031128, "learning_rate": 0.0005364179915749424, "loss": 3.4512, "step": 54605 }, { "epoch": 3.7104226117679033, "grad_norm": 2.2518324851989746, "learning_rate": 0.0005363755265661096, "loss": 3.2735, "step": 54610 }, { "epoch": 3.710762331838565, "grad_norm": 1.9860566854476929, "learning_rate": 0.0005363330615572768, "loss": 3.2176, "step": 54615 }, { "epoch": 3.7111020519092266, "grad_norm": 1.634863018989563, "learning_rate": 0.0005362905965484441, "loss": 3.2542, "step": 54620 }, { "epoch": 3.7114417719798887, "grad_norm": 1.8720160722732544, "learning_rate": 0.0005362481315396114, "loss": 3.3892, "step": 54625 }, { "epoch": 3.7117814920505503, "grad_norm": 2.307030200958252, "learning_rate": 0.0005362056665307786, "loss": 3.7181, "step": 54630 }, { "epoch": 3.712121212121212, "grad_norm": 1.5753376483917236, "learning_rate": 0.0005361632015219459, "loss": 3.5311, "step": 54635 }, { "epoch": 3.712460932191874, "grad_norm": 2.061112403869629, "learning_rate": 0.0005361207365131133, "loss": 3.4247, "step": 54640 }, { "epoch": 3.7128006522625356, "grad_norm": 2.1108546257019043, "learning_rate": 0.0005360782715042805, "loss": 3.4795, "step": 54645 }, { "epoch": 3.7131403723331973, "grad_norm": 1.822342038154602, "learning_rate": 0.0005360358064954478, "loss": 3.3045, "step": 54650 }, { "epoch": 3.7134800924038593, "grad_norm": 2.4274280071258545, "learning_rate": 0.000535993341486615, "loss": 3.4498, "step": 54655 }, { "epoch": 3.713819812474521, "grad_norm": 1.6548429727554321, "learning_rate": 0.0005359508764777823, "loss": 3.2227, "step": 54660 }, { "epoch": 3.7141595325451826, "grad_norm": 1.8304104804992676, 
"learning_rate": 0.0005359084114689496, "loss": 3.2652, "step": 54665 }, { "epoch": 3.7144992526158447, "grad_norm": 1.6960697174072266, "learning_rate": 0.0005358659464601168, "loss": 3.5373, "step": 54670 }, { "epoch": 3.7148389726865063, "grad_norm": 1.5809153318405151, "learning_rate": 0.0005358234814512842, "loss": 3.4784, "step": 54675 }, { "epoch": 3.715178692757168, "grad_norm": 2.518695116043091, "learning_rate": 0.0005357810164424515, "loss": 3.2834, "step": 54680 }, { "epoch": 3.71551841282783, "grad_norm": 1.9014945030212402, "learning_rate": 0.0005357385514336187, "loss": 3.5446, "step": 54685 }, { "epoch": 3.7158581328984917, "grad_norm": 2.046128273010254, "learning_rate": 0.000535696086424786, "loss": 3.6321, "step": 54690 }, { "epoch": 3.7161978529691533, "grad_norm": 1.7774966955184937, "learning_rate": 0.0005356536214159533, "loss": 3.4845, "step": 54695 }, { "epoch": 3.7165375730398154, "grad_norm": 1.8449735641479492, "learning_rate": 0.0005356111564071205, "loss": 3.5219, "step": 54700 }, { "epoch": 3.716877293110477, "grad_norm": 2.128568410873413, "learning_rate": 0.0005355686913982877, "loss": 3.4566, "step": 54705 }, { "epoch": 3.7172170131811386, "grad_norm": 1.7288687229156494, "learning_rate": 0.0005355262263894552, "loss": 3.5914, "step": 54710 }, { "epoch": 3.7175567332518007, "grad_norm": 1.5507612228393555, "learning_rate": 0.0005354837613806224, "loss": 3.3668, "step": 54715 }, { "epoch": 3.7178964533224623, "grad_norm": 1.580640196800232, "learning_rate": 0.0005354412963717896, "loss": 3.4028, "step": 54720 }, { "epoch": 3.718236173393124, "grad_norm": 2.5240044593811035, "learning_rate": 0.000535398831362957, "loss": 3.3286, "step": 54725 }, { "epoch": 3.718575893463786, "grad_norm": 1.8671232461929321, "learning_rate": 0.0005353563663541242, "loss": 3.541, "step": 54730 }, { "epoch": 3.7189156135344477, "grad_norm": 1.633223056793213, "learning_rate": 0.0005353139013452914, "loss": 3.2668, "step": 54735 }, { "epoch": 
3.7192553336051093, "grad_norm": 1.989274263381958, "learning_rate": 0.0005352714363364589, "loss": 3.5095, "step": 54740 }, { "epoch": 3.7195950536757714, "grad_norm": 1.9687703847885132, "learning_rate": 0.0005352289713276261, "loss": 3.4457, "step": 54745 }, { "epoch": 3.719934773746433, "grad_norm": 1.7926980257034302, "learning_rate": 0.0005351865063187933, "loss": 3.4585, "step": 54750 }, { "epoch": 3.7202744938170946, "grad_norm": 1.4092168807983398, "learning_rate": 0.0005351440413099606, "loss": 3.5537, "step": 54755 }, { "epoch": 3.7206142138877567, "grad_norm": 1.3983807563781738, "learning_rate": 0.0005351015763011279, "loss": 3.4695, "step": 54760 }, { "epoch": 3.7209539339584183, "grad_norm": 1.468345284461975, "learning_rate": 0.0005350591112922951, "loss": 3.3043, "step": 54765 }, { "epoch": 3.72129365402908, "grad_norm": 1.7676290273666382, "learning_rate": 0.0005350166462834624, "loss": 3.4711, "step": 54770 }, { "epoch": 3.721633374099742, "grad_norm": 1.8611031770706177, "learning_rate": 0.0005349741812746298, "loss": 3.3662, "step": 54775 }, { "epoch": 3.7219730941704037, "grad_norm": 2.0337014198303223, "learning_rate": 0.000534931716265797, "loss": 3.5304, "step": 54780 }, { "epoch": 3.7223128142410653, "grad_norm": 1.4708248376846313, "learning_rate": 0.0005348892512569643, "loss": 3.2999, "step": 54785 }, { "epoch": 3.7226525343117274, "grad_norm": 1.9961810111999512, "learning_rate": 0.0005348467862481316, "loss": 3.6764, "step": 54790 }, { "epoch": 3.722992254382389, "grad_norm": 2.078481912612915, "learning_rate": 0.0005348043212392988, "loss": 3.538, "step": 54795 }, { "epoch": 3.7233319744530506, "grad_norm": 1.722714900970459, "learning_rate": 0.0005347618562304661, "loss": 3.1559, "step": 54800 }, { "epoch": 3.7236716945237127, "grad_norm": 1.5875859260559082, "learning_rate": 0.0005347193912216333, "loss": 3.4398, "step": 54805 }, { "epoch": 3.7240114145943743, "grad_norm": 1.8622376918792725, "learning_rate": 0.0005346769262128007, 
"loss": 3.3096, "step": 54810 }, { "epoch": 3.724351134665036, "grad_norm": 1.7148796319961548, "learning_rate": 0.000534634461203968, "loss": 3.3006, "step": 54815 }, { "epoch": 3.724690854735698, "grad_norm": 1.855646014213562, "learning_rate": 0.0005345919961951352, "loss": 3.7462, "step": 54820 }, { "epoch": 3.7250305748063597, "grad_norm": 2.072575807571411, "learning_rate": 0.0005345495311863025, "loss": 3.195, "step": 54825 }, { "epoch": 3.7253702948770213, "grad_norm": 2.348234176635742, "learning_rate": 0.0005345070661774698, "loss": 3.3809, "step": 54830 }, { "epoch": 3.725710014947683, "grad_norm": 1.9952149391174316, "learning_rate": 0.000534464601168637, "loss": 3.3069, "step": 54835 }, { "epoch": 3.726049735018345, "grad_norm": 2.0818097591400146, "learning_rate": 0.0005344221361598042, "loss": 3.3908, "step": 54840 }, { "epoch": 3.7263894550890067, "grad_norm": 1.5496246814727783, "learning_rate": 0.0005343796711509717, "loss": 3.4283, "step": 54845 }, { "epoch": 3.7267291751596683, "grad_norm": 1.6850157976150513, "learning_rate": 0.0005343372061421389, "loss": 3.1482, "step": 54850 }, { "epoch": 3.7270688952303304, "grad_norm": 1.7380801439285278, "learning_rate": 0.0005342947411333061, "loss": 3.3938, "step": 54855 }, { "epoch": 3.727408615300992, "grad_norm": 1.6457829475402832, "learning_rate": 0.0005342522761244735, "loss": 3.5098, "step": 54860 }, { "epoch": 3.7277483353716536, "grad_norm": 1.7161179780960083, "learning_rate": 0.0005342098111156407, "loss": 3.617, "step": 54865 }, { "epoch": 3.7280880554423157, "grad_norm": 2.060438394546509, "learning_rate": 0.0005341673461068079, "loss": 3.392, "step": 54870 }, { "epoch": 3.7284277755129773, "grad_norm": 1.7997746467590332, "learning_rate": 0.0005341248810979753, "loss": 3.2764, "step": 54875 }, { "epoch": 3.728767495583639, "grad_norm": 1.6276236772537231, "learning_rate": 0.0005340824160891426, "loss": 3.3387, "step": 54880 }, { "epoch": 3.7291072156543006, "grad_norm": 1.74946928024292, 
"learning_rate": 0.0005340399510803098, "loss": 3.4344, "step": 54885 }, { "epoch": 3.7294469357249627, "grad_norm": 2.0709519386291504, "learning_rate": 0.0005339974860714772, "loss": 3.4953, "step": 54890 }, { "epoch": 3.7297866557956243, "grad_norm": 2.440624237060547, "learning_rate": 0.0005339550210626444, "loss": 3.5669, "step": 54895 }, { "epoch": 3.730126375866286, "grad_norm": 2.198991537094116, "learning_rate": 0.0005339125560538116, "loss": 3.4675, "step": 54900 }, { "epoch": 3.730466095936948, "grad_norm": 1.7489351034164429, "learning_rate": 0.0005338700910449789, "loss": 3.2086, "step": 54905 }, { "epoch": 3.7308058160076096, "grad_norm": 1.784835696220398, "learning_rate": 0.0005338276260361462, "loss": 3.4467, "step": 54910 }, { "epoch": 3.7311455360782713, "grad_norm": 2.104658842086792, "learning_rate": 0.0005337851610273136, "loss": 3.2843, "step": 54915 }, { "epoch": 3.7314852561489333, "grad_norm": 1.8037006855010986, "learning_rate": 0.0005337426960184808, "loss": 3.1364, "step": 54920 }, { "epoch": 3.731824976219595, "grad_norm": 1.7599098682403564, "learning_rate": 0.0005337002310096481, "loss": 3.5251, "step": 54925 }, { "epoch": 3.7321646962902566, "grad_norm": 1.9907008409500122, "learning_rate": 0.0005336577660008154, "loss": 3.4571, "step": 54930 }, { "epoch": 3.7325044163609187, "grad_norm": 1.5669249296188354, "learning_rate": 0.0005336153009919826, "loss": 3.5745, "step": 54935 }, { "epoch": 3.7328441364315803, "grad_norm": 1.8272604942321777, "learning_rate": 0.0005335728359831498, "loss": 3.4787, "step": 54940 }, { "epoch": 3.733183856502242, "grad_norm": 1.835807204246521, "learning_rate": 0.0005335303709743172, "loss": 3.3011, "step": 54945 }, { "epoch": 3.733523576572904, "grad_norm": 2.212357521057129, "learning_rate": 0.0005334879059654845, "loss": 3.2123, "step": 54950 }, { "epoch": 3.7338632966435656, "grad_norm": 2.003722667694092, "learning_rate": 0.0005334454409566517, "loss": 3.6225, "step": 54955 }, { "epoch": 
3.7342030167142273, "grad_norm": 1.6888723373413086, "learning_rate": 0.0005334029759478191, "loss": 3.4837, "step": 54960 }, { "epoch": 3.7345427367848893, "grad_norm": 1.9219130277633667, "learning_rate": 0.0005333605109389863, "loss": 3.4024, "step": 54965 }, { "epoch": 3.734882456855551, "grad_norm": 1.6249024868011475, "learning_rate": 0.0005333180459301535, "loss": 3.3718, "step": 54970 }, { "epoch": 3.7352221769262126, "grad_norm": 2.1200449466705322, "learning_rate": 0.0005332755809213209, "loss": 3.565, "step": 54975 }, { "epoch": 3.7355618969968747, "grad_norm": 1.9027870893478394, "learning_rate": 0.0005332331159124881, "loss": 3.5057, "step": 54980 }, { "epoch": 3.7359016170675363, "grad_norm": 1.6390622854232788, "learning_rate": 0.0005331906509036554, "loss": 3.5408, "step": 54985 }, { "epoch": 3.736241337138198, "grad_norm": 1.5117318630218506, "learning_rate": 0.0005331481858948228, "loss": 3.3621, "step": 54990 }, { "epoch": 3.73658105720886, "grad_norm": 1.7937045097351074, "learning_rate": 0.00053310572088599, "loss": 3.5246, "step": 54995 }, { "epoch": 3.7369207772795217, "grad_norm": 2.0400407314300537, "learning_rate": 0.0005330632558771572, "loss": 3.3783, "step": 55000 }, { "epoch": 3.7372604973501833, "grad_norm": 1.616540789604187, "learning_rate": 0.0005330207908683245, "loss": 3.396, "step": 55005 }, { "epoch": 3.7376002174208454, "grad_norm": 1.9413738250732422, "learning_rate": 0.0005329783258594918, "loss": 3.5571, "step": 55010 }, { "epoch": 3.737939937491507, "grad_norm": 1.8397784233093262, "learning_rate": 0.000532935860850659, "loss": 3.6102, "step": 55015 }, { "epoch": 3.7382796575621686, "grad_norm": 1.629338026046753, "learning_rate": 0.0005328933958418264, "loss": 3.571, "step": 55020 }, { "epoch": 3.7386193776328307, "grad_norm": 2.2904062271118164, "learning_rate": 0.0005328509308329937, "loss": 3.4435, "step": 55025 }, { "epoch": 3.7389590977034923, "grad_norm": 2.2154500484466553, "learning_rate": 0.0005328084658241609, 
"loss": 3.4496, "step": 55030 }, { "epoch": 3.739298817774154, "grad_norm": 2.2641243934631348, "learning_rate": 0.0005327660008153282, "loss": 3.2919, "step": 55035 }, { "epoch": 3.739638537844816, "grad_norm": 2.340609073638916, "learning_rate": 0.0005327235358064954, "loss": 3.4858, "step": 55040 }, { "epoch": 3.7399782579154777, "grad_norm": 1.854552984237671, "learning_rate": 0.0005326810707976627, "loss": 3.0776, "step": 55045 }, { "epoch": 3.7403179779861393, "grad_norm": 2.1908323764801025, "learning_rate": 0.00053263860578883, "loss": 3.2851, "step": 55050 }, { "epoch": 3.7406576980568014, "grad_norm": 1.899868369102478, "learning_rate": 0.0005325961407799973, "loss": 3.4117, "step": 55055 }, { "epoch": 3.740997418127463, "grad_norm": 2.285890817642212, "learning_rate": 0.0005325536757711646, "loss": 3.5478, "step": 55060 }, { "epoch": 3.7413371381981246, "grad_norm": 1.5849239826202393, "learning_rate": 0.0005325112107623319, "loss": 3.521, "step": 55065 }, { "epoch": 3.7416768582687867, "grad_norm": 1.6322144269943237, "learning_rate": 0.0005324687457534991, "loss": 3.6127, "step": 55070 }, { "epoch": 3.7420165783394483, "grad_norm": 1.6890840530395508, "learning_rate": 0.0005324262807446664, "loss": 3.7422, "step": 55075 }, { "epoch": 3.74235629841011, "grad_norm": 1.6041229963302612, "learning_rate": 0.0005323838157358337, "loss": 3.2134, "step": 55080 }, { "epoch": 3.742696018480772, "grad_norm": 1.717625379562378, "learning_rate": 0.0005323413507270009, "loss": 3.2544, "step": 55085 }, { "epoch": 3.7430357385514337, "grad_norm": 3.2664895057678223, "learning_rate": 0.0005322988857181682, "loss": 3.5028, "step": 55090 }, { "epoch": 3.7433754586220953, "grad_norm": 1.9073545932769775, "learning_rate": 0.0005322564207093356, "loss": 3.3939, "step": 55095 }, { "epoch": 3.7437151786927574, "grad_norm": 2.413302421569824, "learning_rate": 0.0005322139557005028, "loss": 3.3902, "step": 55100 }, { "epoch": 3.744054898763419, "grad_norm": 2.181100368499756, 
"learning_rate": 0.00053217149069167, "loss": 3.2404, "step": 55105 }, { "epoch": 3.7443946188340806, "grad_norm": 1.8229402303695679, "learning_rate": 0.0005321290256828374, "loss": 3.2104, "step": 55110 }, { "epoch": 3.7447343389047427, "grad_norm": 2.1048569679260254, "learning_rate": 0.0005320865606740046, "loss": 3.6233, "step": 55115 }, { "epoch": 3.7450740589754044, "grad_norm": 1.6683108806610107, "learning_rate": 0.0005320440956651718, "loss": 3.109, "step": 55120 }, { "epoch": 3.745413779046066, "grad_norm": 2.128232717514038, "learning_rate": 0.0005320016306563393, "loss": 3.0579, "step": 55125 }, { "epoch": 3.745753499116728, "grad_norm": 2.0570223331451416, "learning_rate": 0.0005319591656475065, "loss": 3.5705, "step": 55130 }, { "epoch": 3.7460932191873897, "grad_norm": 1.906028151512146, "learning_rate": 0.0005319167006386737, "loss": 3.4413, "step": 55135 }, { "epoch": 3.7464329392580513, "grad_norm": 1.3279261589050293, "learning_rate": 0.000531874235629841, "loss": 3.3529, "step": 55140 }, { "epoch": 3.7467726593287134, "grad_norm": 1.912585735321045, "learning_rate": 0.0005318317706210083, "loss": 3.2548, "step": 55145 }, { "epoch": 3.747112379399375, "grad_norm": 1.758428692817688, "learning_rate": 0.0005317893056121755, "loss": 3.3567, "step": 55150 }, { "epoch": 3.7474520994700367, "grad_norm": 2.134077548980713, "learning_rate": 0.0005317468406033428, "loss": 3.4437, "step": 55155 }, { "epoch": 3.7477918195406987, "grad_norm": 1.947124719619751, "learning_rate": 0.0005317043755945102, "loss": 3.3601, "step": 55160 }, { "epoch": 3.7481315396113604, "grad_norm": 1.842604160308838, "learning_rate": 0.0005316619105856774, "loss": 3.5632, "step": 55165 }, { "epoch": 3.748471259682022, "grad_norm": 1.8756141662597656, "learning_rate": 0.0005316194455768447, "loss": 3.3108, "step": 55170 }, { "epoch": 3.7488109797526836, "grad_norm": 2.14288067817688, "learning_rate": 0.000531576980568012, "loss": 3.3598, "step": 55175 }, { "epoch": 
3.7491506998233457, "grad_norm": 2.5356638431549072, "learning_rate": 0.0005315345155591792, "loss": 3.454, "step": 55180 }, { "epoch": 3.7494904198940073, "grad_norm": 1.859325647354126, "learning_rate": 0.0005314920505503465, "loss": 3.3239, "step": 55185 }, { "epoch": 3.749830139964669, "grad_norm": 1.708429217338562, "learning_rate": 0.0005314495855415137, "loss": 3.5221, "step": 55190 }, { "epoch": 3.750169860035331, "grad_norm": 2.0170204639434814, "learning_rate": 0.0005314071205326811, "loss": 3.1335, "step": 55195 }, { "epoch": 3.7505095801059927, "grad_norm": 2.168632745742798, "learning_rate": 0.0005313646555238484, "loss": 3.1592, "step": 55200 }, { "epoch": 3.7508493001766543, "grad_norm": 1.7774120569229126, "learning_rate": 0.0005313221905150156, "loss": 3.5656, "step": 55205 }, { "epoch": 3.7511890202473164, "grad_norm": 2.386305332183838, "learning_rate": 0.0005312797255061829, "loss": 3.4292, "step": 55210 }, { "epoch": 3.751528740317978, "grad_norm": 1.3217724561691284, "learning_rate": 0.0005312372604973502, "loss": 3.3985, "step": 55215 }, { "epoch": 3.7518684603886396, "grad_norm": 1.585676670074463, "learning_rate": 0.0005311947954885174, "loss": 3.2405, "step": 55220 }, { "epoch": 3.7522081804593013, "grad_norm": 2.0473828315734863, "learning_rate": 0.0005311523304796846, "loss": 3.4134, "step": 55225 }, { "epoch": 3.7525479005299633, "grad_norm": 1.779537558555603, "learning_rate": 0.0005311098654708521, "loss": 3.7592, "step": 55230 }, { "epoch": 3.752887620600625, "grad_norm": 2.129729986190796, "learning_rate": 0.0005310674004620193, "loss": 3.4897, "step": 55235 }, { "epoch": 3.7532273406712866, "grad_norm": 1.5459778308868408, "learning_rate": 0.0005310249354531865, "loss": 3.1689, "step": 55240 }, { "epoch": 3.7535670607419487, "grad_norm": 2.3889963626861572, "learning_rate": 0.0005309824704443539, "loss": 3.7321, "step": 55245 }, { "epoch": 3.7539067808126103, "grad_norm": 1.818556547164917, "learning_rate": 0.0005309400054355211, 
"loss": 3.6288, "step": 55250 }, { "epoch": 3.754246500883272, "grad_norm": 1.4560266733169556, "learning_rate": 0.0005308975404266884, "loss": 3.3244, "step": 55255 }, { "epoch": 3.754586220953934, "grad_norm": 1.6920768022537231, "learning_rate": 0.0005308550754178557, "loss": 3.5311, "step": 55260 }, { "epoch": 3.7549259410245956, "grad_norm": 2.0480754375457764, "learning_rate": 0.000530812610409023, "loss": 3.4688, "step": 55265 }, { "epoch": 3.7552656610952573, "grad_norm": 1.6716047525405884, "learning_rate": 0.0005307701454001903, "loss": 3.4673, "step": 55270 }, { "epoch": 3.7556053811659194, "grad_norm": 2.134683132171631, "learning_rate": 0.0005307276803913576, "loss": 3.4379, "step": 55275 }, { "epoch": 3.755945101236581, "grad_norm": 2.2512810230255127, "learning_rate": 0.0005306852153825248, "loss": 3.4, "step": 55280 }, { "epoch": 3.7562848213072426, "grad_norm": 1.9371016025543213, "learning_rate": 0.0005306427503736921, "loss": 3.4442, "step": 55285 }, { "epoch": 3.7566245413779047, "grad_norm": 2.057915449142456, "learning_rate": 0.0005306002853648593, "loss": 3.4724, "step": 55290 }, { "epoch": 3.7569642614485663, "grad_norm": 2.1152236461639404, "learning_rate": 0.0005305578203560266, "loss": 3.4713, "step": 55295 }, { "epoch": 3.757303981519228, "grad_norm": 2.2023532390594482, "learning_rate": 0.000530515355347194, "loss": 3.5791, "step": 55300 }, { "epoch": 3.75764370158989, "grad_norm": 1.9817359447479248, "learning_rate": 0.0005304728903383612, "loss": 3.4635, "step": 55305 }, { "epoch": 3.7579834216605517, "grad_norm": 2.5221073627471924, "learning_rate": 0.0005304304253295285, "loss": 3.5869, "step": 55310 }, { "epoch": 3.7583231417312133, "grad_norm": 2.04654598236084, "learning_rate": 0.0005303879603206958, "loss": 3.5903, "step": 55315 }, { "epoch": 3.7586628618018754, "grad_norm": 1.9190698862075806, "learning_rate": 0.000530345495311863, "loss": 3.2907, "step": 55320 }, { "epoch": 3.759002581872537, "grad_norm": 1.3496122360229492, 
"learning_rate": 0.0005303030303030302, "loss": 3.5145, "step": 55325 }, { "epoch": 3.7593423019431986, "grad_norm": 2.210658073425293, "learning_rate": 0.0005302605652941977, "loss": 3.3357, "step": 55330 }, { "epoch": 3.7596820220138607, "grad_norm": 1.601770043373108, "learning_rate": 0.0005302181002853649, "loss": 3.2811, "step": 55335 }, { "epoch": 3.7600217420845223, "grad_norm": 1.4879475831985474, "learning_rate": 0.0005301756352765321, "loss": 3.5325, "step": 55340 }, { "epoch": 3.760361462155184, "grad_norm": 1.9976351261138916, "learning_rate": 0.0005301331702676995, "loss": 3.5425, "step": 55345 }, { "epoch": 3.760701182225846, "grad_norm": 1.9686448574066162, "learning_rate": 0.0005300907052588667, "loss": 3.2292, "step": 55350 }, { "epoch": 3.7610409022965077, "grad_norm": 1.9455204010009766, "learning_rate": 0.0005300482402500339, "loss": 3.7285, "step": 55355 }, { "epoch": 3.7613806223671693, "grad_norm": 1.6223429441452026, "learning_rate": 0.0005300057752412013, "loss": 3.562, "step": 55360 }, { "epoch": 3.7617203424378314, "grad_norm": 1.8613299131393433, "learning_rate": 0.0005299633102323686, "loss": 3.5163, "step": 55365 }, { "epoch": 3.762060062508493, "grad_norm": 2.3485569953918457, "learning_rate": 0.0005299208452235358, "loss": 3.173, "step": 55370 }, { "epoch": 3.7623997825791546, "grad_norm": 1.8056925535202026, "learning_rate": 0.0005298783802147032, "loss": 3.5036, "step": 55375 }, { "epoch": 3.7627395026498167, "grad_norm": 1.86170494556427, "learning_rate": 0.0005298359152058704, "loss": 3.5214, "step": 55380 }, { "epoch": 3.7630792227204783, "grad_norm": 1.667903184890747, "learning_rate": 0.0005297934501970376, "loss": 3.3748, "step": 55385 }, { "epoch": 3.76341894279114, "grad_norm": 1.6923538446426392, "learning_rate": 0.0005297509851882049, "loss": 3.5669, "step": 55390 }, { "epoch": 3.763758662861802, "grad_norm": 2.2418441772460938, "learning_rate": 0.0005297085201793722, "loss": 3.9952, "step": 55395 }, { "epoch": 
3.7640983829324637, "grad_norm": 1.7523930072784424, "learning_rate": 0.0005296660551705395, "loss": 3.4541, "step": 55400 }, { "epoch": 3.7644381030031253, "grad_norm": 1.74329674243927, "learning_rate": 0.0005296235901617068, "loss": 3.2922, "step": 55405 }, { "epoch": 3.7647778230737874, "grad_norm": 1.4313979148864746, "learning_rate": 0.0005295811251528741, "loss": 3.4099, "step": 55410 }, { "epoch": 3.765117543144449, "grad_norm": 1.51057767868042, "learning_rate": 0.0005295386601440413, "loss": 3.4174, "step": 55415 }, { "epoch": 3.7654572632151107, "grad_norm": 2.525444507598877, "learning_rate": 0.0005294961951352086, "loss": 3.607, "step": 55420 }, { "epoch": 3.7657969832857727, "grad_norm": 1.374161720275879, "learning_rate": 0.0005294537301263758, "loss": 3.5623, "step": 55425 }, { "epoch": 3.7661367033564344, "grad_norm": 2.3190865516662598, "learning_rate": 0.0005294112651175431, "loss": 3.4343, "step": 55430 }, { "epoch": 3.766476423427096, "grad_norm": 2.4992904663085938, "learning_rate": 0.0005293688001087105, "loss": 3.7452, "step": 55435 }, { "epoch": 3.766816143497758, "grad_norm": 1.7824842929840088, "learning_rate": 0.0005293263350998777, "loss": 3.421, "step": 55440 }, { "epoch": 3.7671558635684197, "grad_norm": 2.0119450092315674, "learning_rate": 0.000529283870091045, "loss": 3.3967, "step": 55445 }, { "epoch": 3.7674955836390813, "grad_norm": 1.8634960651397705, "learning_rate": 0.0005292414050822123, "loss": 3.593, "step": 55450 }, { "epoch": 3.7678353037097434, "grad_norm": 1.8709455728530884, "learning_rate": 0.0005291989400733795, "loss": 3.5826, "step": 55455 }, { "epoch": 3.768175023780405, "grad_norm": 2.4712281227111816, "learning_rate": 0.0005291564750645468, "loss": 3.3559, "step": 55460 }, { "epoch": 3.7685147438510667, "grad_norm": 1.9348088502883911, "learning_rate": 0.0005291140100557141, "loss": 3.2635, "step": 55465 }, { "epoch": 3.7688544639217287, "grad_norm": 2.1156423091888428, "learning_rate": 0.0005290715450468814, 
"loss": 3.2271, "step": 55470 }, { "epoch": 3.7691941839923904, "grad_norm": 2.6081044673919678, "learning_rate": 0.0005290290800380487, "loss": 3.3235, "step": 55475 }, { "epoch": 3.769533904063052, "grad_norm": 1.701334834098816, "learning_rate": 0.000528986615029216, "loss": 3.378, "step": 55480 }, { "epoch": 3.769873624133714, "grad_norm": 1.689050555229187, "learning_rate": 0.0005289441500203832, "loss": 3.4384, "step": 55485 }, { "epoch": 3.7702133442043757, "grad_norm": 1.6534862518310547, "learning_rate": 0.0005289016850115504, "loss": 3.5042, "step": 55490 }, { "epoch": 3.7705530642750373, "grad_norm": 1.5780415534973145, "learning_rate": 0.0005288592200027178, "loss": 3.474, "step": 55495 }, { "epoch": 3.7708927843456994, "grad_norm": 1.989170789718628, "learning_rate": 0.000528816754993885, "loss": 3.5434, "step": 55500 }, { "epoch": 3.771232504416361, "grad_norm": 2.2251765727996826, "learning_rate": 0.0005287742899850523, "loss": 3.424, "step": 55505 }, { "epoch": 3.7715722244870227, "grad_norm": 1.9173011779785156, "learning_rate": 0.0005287318249762197, "loss": 3.278, "step": 55510 }, { "epoch": 3.7719119445576843, "grad_norm": 2.047563076019287, "learning_rate": 0.0005286893599673869, "loss": 3.6382, "step": 55515 }, { "epoch": 3.7722516646283464, "grad_norm": 1.5679314136505127, "learning_rate": 0.0005286468949585541, "loss": 3.6744, "step": 55520 }, { "epoch": 3.772591384699008, "grad_norm": 2.1436476707458496, "learning_rate": 0.0005286044299497215, "loss": 3.5815, "step": 55525 }, { "epoch": 3.7729311047696696, "grad_norm": 1.894406795501709, "learning_rate": 0.0005285619649408887, "loss": 3.2118, "step": 55530 }, { "epoch": 3.7732708248403317, "grad_norm": 1.612512469291687, "learning_rate": 0.0005285194999320559, "loss": 3.3018, "step": 55535 }, { "epoch": 3.7736105449109933, "grad_norm": 1.7531726360321045, "learning_rate": 0.0005284770349232233, "loss": 3.3925, "step": 55540 }, { "epoch": 3.773950264981655, "grad_norm": 1.898634433746338, 
"learning_rate": 0.0005284345699143906, "loss": 3.5548, "step": 55545 }, { "epoch": 3.774289985052317, "grad_norm": 1.7812073230743408, "learning_rate": 0.0005283921049055578, "loss": 3.5709, "step": 55550 }, { "epoch": 3.7746297051229787, "grad_norm": 1.9506351947784424, "learning_rate": 0.0005283496398967251, "loss": 3.4878, "step": 55555 }, { "epoch": 3.7749694251936403, "grad_norm": 2.117535352706909, "learning_rate": 0.0005283071748878924, "loss": 3.4148, "step": 55560 }, { "epoch": 3.775309145264302, "grad_norm": 1.8540388345718384, "learning_rate": 0.0005282647098790596, "loss": 3.0894, "step": 55565 }, { "epoch": 3.775648865334964, "grad_norm": 1.936050534248352, "learning_rate": 0.0005282222448702269, "loss": 3.3603, "step": 55570 }, { "epoch": 3.7759885854056257, "grad_norm": 1.6563512086868286, "learning_rate": 0.0005281797798613943, "loss": 3.7064, "step": 55575 }, { "epoch": 3.7763283054762873, "grad_norm": 1.7847243547439575, "learning_rate": 0.0005281373148525615, "loss": 3.4528, "step": 55580 }, { "epoch": 3.7766680255469494, "grad_norm": 2.335728883743286, "learning_rate": 0.0005280948498437288, "loss": 3.4922, "step": 55585 }, { "epoch": 3.777007745617611, "grad_norm": 2.1107048988342285, "learning_rate": 0.000528052384834896, "loss": 3.5193, "step": 55590 }, { "epoch": 3.7773474656882726, "grad_norm": 2.613905191421509, "learning_rate": 0.0005280099198260634, "loss": 3.5614, "step": 55595 }, { "epoch": 3.7776871857589347, "grad_norm": 2.117048978805542, "learning_rate": 0.0005279674548172306, "loss": 3.5556, "step": 55600 }, { "epoch": 3.7780269058295963, "grad_norm": 1.8302650451660156, "learning_rate": 0.0005279249898083978, "loss": 3.4327, "step": 55605 }, { "epoch": 3.778366625900258, "grad_norm": 1.754012942314148, "learning_rate": 0.0005278825247995653, "loss": 3.3934, "step": 55610 }, { "epoch": 3.77870634597092, "grad_norm": 1.4287631511688232, "learning_rate": 0.0005278400597907325, "loss": 3.5819, "step": 55615 }, { "epoch": 
3.7790460660415817, "grad_norm": 1.717603325843811, "learning_rate": 0.0005277975947818997, "loss": 3.3804, "step": 55620 }, { "epoch": 3.7793857861122433, "grad_norm": 1.5927060842514038, "learning_rate": 0.000527755129773067, "loss": 3.6074, "step": 55625 }, { "epoch": 3.7797255061829054, "grad_norm": 1.7569125890731812, "learning_rate": 0.0005277126647642343, "loss": 3.7683, "step": 55630 }, { "epoch": 3.780065226253567, "grad_norm": 1.7092812061309814, "learning_rate": 0.0005276701997554015, "loss": 3.5692, "step": 55635 }, { "epoch": 3.7804049463242286, "grad_norm": 1.7692021131515503, "learning_rate": 0.0005276277347465688, "loss": 3.7045, "step": 55640 }, { "epoch": 3.7807446663948907, "grad_norm": 1.520294427871704, "learning_rate": 0.0005275852697377362, "loss": 3.4324, "step": 55645 }, { "epoch": 3.7810843864655523, "grad_norm": 1.7630409002304077, "learning_rate": 0.0005275428047289034, "loss": 3.2787, "step": 55650 }, { "epoch": 3.781424106536214, "grad_norm": 1.6613885164260864, "learning_rate": 0.0005275003397200707, "loss": 3.4424, "step": 55655 }, { "epoch": 3.781763826606876, "grad_norm": 1.8930119276046753, "learning_rate": 0.000527457874711238, "loss": 3.4485, "step": 55660 }, { "epoch": 3.7821035466775377, "grad_norm": 2.6137962341308594, "learning_rate": 0.0005274154097024052, "loss": 3.2863, "step": 55665 }, { "epoch": 3.7824432667481993, "grad_norm": 1.5790824890136719, "learning_rate": 0.0005273729446935725, "loss": 3.5707, "step": 55670 }, { "epoch": 3.7827829868188614, "grad_norm": 2.031020402908325, "learning_rate": 0.0005273304796847397, "loss": 3.4084, "step": 55675 }, { "epoch": 3.783122706889523, "grad_norm": 1.6755127906799316, "learning_rate": 0.0005272880146759071, "loss": 3.3467, "step": 55680 }, { "epoch": 3.7834624269601846, "grad_norm": 2.1285560131073, "learning_rate": 0.0005272455496670744, "loss": 3.5197, "step": 55685 }, { "epoch": 3.7838021470308467, "grad_norm": 2.402208089828491, "learning_rate": 0.0005272030846582416, 
"loss": 3.3214, "step": 55690 }, { "epoch": 3.7841418671015083, "grad_norm": 2.073760986328125, "learning_rate": 0.0005271606196494089, "loss": 3.6803, "step": 55695 }, { "epoch": 3.78448158717217, "grad_norm": 1.6750720739364624, "learning_rate": 0.0005271181546405762, "loss": 3.2982, "step": 55700 }, { "epoch": 3.784821307242832, "grad_norm": 2.0943262577056885, "learning_rate": 0.0005270756896317434, "loss": 3.3685, "step": 55705 }, { "epoch": 3.7851610273134937, "grad_norm": 1.562645673751831, "learning_rate": 0.0005270332246229107, "loss": 3.3956, "step": 55710 }, { "epoch": 3.7855007473841553, "grad_norm": 2.023630142211914, "learning_rate": 0.0005269907596140781, "loss": 3.3377, "step": 55715 }, { "epoch": 3.7858404674548174, "grad_norm": 2.0104846954345703, "learning_rate": 0.0005269482946052453, "loss": 3.4535, "step": 55720 }, { "epoch": 3.786180187525479, "grad_norm": 1.8681259155273438, "learning_rate": 0.0005269058295964125, "loss": 3.3523, "step": 55725 }, { "epoch": 3.7865199075961407, "grad_norm": 2.213981866836548, "learning_rate": 0.0005268633645875799, "loss": 3.5845, "step": 55730 }, { "epoch": 3.7868596276668027, "grad_norm": 2.1018192768096924, "learning_rate": 0.0005268208995787471, "loss": 3.2641, "step": 55735 }, { "epoch": 3.7871993477374644, "grad_norm": 1.8812464475631714, "learning_rate": 0.0005267784345699143, "loss": 3.5201, "step": 55740 }, { "epoch": 3.787539067808126, "grad_norm": 1.6625432968139648, "learning_rate": 0.0005267359695610817, "loss": 3.4567, "step": 55745 }, { "epoch": 3.787878787878788, "grad_norm": 1.8049448728561401, "learning_rate": 0.000526693504552249, "loss": 3.4246, "step": 55750 }, { "epoch": 3.7882185079494497, "grad_norm": 1.6656819581985474, "learning_rate": 0.0005266510395434162, "loss": 3.5873, "step": 55755 }, { "epoch": 3.7885582280201113, "grad_norm": 1.6167460680007935, "learning_rate": 0.0005266085745345836, "loss": 3.4593, "step": 55760 }, { "epoch": 3.7888979480907734, "grad_norm": 
1.5557466745376587, "learning_rate": 0.0005265661095257508, "loss": 3.4214, "step": 55765 }, { "epoch": 3.789237668161435, "grad_norm": 2.5322985649108887, "learning_rate": 0.000526523644516918, "loss": 3.5826, "step": 55770 }, { "epoch": 3.7895773882320967, "grad_norm": 1.9429457187652588, "learning_rate": 0.0005264811795080853, "loss": 3.5139, "step": 55775 }, { "epoch": 3.7899171083027587, "grad_norm": 1.9452896118164062, "learning_rate": 0.0005264387144992526, "loss": 3.3801, "step": 55780 }, { "epoch": 3.7902568283734204, "grad_norm": 1.7305188179016113, "learning_rate": 0.0005263962494904199, "loss": 3.6894, "step": 55785 }, { "epoch": 3.790596548444082, "grad_norm": 2.0356836318969727, "learning_rate": 0.0005263537844815872, "loss": 3.3821, "step": 55790 }, { "epoch": 3.790936268514744, "grad_norm": 1.9077064990997314, "learning_rate": 0.0005263113194727545, "loss": 3.4991, "step": 55795 }, { "epoch": 3.7912759885854057, "grad_norm": 2.0173778533935547, "learning_rate": 0.0005262688544639217, "loss": 3.2705, "step": 55800 }, { "epoch": 3.7916157086560673, "grad_norm": 3.092163562774658, "learning_rate": 0.000526226389455089, "loss": 3.3644, "step": 55805 }, { "epoch": 3.7919554287267294, "grad_norm": 1.6025919914245605, "learning_rate": 0.0005261839244462563, "loss": 3.4067, "step": 55810 }, { "epoch": 3.792295148797391, "grad_norm": 1.5897855758666992, "learning_rate": 0.0005261414594374235, "loss": 3.33, "step": 55815 }, { "epoch": 3.7926348688680527, "grad_norm": 1.6401877403259277, "learning_rate": 0.0005260989944285909, "loss": 3.6107, "step": 55820 }, { "epoch": 3.7929745889387148, "grad_norm": 2.2028963565826416, "learning_rate": 0.0005260565294197581, "loss": 3.3893, "step": 55825 }, { "epoch": 3.7933143090093764, "grad_norm": 1.6065057516098022, "learning_rate": 0.0005260140644109254, "loss": 3.6125, "step": 55830 }, { "epoch": 3.793654029080038, "grad_norm": 2.3435218334198, "learning_rate": 0.0005259715994020927, "loss": 3.36, "step": 55835 }, { 
"epoch": 3.7939937491507, "grad_norm": 2.3530399799346924, "learning_rate": 0.0005259291343932599, "loss": 3.4351, "step": 55840 }, { "epoch": 3.7943334692213617, "grad_norm": 1.9259883165359497, "learning_rate": 0.0005258866693844272, "loss": 3.4347, "step": 55845 }, { "epoch": 3.7946731892920234, "grad_norm": 2.0996170043945312, "learning_rate": 0.0005258442043755946, "loss": 3.2872, "step": 55850 }, { "epoch": 3.795012909362685, "grad_norm": 2.2108538150787354, "learning_rate": 0.0005258017393667618, "loss": 3.3347, "step": 55855 }, { "epoch": 3.795352629433347, "grad_norm": 1.6035150289535522, "learning_rate": 0.000525759274357929, "loss": 3.4678, "step": 55860 }, { "epoch": 3.7956923495040087, "grad_norm": 2.029435396194458, "learning_rate": 0.0005257168093490964, "loss": 3.5533, "step": 55865 }, { "epoch": 3.7960320695746703, "grad_norm": 1.9311431646347046, "learning_rate": 0.0005256743443402636, "loss": 3.3948, "step": 55870 }, { "epoch": 3.7963717896453324, "grad_norm": 1.9820033311843872, "learning_rate": 0.0005256318793314308, "loss": 3.3094, "step": 55875 }, { "epoch": 3.796711509715994, "grad_norm": 1.6739240884780884, "learning_rate": 0.0005255894143225982, "loss": 3.5407, "step": 55880 }, { "epoch": 3.7970512297866557, "grad_norm": 1.7327452898025513, "learning_rate": 0.0005255469493137655, "loss": 3.5171, "step": 55885 }, { "epoch": 3.7973909498573177, "grad_norm": 1.8988897800445557, "learning_rate": 0.0005255044843049327, "loss": 3.5789, "step": 55890 }, { "epoch": 3.7977306699279794, "grad_norm": 2.0399746894836426, "learning_rate": 0.0005254620192961001, "loss": 3.3742, "step": 55895 }, { "epoch": 3.798070389998641, "grad_norm": 1.838778018951416, "learning_rate": 0.0005254195542872673, "loss": 3.52, "step": 55900 }, { "epoch": 3.7984101100693026, "grad_norm": 2.1578874588012695, "learning_rate": 0.0005253770892784345, "loss": 3.583, "step": 55905 }, { "epoch": 3.7987498301399647, "grad_norm": 1.8120077848434448, "learning_rate": 
0.0005253346242696019, "loss": 3.015, "step": 55910 }, { "epoch": 3.7990895502106263, "grad_norm": 1.435647964477539, "learning_rate": 0.0005252921592607691, "loss": 3.36, "step": 55915 }, { "epoch": 3.799429270281288, "grad_norm": 2.4029204845428467, "learning_rate": 0.0005252496942519364, "loss": 3.5862, "step": 55920 }, { "epoch": 3.79976899035195, "grad_norm": 1.7441885471343994, "learning_rate": 0.0005252072292431037, "loss": 3.5444, "step": 55925 }, { "epoch": 3.8001087104226117, "grad_norm": 1.9138578176498413, "learning_rate": 0.000525164764234271, "loss": 3.5295, "step": 55930 }, { "epoch": 3.8004484304932733, "grad_norm": 1.6173923015594482, "learning_rate": 0.0005251222992254383, "loss": 3.6347, "step": 55935 }, { "epoch": 3.8007881505639354, "grad_norm": 1.5516712665557861, "learning_rate": 0.0005250798342166055, "loss": 3.2983, "step": 55940 }, { "epoch": 3.801127870634597, "grad_norm": 1.9508719444274902, "learning_rate": 0.0005250373692077728, "loss": 3.5137, "step": 55945 }, { "epoch": 3.8014675907052586, "grad_norm": 2.133676767349243, "learning_rate": 0.0005249949041989401, "loss": 3.4944, "step": 55950 }, { "epoch": 3.8018073107759207, "grad_norm": 1.7556829452514648, "learning_rate": 0.0005249524391901074, "loss": 3.1924, "step": 55955 }, { "epoch": 3.8021470308465823, "grad_norm": 1.700169324874878, "learning_rate": 0.0005249099741812747, "loss": 3.5034, "step": 55960 }, { "epoch": 3.802486750917244, "grad_norm": 1.6956685781478882, "learning_rate": 0.000524867509172442, "loss": 3.2917, "step": 55965 }, { "epoch": 3.802826470987906, "grad_norm": 1.9309977293014526, "learning_rate": 0.0005248250441636092, "loss": 3.5499, "step": 55970 }, { "epoch": 3.8031661910585677, "grad_norm": 2.245073080062866, "learning_rate": 0.0005247825791547764, "loss": 3.5645, "step": 55975 }, { "epoch": 3.8035059111292293, "grad_norm": 1.826834797859192, "learning_rate": 0.0005247401141459438, "loss": 3.3656, "step": 55980 }, { "epoch": 3.8038456311998914, 
"grad_norm": 1.7826026678085327, "learning_rate": 0.000524697649137111, "loss": 3.4674, "step": 55985 }, { "epoch": 3.804185351270553, "grad_norm": 2.1349375247955322, "learning_rate": 0.0005246551841282783, "loss": 3.36, "step": 55990 }, { "epoch": 3.8045250713412146, "grad_norm": 1.9215924739837646, "learning_rate": 0.0005246127191194457, "loss": 3.4324, "step": 55995 }, { "epoch": 3.8048647914118767, "grad_norm": 2.1435868740081787, "learning_rate": 0.0005245702541106129, "loss": 3.3726, "step": 56000 }, { "epoch": 3.8052045114825384, "grad_norm": 2.0689401626586914, "learning_rate": 0.0005245277891017801, "loss": 3.3694, "step": 56005 }, { "epoch": 3.8055442315532, "grad_norm": 2.3081674575805664, "learning_rate": 0.0005244853240929475, "loss": 3.302, "step": 56010 }, { "epoch": 3.805883951623862, "grad_norm": 1.646492600440979, "learning_rate": 0.0005244428590841147, "loss": 3.4199, "step": 56015 }, { "epoch": 3.8062236716945237, "grad_norm": 1.5609017610549927, "learning_rate": 0.0005244003940752819, "loss": 3.4345, "step": 56020 }, { "epoch": 3.8065633917651853, "grad_norm": 2.1326582431793213, "learning_rate": 0.0005243579290664494, "loss": 3.4787, "step": 56025 }, { "epoch": 3.8069031118358474, "grad_norm": 1.5865806341171265, "learning_rate": 0.0005243154640576166, "loss": 3.5187, "step": 56030 }, { "epoch": 3.807242831906509, "grad_norm": 1.7897887229919434, "learning_rate": 0.0005242729990487838, "loss": 3.2478, "step": 56035 }, { "epoch": 3.8075825519771707, "grad_norm": 1.420507788658142, "learning_rate": 0.0005242305340399511, "loss": 3.4709, "step": 56040 }, { "epoch": 3.8079222720478327, "grad_norm": 1.798278570175171, "learning_rate": 0.0005241880690311184, "loss": 3.2663, "step": 56045 }, { "epoch": 3.8082619921184944, "grad_norm": 2.600043296813965, "learning_rate": 0.0005241456040222856, "loss": 3.5629, "step": 56050 }, { "epoch": 3.808601712189156, "grad_norm": 1.932758092880249, "learning_rate": 0.0005241031390134529, "loss": 3.5564, "step": 
56055 }, { "epoch": 3.808941432259818, "grad_norm": 1.7225732803344727, "learning_rate": 0.0005240606740046203, "loss": 3.4077, "step": 56060 }, { "epoch": 3.8092811523304797, "grad_norm": 1.6614980697631836, "learning_rate": 0.0005240182089957875, "loss": 3.5898, "step": 56065 }, { "epoch": 3.8096208724011413, "grad_norm": 2.323591470718384, "learning_rate": 0.0005239757439869548, "loss": 3.4605, "step": 56070 }, { "epoch": 3.8099605924718034, "grad_norm": 2.2951924800872803, "learning_rate": 0.000523933278978122, "loss": 3.6698, "step": 56075 }, { "epoch": 3.810300312542465, "grad_norm": 2.121885299682617, "learning_rate": 0.0005238908139692893, "loss": 3.5196, "step": 56080 }, { "epoch": 3.8106400326131267, "grad_norm": 1.7401796579360962, "learning_rate": 0.0005238483489604566, "loss": 3.3938, "step": 56085 }, { "epoch": 3.8109797526837887, "grad_norm": 2.7168753147125244, "learning_rate": 0.0005238058839516238, "loss": 3.3145, "step": 56090 }, { "epoch": 3.8113194727544504, "grad_norm": 2.378253936767578, "learning_rate": 0.0005237634189427912, "loss": 3.6049, "step": 56095 }, { "epoch": 3.811659192825112, "grad_norm": 1.7565844058990479, "learning_rate": 0.0005237209539339585, "loss": 3.5107, "step": 56100 }, { "epoch": 3.811998912895774, "grad_norm": 1.8009421825408936, "learning_rate": 0.0005236784889251257, "loss": 3.5001, "step": 56105 }, { "epoch": 3.8123386329664357, "grad_norm": 1.8739805221557617, "learning_rate": 0.000523636023916293, "loss": 3.6464, "step": 56110 }, { "epoch": 3.8126783530370973, "grad_norm": 1.9965872764587402, "learning_rate": 0.0005235935589074603, "loss": 3.7131, "step": 56115 }, { "epoch": 3.8130180731077594, "grad_norm": 1.706512451171875, "learning_rate": 0.0005235510938986275, "loss": 3.3421, "step": 56120 }, { "epoch": 3.813357793178421, "grad_norm": 1.5874963998794556, "learning_rate": 0.0005235086288897947, "loss": 3.5084, "step": 56125 }, { "epoch": 3.8136975132490827, "grad_norm": 2.2582056522369385, "learning_rate": 
0.0005234661638809622, "loss": 3.4315, "step": 56130 }, { "epoch": 3.8140372333197448, "grad_norm": 1.7992987632751465, "learning_rate": 0.0005234236988721294, "loss": 3.2945, "step": 56135 }, { "epoch": 3.8143769533904064, "grad_norm": 2.3208250999450684, "learning_rate": 0.0005233812338632966, "loss": 3.3316, "step": 56140 }, { "epoch": 3.814716673461068, "grad_norm": 2.3270037174224854, "learning_rate": 0.000523338768854464, "loss": 3.4187, "step": 56145 }, { "epoch": 3.81505639353173, "grad_norm": 1.7226805686950684, "learning_rate": 0.0005232963038456312, "loss": 3.6656, "step": 56150 }, { "epoch": 3.8153961136023917, "grad_norm": 2.4646873474121094, "learning_rate": 0.0005232538388367984, "loss": 3.4711, "step": 56155 }, { "epoch": 3.8157358336730534, "grad_norm": 2.919675827026367, "learning_rate": 0.0005232113738279658, "loss": 3.2646, "step": 56160 }, { "epoch": 3.8160755537437154, "grad_norm": 2.568830728530884, "learning_rate": 0.0005231689088191331, "loss": 3.5903, "step": 56165 }, { "epoch": 3.816415273814377, "grad_norm": 2.0602989196777344, "learning_rate": 0.0005231264438103003, "loss": 3.4026, "step": 56170 }, { "epoch": 3.8167549938850387, "grad_norm": 1.6581648588180542, "learning_rate": 0.0005230839788014676, "loss": 3.3745, "step": 56175 }, { "epoch": 3.8170947139557008, "grad_norm": 1.904107689857483, "learning_rate": 0.0005230415137926349, "loss": 3.5431, "step": 56180 }, { "epoch": 3.8174344340263624, "grad_norm": 1.6213016510009766, "learning_rate": 0.0005229990487838021, "loss": 3.5025, "step": 56185 }, { "epoch": 3.817774154097024, "grad_norm": 1.8769398927688599, "learning_rate": 0.0005229565837749694, "loss": 3.5245, "step": 56190 }, { "epoch": 3.8181138741676857, "grad_norm": 2.0565736293792725, "learning_rate": 0.0005229141187661367, "loss": 3.6692, "step": 56195 }, { "epoch": 3.8184535942383477, "grad_norm": 2.1841049194335938, "learning_rate": 0.000522871653757304, "loss": 3.652, "step": 56200 }, { "epoch": 3.8187933143090094, 
"grad_norm": 1.8778131008148193, "learning_rate": 0.0005228291887484713, "loss": 3.2074, "step": 56205 }, { "epoch": 3.819133034379671, "grad_norm": 1.7164629697799683, "learning_rate": 0.0005227867237396386, "loss": 3.4267, "step": 56210 }, { "epoch": 3.819472754450333, "grad_norm": 1.884993553161621, "learning_rate": 0.0005227442587308058, "loss": 3.4101, "step": 56215 }, { "epoch": 3.8198124745209947, "grad_norm": 2.013399839401245, "learning_rate": 0.0005227017937219731, "loss": 3.6667, "step": 56220 }, { "epoch": 3.8201521945916563, "grad_norm": 2.032045364379883, "learning_rate": 0.0005226593287131403, "loss": 3.3365, "step": 56225 }, { "epoch": 3.8204919146623184, "grad_norm": 1.7949182987213135, "learning_rate": 0.0005226168637043076, "loss": 3.2971, "step": 56230 }, { "epoch": 3.82083163473298, "grad_norm": 1.8714070320129395, "learning_rate": 0.000522574398695475, "loss": 3.5614, "step": 56235 }, { "epoch": 3.8211713548036417, "grad_norm": 1.517962098121643, "learning_rate": 0.0005225319336866422, "loss": 3.5056, "step": 56240 }, { "epoch": 3.8215110748743033, "grad_norm": 1.7734912633895874, "learning_rate": 0.0005224894686778095, "loss": 3.3849, "step": 56245 }, { "epoch": 3.8218507949449654, "grad_norm": 2.3587560653686523, "learning_rate": 0.0005224470036689768, "loss": 3.3254, "step": 56250 }, { "epoch": 3.822190515015627, "grad_norm": 1.9491736888885498, "learning_rate": 0.000522404538660144, "loss": 3.5992, "step": 56255 }, { "epoch": 3.8225302350862886, "grad_norm": 1.8891503810882568, "learning_rate": 0.0005223620736513112, "loss": 3.4013, "step": 56260 }, { "epoch": 3.8228699551569507, "grad_norm": 1.4972347021102905, "learning_rate": 0.0005223196086424786, "loss": 3.3121, "step": 56265 }, { "epoch": 3.8232096752276123, "grad_norm": 1.5590264797210693, "learning_rate": 0.0005222771436336459, "loss": 3.1469, "step": 56270 }, { "epoch": 3.823549395298274, "grad_norm": 1.6475564241409302, "learning_rate": 0.0005222346786248132, "loss": 3.4558, 
"step": 56275 }, { "epoch": 3.823889115368936, "grad_norm": 2.267247438430786, "learning_rate": 0.0005221922136159805, "loss": 3.5725, "step": 56280 }, { "epoch": 3.8242288354395977, "grad_norm": 2.1973986625671387, "learning_rate": 0.0005221497486071477, "loss": 3.64, "step": 56285 }, { "epoch": 3.8245685555102593, "grad_norm": 1.9077941179275513, "learning_rate": 0.000522107283598315, "loss": 3.5623, "step": 56290 }, { "epoch": 3.8249082755809214, "grad_norm": 1.6832075119018555, "learning_rate": 0.0005220648185894823, "loss": 3.4349, "step": 56295 }, { "epoch": 3.825247995651583, "grad_norm": 1.6154874563217163, "learning_rate": 0.0005220223535806495, "loss": 3.4548, "step": 56300 }, { "epoch": 3.8255877157222447, "grad_norm": 1.7961463928222656, "learning_rate": 0.0005219798885718169, "loss": 3.4311, "step": 56305 }, { "epoch": 3.8259274357929067, "grad_norm": 3.387350559234619, "learning_rate": 0.0005219374235629842, "loss": 3.614, "step": 56310 }, { "epoch": 3.8262671558635684, "grad_norm": 1.4625571966171265, "learning_rate": 0.0005218949585541514, "loss": 3.5123, "step": 56315 }, { "epoch": 3.82660687593423, "grad_norm": 1.7498159408569336, "learning_rate": 0.0005218524935453187, "loss": 3.1678, "step": 56320 }, { "epoch": 3.826946596004892, "grad_norm": 1.8614647388458252, "learning_rate": 0.0005218100285364859, "loss": 3.4392, "step": 56325 }, { "epoch": 3.8272863160755537, "grad_norm": 1.6291477680206299, "learning_rate": 0.0005217675635276532, "loss": 3.3991, "step": 56330 }, { "epoch": 3.8276260361462153, "grad_norm": 2.1615827083587646, "learning_rate": 0.0005217250985188205, "loss": 3.5151, "step": 56335 }, { "epoch": 3.8279657562168774, "grad_norm": 1.4465514421463013, "learning_rate": 0.0005216826335099878, "loss": 3.5101, "step": 56340 }, { "epoch": 3.828305476287539, "grad_norm": 1.7455044984817505, "learning_rate": 0.0005216401685011551, "loss": 3.5266, "step": 56345 }, { "epoch": 3.8286451963582007, "grad_norm": 1.7186927795410156, 
"learning_rate": 0.0005215977034923224, "loss": 3.6044, "step": 56350 }, { "epoch": 3.8289849164288627, "grad_norm": 2.2354276180267334, "learning_rate": 0.0005215552384834896, "loss": 3.2585, "step": 56355 }, { "epoch": 3.8293246364995244, "grad_norm": 1.8309717178344727, "learning_rate": 0.0005215127734746568, "loss": 3.4577, "step": 56360 }, { "epoch": 3.829664356570186, "grad_norm": 1.873216152191162, "learning_rate": 0.0005214703084658242, "loss": 3.5369, "step": 56365 }, { "epoch": 3.830004076640848, "grad_norm": 1.6108942031860352, "learning_rate": 0.0005214278434569914, "loss": 3.4287, "step": 56370 }, { "epoch": 3.8303437967115097, "grad_norm": 2.792599678039551, "learning_rate": 0.0005213853784481587, "loss": 3.3896, "step": 56375 }, { "epoch": 3.8306835167821713, "grad_norm": 2.1357202529907227, "learning_rate": 0.0005213429134393261, "loss": 3.2753, "step": 56380 }, { "epoch": 3.8310232368528334, "grad_norm": 1.8952093124389648, "learning_rate": 0.0005213004484304933, "loss": 3.2901, "step": 56385 }, { "epoch": 3.831362956923495, "grad_norm": 2.1644492149353027, "learning_rate": 0.0005212579834216605, "loss": 3.3572, "step": 56390 }, { "epoch": 3.8317026769941567, "grad_norm": 2.574598789215088, "learning_rate": 0.0005212155184128279, "loss": 3.5868, "step": 56395 }, { "epoch": 3.8320423970648188, "grad_norm": 1.51866614818573, "learning_rate": 0.0005211730534039951, "loss": 3.3999, "step": 56400 }, { "epoch": 3.8323821171354804, "grad_norm": 2.297222375869751, "learning_rate": 0.0005211305883951623, "loss": 3.6392, "step": 56405 }, { "epoch": 3.832721837206142, "grad_norm": 1.583422303199768, "learning_rate": 0.0005210881233863298, "loss": 3.5416, "step": 56410 }, { "epoch": 3.833061557276804, "grad_norm": 1.8863917589187622, "learning_rate": 0.000521045658377497, "loss": 3.2943, "step": 56415 }, { "epoch": 3.8334012773474657, "grad_norm": 1.9636296033859253, "learning_rate": 0.0005210031933686642, "loss": 3.2019, "step": 56420 }, { "epoch": 
3.8337409974181273, "grad_norm": 2.1097607612609863, "learning_rate": 0.0005209607283598315, "loss": 3.258, "step": 56425 }, { "epoch": 3.8340807174887894, "grad_norm": 1.974997878074646, "learning_rate": 0.0005209182633509988, "loss": 3.3431, "step": 56430 }, { "epoch": 3.834420437559451, "grad_norm": 1.775015115737915, "learning_rate": 0.000520875798342166, "loss": 3.473, "step": 56435 }, { "epoch": 3.8347601576301127, "grad_norm": 1.9030455350875854, "learning_rate": 0.0005208333333333334, "loss": 3.3157, "step": 56440 }, { "epoch": 3.8350998777007748, "grad_norm": 1.9272626638412476, "learning_rate": 0.0005207908683245007, "loss": 3.3128, "step": 56445 }, { "epoch": 3.8354395977714364, "grad_norm": 1.9636307954788208, "learning_rate": 0.0005207484033156679, "loss": 3.4192, "step": 56450 }, { "epoch": 3.835779317842098, "grad_norm": 1.6346676349639893, "learning_rate": 0.0005207059383068352, "loss": 3.6051, "step": 56455 }, { "epoch": 3.83611903791276, "grad_norm": 1.5665277242660522, "learning_rate": 0.0005206634732980024, "loss": 3.5705, "step": 56460 }, { "epoch": 3.8364587579834217, "grad_norm": 1.8047435283660889, "learning_rate": 0.0005206210082891697, "loss": 3.3865, "step": 56465 }, { "epoch": 3.8367984780540834, "grad_norm": 1.711665153503418, "learning_rate": 0.000520578543280337, "loss": 3.371, "step": 56470 }, { "epoch": 3.8371381981247454, "grad_norm": 1.983363151550293, "learning_rate": 0.0005205360782715043, "loss": 3.5342, "step": 56475 }, { "epoch": 3.837477918195407, "grad_norm": 2.281032085418701, "learning_rate": 0.0005204936132626716, "loss": 3.422, "step": 56480 }, { "epoch": 3.8378176382660687, "grad_norm": 1.9767829179763794, "learning_rate": 0.0005204511482538389, "loss": 3.6395, "step": 56485 }, { "epoch": 3.8381573583367308, "grad_norm": 2.704519510269165, "learning_rate": 0.0005204086832450061, "loss": 3.391, "step": 56490 }, { "epoch": 3.8384970784073924, "grad_norm": 1.6564918756484985, "learning_rate": 0.0005203662182361734, 
"loss": 3.691, "step": 56495 }, { "epoch": 3.838836798478054, "grad_norm": 2.0542209148406982, "learning_rate": 0.0005203237532273407, "loss": 3.415, "step": 56500 }, { "epoch": 3.839176518548716, "grad_norm": 1.5699189901351929, "learning_rate": 0.0005202812882185079, "loss": 3.3828, "step": 56505 }, { "epoch": 3.8395162386193777, "grad_norm": 1.839170217514038, "learning_rate": 0.0005202388232096752, "loss": 3.5596, "step": 56510 }, { "epoch": 3.8398559586900394, "grad_norm": 1.8421016931533813, "learning_rate": 0.0005201963582008426, "loss": 3.5039, "step": 56515 }, { "epoch": 3.8401956787607014, "grad_norm": 2.272763967514038, "learning_rate": 0.0005201538931920098, "loss": 3.4977, "step": 56520 }, { "epoch": 3.840535398831363, "grad_norm": 1.6236696243286133, "learning_rate": 0.000520111428183177, "loss": 3.1814, "step": 56525 }, { "epoch": 3.8408751189020247, "grad_norm": 1.8019353151321411, "learning_rate": 0.0005200689631743444, "loss": 3.441, "step": 56530 }, { "epoch": 3.8412148389726863, "grad_norm": 2.849261522293091, "learning_rate": 0.0005200264981655116, "loss": 3.3432, "step": 56535 }, { "epoch": 3.8415545590433484, "grad_norm": 1.8153879642486572, "learning_rate": 0.0005199840331566788, "loss": 3.7936, "step": 56540 }, { "epoch": 3.84189427911401, "grad_norm": 2.3116796016693115, "learning_rate": 0.0005199415681478463, "loss": 3.6807, "step": 56545 }, { "epoch": 3.8422339991846717, "grad_norm": 1.5093411207199097, "learning_rate": 0.0005198991031390135, "loss": 3.4616, "step": 56550 }, { "epoch": 3.8425737192553338, "grad_norm": 2.210508346557617, "learning_rate": 0.0005198566381301807, "loss": 3.6339, "step": 56555 }, { "epoch": 3.8429134393259954, "grad_norm": 1.8574151992797852, "learning_rate": 0.000519814173121348, "loss": 3.5562, "step": 56560 }, { "epoch": 3.843253159396657, "grad_norm": 1.8639459609985352, "learning_rate": 0.0005197717081125153, "loss": 3.2869, "step": 56565 }, { "epoch": 3.843592879467319, "grad_norm": 1.9504622220993042, 
"learning_rate": 0.0005197292431036825, "loss": 3.4462, "step": 56570 }, { "epoch": 3.8439325995379807, "grad_norm": 1.9851340055465698, "learning_rate": 0.0005196867780948498, "loss": 3.2411, "step": 56575 }, { "epoch": 3.8442723196086424, "grad_norm": 1.4339059591293335, "learning_rate": 0.0005196443130860172, "loss": 3.5343, "step": 56580 }, { "epoch": 3.844612039679304, "grad_norm": 2.4247779846191406, "learning_rate": 0.0005196018480771844, "loss": 3.7475, "step": 56585 }, { "epoch": 3.844951759749966, "grad_norm": 1.6892426013946533, "learning_rate": 0.0005195593830683517, "loss": 3.3173, "step": 56590 }, { "epoch": 3.8452914798206277, "grad_norm": 1.6432455778121948, "learning_rate": 0.000519516918059519, "loss": 3.3894, "step": 56595 }, { "epoch": 3.8456311998912893, "grad_norm": 1.927565336227417, "learning_rate": 0.0005194744530506862, "loss": 3.56, "step": 56600 }, { "epoch": 3.8459709199619514, "grad_norm": 1.8826857805252075, "learning_rate": 0.0005194319880418535, "loss": 3.3192, "step": 56605 }, { "epoch": 3.846310640032613, "grad_norm": 1.2688192129135132, "learning_rate": 0.0005193895230330207, "loss": 3.2488, "step": 56610 }, { "epoch": 3.8466503601032747, "grad_norm": 2.965852975845337, "learning_rate": 0.0005193470580241882, "loss": 3.5087, "step": 56615 }, { "epoch": 3.8469900801739367, "grad_norm": 1.6783664226531982, "learning_rate": 0.0005193045930153554, "loss": 3.408, "step": 56620 }, { "epoch": 3.8473298002445984, "grad_norm": 1.594416856765747, "learning_rate": 0.0005192621280065226, "loss": 3.2021, "step": 56625 }, { "epoch": 3.84766952031526, "grad_norm": 2.016493320465088, "learning_rate": 0.00051921966299769, "loss": 3.479, "step": 56630 }, { "epoch": 3.848009240385922, "grad_norm": 1.9712432622909546, "learning_rate": 0.0005191771979888572, "loss": 3.6411, "step": 56635 }, { "epoch": 3.8483489604565837, "grad_norm": 1.6085023880004883, "learning_rate": 0.0005191347329800244, "loss": 3.5387, "step": 56640 }, { "epoch": 
3.8486886805272453, "grad_norm": 1.5447635650634766, "learning_rate": 0.0005190922679711918, "loss": 3.5448, "step": 56645 }, { "epoch": 3.8490284005979074, "grad_norm": 1.538460373878479, "learning_rate": 0.0005190498029623591, "loss": 3.2715, "step": 56650 }, { "epoch": 3.849368120668569, "grad_norm": 1.9164814949035645, "learning_rate": 0.0005190073379535263, "loss": 3.4397, "step": 56655 }, { "epoch": 3.8497078407392307, "grad_norm": 1.758644700050354, "learning_rate": 0.0005189648729446936, "loss": 3.5849, "step": 56660 }, { "epoch": 3.8500475608098927, "grad_norm": 2.140761613845825, "learning_rate": 0.0005189224079358609, "loss": 3.3438, "step": 56665 }, { "epoch": 3.8503872808805544, "grad_norm": 1.6525764465332031, "learning_rate": 0.0005188799429270281, "loss": 3.2786, "step": 56670 }, { "epoch": 3.850727000951216, "grad_norm": 1.5179671049118042, "learning_rate": 0.0005188374779181954, "loss": 3.5134, "step": 56675 }, { "epoch": 3.851066721021878, "grad_norm": 2.57875657081604, "learning_rate": 0.0005187950129093627, "loss": 3.4221, "step": 56680 }, { "epoch": 3.8514064410925397, "grad_norm": 2.0156517028808594, "learning_rate": 0.00051875254790053, "loss": 3.2414, "step": 56685 }, { "epoch": 3.8517461611632013, "grad_norm": 1.9602997303009033, "learning_rate": 0.0005187100828916973, "loss": 3.2998, "step": 56690 }, { "epoch": 3.8520858812338634, "grad_norm": 2.113525629043579, "learning_rate": 0.0005186676178828646, "loss": 3.586, "step": 56695 }, { "epoch": 3.852425601304525, "grad_norm": 1.99051833152771, "learning_rate": 0.0005186251528740318, "loss": 3.2453, "step": 56700 }, { "epoch": 3.8527653213751867, "grad_norm": 1.749464750289917, "learning_rate": 0.0005185826878651991, "loss": 3.2161, "step": 56705 }, { "epoch": 3.8531050414458488, "grad_norm": 1.6223424673080444, "learning_rate": 0.0005185402228563663, "loss": 3.4392, "step": 56710 }, { "epoch": 3.8534447615165104, "grad_norm": 1.8647465705871582, "learning_rate": 0.0005184977578475336, 
"loss": 3.4767, "step": 56715 }, { "epoch": 3.853784481587172, "grad_norm": 1.4310219287872314, "learning_rate": 0.000518455292838701, "loss": 3.502, "step": 56720 }, { "epoch": 3.854124201657834, "grad_norm": 1.7508015632629395, "learning_rate": 0.0005184128278298682, "loss": 3.3908, "step": 56725 }, { "epoch": 3.8544639217284957, "grad_norm": 1.761857032775879, "learning_rate": 0.0005183703628210355, "loss": 3.4078, "step": 56730 }, { "epoch": 3.8548036417991574, "grad_norm": 1.7241103649139404, "learning_rate": 0.0005183278978122028, "loss": 3.4268, "step": 56735 }, { "epoch": 3.8551433618698194, "grad_norm": 1.8744779825210571, "learning_rate": 0.00051828543280337, "loss": 3.503, "step": 56740 }, { "epoch": 3.855483081940481, "grad_norm": 2.029709815979004, "learning_rate": 0.0005182429677945372, "loss": 3.2483, "step": 56745 }, { "epoch": 3.8558228020111427, "grad_norm": 1.5692871809005737, "learning_rate": 0.0005182005027857046, "loss": 3.4528, "step": 56750 }, { "epoch": 3.8561625220818048, "grad_norm": 1.6527471542358398, "learning_rate": 0.0005181580377768719, "loss": 3.4993, "step": 56755 }, { "epoch": 3.8565022421524664, "grad_norm": 1.6351873874664307, "learning_rate": 0.0005181155727680391, "loss": 3.4992, "step": 56760 }, { "epoch": 3.856841962223128, "grad_norm": 1.743904709815979, "learning_rate": 0.0005180731077592065, "loss": 3.3604, "step": 56765 }, { "epoch": 3.85718168229379, "grad_norm": 2.1273772716522217, "learning_rate": 0.0005180306427503737, "loss": 3.6733, "step": 56770 }, { "epoch": 3.8575214023644517, "grad_norm": 1.329409122467041, "learning_rate": 0.0005179881777415409, "loss": 3.486, "step": 56775 }, { "epoch": 3.8578611224351134, "grad_norm": 1.9864355325698853, "learning_rate": 0.0005179457127327083, "loss": 3.3163, "step": 56780 }, { "epoch": 3.8582008425057754, "grad_norm": 1.530501127243042, "learning_rate": 0.0005179032477238755, "loss": 3.3235, "step": 56785 }, { "epoch": 3.858540562576437, "grad_norm": 1.8782172203063965, 
"learning_rate": 0.0005178607827150428, "loss": 3.3292, "step": 56790 }, { "epoch": 3.8588802826470987, "grad_norm": 1.5279768705368042, "learning_rate": 0.0005178183177062102, "loss": 3.5768, "step": 56795 }, { "epoch": 3.8592200027177608, "grad_norm": 2.0877845287323, "learning_rate": 0.0005177758526973774, "loss": 3.2947, "step": 56800 }, { "epoch": 3.8595597227884224, "grad_norm": 2.42905330657959, "learning_rate": 0.0005177333876885446, "loss": 3.2956, "step": 56805 }, { "epoch": 3.859899442859084, "grad_norm": 1.6479971408843994, "learning_rate": 0.0005176909226797119, "loss": 3.4689, "step": 56810 }, { "epoch": 3.860239162929746, "grad_norm": 2.058882474899292, "learning_rate": 0.0005176484576708792, "loss": 3.4386, "step": 56815 }, { "epoch": 3.8605788830004077, "grad_norm": 1.7744802236557007, "learning_rate": 0.0005176059926620464, "loss": 3.6069, "step": 56820 }, { "epoch": 3.8609186030710694, "grad_norm": 1.7920500040054321, "learning_rate": 0.0005175635276532138, "loss": 3.4747, "step": 56825 }, { "epoch": 3.8612583231417315, "grad_norm": 1.5849707126617432, "learning_rate": 0.0005175210626443811, "loss": 3.2393, "step": 56830 }, { "epoch": 3.861598043212393, "grad_norm": 1.8212707042694092, "learning_rate": 0.0005174785976355483, "loss": 3.2144, "step": 56835 }, { "epoch": 3.8619377632830547, "grad_norm": 1.9311002492904663, "learning_rate": 0.0005174361326267156, "loss": 3.3675, "step": 56840 }, { "epoch": 3.862277483353717, "grad_norm": 1.8887048959732056, "learning_rate": 0.0005173936676178828, "loss": 3.3124, "step": 56845 }, { "epoch": 3.8626172034243784, "grad_norm": 2.000714063644409, "learning_rate": 0.0005173512026090501, "loss": 3.2561, "step": 56850 }, { "epoch": 3.86295692349504, "grad_norm": 1.963029146194458, "learning_rate": 0.0005173087376002174, "loss": 3.4852, "step": 56855 }, { "epoch": 3.863296643565702, "grad_norm": 1.5295010805130005, "learning_rate": 0.0005172662725913847, "loss": 3.2129, "step": 56860 }, { "epoch": 
3.8636363636363638, "grad_norm": 2.5352933406829834, "learning_rate": 0.000517223807582552, "loss": 3.161, "step": 56865 }, { "epoch": 3.8639760837070254, "grad_norm": 1.7956359386444092, "learning_rate": 0.0005171813425737193, "loss": 3.4015, "step": 56870 }, { "epoch": 3.864315803777687, "grad_norm": 1.8703429698944092, "learning_rate": 0.0005171388775648865, "loss": 3.7039, "step": 56875 }, { "epoch": 3.864655523848349, "grad_norm": 1.9282222986221313, "learning_rate": 0.0005170964125560538, "loss": 3.5474, "step": 56880 }, { "epoch": 3.8649952439190107, "grad_norm": 2.1137328147888184, "learning_rate": 0.0005170539475472211, "loss": 3.3425, "step": 56885 }, { "epoch": 3.8653349639896724, "grad_norm": 1.7813395261764526, "learning_rate": 0.0005170114825383883, "loss": 3.4778, "step": 56890 }, { "epoch": 3.8656746840603344, "grad_norm": 1.8722331523895264, "learning_rate": 0.0005169690175295557, "loss": 3.2934, "step": 56895 }, { "epoch": 3.866014404130996, "grad_norm": 1.9904848337173462, "learning_rate": 0.000516926552520723, "loss": 3.4977, "step": 56900 }, { "epoch": 3.8663541242016577, "grad_norm": 1.5605456829071045, "learning_rate": 0.0005168840875118902, "loss": 3.4343, "step": 56905 }, { "epoch": 3.8666938442723198, "grad_norm": 2.237401008605957, "learning_rate": 0.0005168416225030574, "loss": 3.6544, "step": 56910 }, { "epoch": 3.8670335643429814, "grad_norm": 2.053285837173462, "learning_rate": 0.0005167991574942248, "loss": 3.4436, "step": 56915 }, { "epoch": 3.867373284413643, "grad_norm": 1.7044247388839722, "learning_rate": 0.000516756692485392, "loss": 3.4074, "step": 56920 }, { "epoch": 3.8677130044843047, "grad_norm": 1.8312864303588867, "learning_rate": 0.0005167142274765592, "loss": 3.2798, "step": 56925 }, { "epoch": 3.8680527245549667, "grad_norm": 2.0606801509857178, "learning_rate": 0.0005166717624677267, "loss": 3.4719, "step": 56930 }, { "epoch": 3.8683924446256284, "grad_norm": 1.8545942306518555, "learning_rate": 
0.0005166292974588939, "loss": 3.213, "step": 56935 }, { "epoch": 3.86873216469629, "grad_norm": 2.3927206993103027, "learning_rate": 0.0005165868324500611, "loss": 3.5101, "step": 56940 }, { "epoch": 3.869071884766952, "grad_norm": 2.581562042236328, "learning_rate": 0.0005165443674412285, "loss": 3.7093, "step": 56945 }, { "epoch": 3.8694116048376137, "grad_norm": 1.7696739435195923, "learning_rate": 0.0005165019024323957, "loss": 3.3519, "step": 56950 }, { "epoch": 3.8697513249082753, "grad_norm": 1.8828167915344238, "learning_rate": 0.000516459437423563, "loss": 3.4809, "step": 56955 }, { "epoch": 3.8700910449789374, "grad_norm": 2.1703224182128906, "learning_rate": 0.0005164169724147302, "loss": 3.5859, "step": 56960 }, { "epoch": 3.870430765049599, "grad_norm": 1.7020065784454346, "learning_rate": 0.0005163745074058976, "loss": 3.5413, "step": 56965 }, { "epoch": 3.8707704851202607, "grad_norm": 1.731671929359436, "learning_rate": 0.0005163320423970649, "loss": 3.4394, "step": 56970 }, { "epoch": 3.8711102051909227, "grad_norm": 2.179205894470215, "learning_rate": 0.0005162895773882321, "loss": 3.3843, "step": 56975 }, { "epoch": 3.8714499252615844, "grad_norm": 1.5532723665237427, "learning_rate": 0.0005162471123793994, "loss": 3.509, "step": 56980 }, { "epoch": 3.871789645332246, "grad_norm": 1.934831142425537, "learning_rate": 0.0005162046473705667, "loss": 3.1584, "step": 56985 }, { "epoch": 3.872129365402908, "grad_norm": 3.062343120574951, "learning_rate": 0.0005161621823617339, "loss": 3.5056, "step": 56990 }, { "epoch": 3.8724690854735697, "grad_norm": 1.7867380380630493, "learning_rate": 0.0005161197173529011, "loss": 3.4353, "step": 56995 }, { "epoch": 3.8728088055442313, "grad_norm": 1.4394221305847168, "learning_rate": 0.0005160772523440686, "loss": 3.4208, "step": 57000 }, { "epoch": 3.8731485256148934, "grad_norm": 1.5559203624725342, "learning_rate": 0.0005160347873352358, "loss": 3.455, "step": 57005 }, { "epoch": 3.873488245685555, 
"grad_norm": 1.937985897064209, "learning_rate": 0.000515992322326403, "loss": 3.4371, "step": 57010 }, { "epoch": 3.8738279657562167, "grad_norm": 1.9290037155151367, "learning_rate": 0.0005159498573175704, "loss": 3.3296, "step": 57015 }, { "epoch": 3.8741676858268788, "grad_norm": 1.8603309392929077, "learning_rate": 0.0005159073923087376, "loss": 3.5468, "step": 57020 }, { "epoch": 3.8745074058975404, "grad_norm": 1.934470295906067, "learning_rate": 0.0005158649272999048, "loss": 3.4204, "step": 57025 }, { "epoch": 3.874847125968202, "grad_norm": 1.8621978759765625, "learning_rate": 0.0005158224622910723, "loss": 3.3315, "step": 57030 }, { "epoch": 3.875186846038864, "grad_norm": 1.8340799808502197, "learning_rate": 0.0005157799972822395, "loss": 3.5245, "step": 57035 }, { "epoch": 3.8755265661095257, "grad_norm": 1.6276984214782715, "learning_rate": 0.0005157375322734067, "loss": 3.3609, "step": 57040 }, { "epoch": 3.8758662861801874, "grad_norm": 1.8919336795806885, "learning_rate": 0.000515695067264574, "loss": 3.2678, "step": 57045 }, { "epoch": 3.8762060062508494, "grad_norm": 2.2390122413635254, "learning_rate": 0.0005156526022557413, "loss": 3.3798, "step": 57050 }, { "epoch": 3.876545726321511, "grad_norm": 2.0421628952026367, "learning_rate": 0.0005156101372469085, "loss": 3.3049, "step": 57055 }, { "epoch": 3.8768854463921727, "grad_norm": 1.7271175384521484, "learning_rate": 0.0005155676722380758, "loss": 3.3776, "step": 57060 }, { "epoch": 3.8772251664628348, "grad_norm": 2.178217887878418, "learning_rate": 0.0005155252072292432, "loss": 3.2488, "step": 57065 }, { "epoch": 3.8775648865334964, "grad_norm": 2.1899025440216064, "learning_rate": 0.0005154827422204104, "loss": 3.2784, "step": 57070 }, { "epoch": 3.877904606604158, "grad_norm": 1.6713200807571411, "learning_rate": 0.0005154402772115777, "loss": 3.5762, "step": 57075 }, { "epoch": 3.87824432667482, "grad_norm": 1.8602046966552734, "learning_rate": 0.000515397812202745, "loss": 3.2848, 
"step": 57080 }, { "epoch": 3.8785840467454817, "grad_norm": 1.7790327072143555, "learning_rate": 0.0005153553471939122, "loss": 3.706, "step": 57085 }, { "epoch": 3.8789237668161434, "grad_norm": 2.268230438232422, "learning_rate": 0.0005153128821850795, "loss": 3.5301, "step": 57090 }, { "epoch": 3.8792634868868054, "grad_norm": 1.9787895679473877, "learning_rate": 0.0005152704171762467, "loss": 3.4613, "step": 57095 }, { "epoch": 3.879603206957467, "grad_norm": 1.796850323677063, "learning_rate": 0.0005152279521674141, "loss": 3.3896, "step": 57100 }, { "epoch": 3.8799429270281287, "grad_norm": 2.049726963043213, "learning_rate": 0.0005151854871585814, "loss": 3.5191, "step": 57105 }, { "epoch": 3.880282647098791, "grad_norm": 1.7300852537155151, "learning_rate": 0.0005151430221497486, "loss": 3.4034, "step": 57110 }, { "epoch": 3.8806223671694524, "grad_norm": 1.4972370862960815, "learning_rate": 0.0005151005571409159, "loss": 3.5188, "step": 57115 }, { "epoch": 3.880962087240114, "grad_norm": 2.1972970962524414, "learning_rate": 0.0005150580921320832, "loss": 3.3048, "step": 57120 }, { "epoch": 3.881301807310776, "grad_norm": 1.7082237005233765, "learning_rate": 0.0005150156271232504, "loss": 3.3918, "step": 57125 }, { "epoch": 3.8816415273814378, "grad_norm": 1.7614011764526367, "learning_rate": 0.0005149731621144177, "loss": 3.5823, "step": 57130 }, { "epoch": 3.8819812474520994, "grad_norm": 1.851216197013855, "learning_rate": 0.0005149306971055851, "loss": 3.3471, "step": 57135 }, { "epoch": 3.8823209675227615, "grad_norm": 1.8169565200805664, "learning_rate": 0.0005148882320967523, "loss": 3.3761, "step": 57140 }, { "epoch": 3.882660687593423, "grad_norm": 1.727287769317627, "learning_rate": 0.0005148457670879195, "loss": 3.3406, "step": 57145 }, { "epoch": 3.8830004076640847, "grad_norm": 1.8948643207550049, "learning_rate": 0.0005148033020790869, "loss": 3.3098, "step": 57150 }, { "epoch": 3.883340127734747, "grad_norm": 1.6867369413375854, 
"learning_rate": 0.0005147608370702541, "loss": 3.2372, "step": 57155 }, { "epoch": 3.8836798478054084, "grad_norm": 1.632028341293335, "learning_rate": 0.0005147183720614213, "loss": 3.7895, "step": 57160 }, { "epoch": 3.88401956787607, "grad_norm": 1.6719268560409546, "learning_rate": 0.0005146759070525887, "loss": 3.5479, "step": 57165 }, { "epoch": 3.884359287946732, "grad_norm": 1.591551661491394, "learning_rate": 0.000514633442043756, "loss": 3.4784, "step": 57170 }, { "epoch": 3.8846990080173938, "grad_norm": 1.890044093132019, "learning_rate": 0.0005145909770349232, "loss": 3.2253, "step": 57175 }, { "epoch": 3.8850387280880554, "grad_norm": 1.6856324672698975, "learning_rate": 0.0005145485120260906, "loss": 3.2592, "step": 57180 }, { "epoch": 3.8853784481587175, "grad_norm": 1.663657307624817, "learning_rate": 0.0005145060470172578, "loss": 3.4465, "step": 57185 }, { "epoch": 3.885718168229379, "grad_norm": 1.6580034494400024, "learning_rate": 0.000514463582008425, "loss": 3.4213, "step": 57190 }, { "epoch": 3.8860578883000407, "grad_norm": 1.3169679641723633, "learning_rate": 0.0005144211169995923, "loss": 3.4661, "step": 57195 }, { "epoch": 3.886397608370703, "grad_norm": 1.5900590419769287, "learning_rate": 0.0005143786519907596, "loss": 3.1132, "step": 57200 }, { "epoch": 3.8867373284413644, "grad_norm": 1.6860158443450928, "learning_rate": 0.0005143361869819269, "loss": 3.4967, "step": 57205 }, { "epoch": 3.887077048512026, "grad_norm": 1.615679383277893, "learning_rate": 0.0005142937219730942, "loss": 3.5511, "step": 57210 }, { "epoch": 3.887416768582688, "grad_norm": 2.9982047080993652, "learning_rate": 0.0005142512569642615, "loss": 3.6675, "step": 57215 }, { "epoch": 3.8877564886533498, "grad_norm": 1.735037922859192, "learning_rate": 0.0005142087919554287, "loss": 3.4766, "step": 57220 }, { "epoch": 3.8880962087240114, "grad_norm": 1.75558340549469, "learning_rate": 0.000514166326946596, "loss": 3.3334, "step": 57225 }, { "epoch": 
3.888435928794673, "grad_norm": 2.3491098880767822, "learning_rate": 0.0005141238619377633, "loss": 3.3112, "step": 57230 }, { "epoch": 3.888775648865335, "grad_norm": 1.9729537963867188, "learning_rate": 0.0005140813969289305, "loss": 3.4145, "step": 57235 }, { "epoch": 3.8891153689359967, "grad_norm": 1.5204596519470215, "learning_rate": 0.0005140389319200979, "loss": 3.313, "step": 57240 }, { "epoch": 3.8894550890066584, "grad_norm": 2.114959716796875, "learning_rate": 0.0005139964669112651, "loss": 3.4171, "step": 57245 }, { "epoch": 3.8897948090773204, "grad_norm": 1.5437709093093872, "learning_rate": 0.0005139540019024324, "loss": 3.2883, "step": 57250 }, { "epoch": 3.890134529147982, "grad_norm": 1.6688003540039062, "learning_rate": 0.0005139115368935997, "loss": 3.529, "step": 57255 }, { "epoch": 3.8904742492186437, "grad_norm": 2.0703647136688232, "learning_rate": 0.0005138690718847669, "loss": 3.4903, "step": 57260 }, { "epoch": 3.8908139692893053, "grad_norm": 1.985534906387329, "learning_rate": 0.0005138266068759342, "loss": 3.5896, "step": 57265 }, { "epoch": 3.8911536893599674, "grad_norm": 2.187878370285034, "learning_rate": 0.0005137841418671015, "loss": 3.3209, "step": 57270 }, { "epoch": 3.891493409430629, "grad_norm": 2.305326461791992, "learning_rate": 0.0005137416768582688, "loss": 3.4181, "step": 57275 }, { "epoch": 3.8918331295012907, "grad_norm": 2.038255214691162, "learning_rate": 0.000513699211849436, "loss": 3.2795, "step": 57280 }, { "epoch": 3.8921728495719528, "grad_norm": 1.7676920890808105, "learning_rate": 0.0005136567468406034, "loss": 3.4139, "step": 57285 }, { "epoch": 3.8925125696426144, "grad_norm": 1.8548632860183716, "learning_rate": 0.0005136142818317706, "loss": 3.5003, "step": 57290 }, { "epoch": 3.892852289713276, "grad_norm": 1.6744433641433716, "learning_rate": 0.000513571816822938, "loss": 3.5385, "step": 57295 }, { "epoch": 3.893192009783938, "grad_norm": 2.331648111343384, "learning_rate": 0.0005135293518141052, 
"loss": 3.3867, "step": 57300 }, { "epoch": 3.8935317298545997, "grad_norm": 1.8029060363769531, "learning_rate": 0.0005134868868052724, "loss": 3.6093, "step": 57305 }, { "epoch": 3.8938714499252614, "grad_norm": 1.7405658960342407, "learning_rate": 0.0005134444217964398, "loss": 3.3681, "step": 57310 }, { "epoch": 3.8942111699959234, "grad_norm": 2.1547529697418213, "learning_rate": 0.0005134019567876071, "loss": 3.4259, "step": 57315 }, { "epoch": 3.894550890066585, "grad_norm": 2.400317907333374, "learning_rate": 0.0005133594917787743, "loss": 3.5229, "step": 57320 }, { "epoch": 3.8948906101372467, "grad_norm": 1.8595784902572632, "learning_rate": 0.0005133170267699416, "loss": 3.5384, "step": 57325 }, { "epoch": 3.8952303302079088, "grad_norm": 1.8828003406524658, "learning_rate": 0.0005132745617611089, "loss": 3.3868, "step": 57330 }, { "epoch": 3.8955700502785704, "grad_norm": 2.080796718597412, "learning_rate": 0.0005132320967522761, "loss": 3.425, "step": 57335 }, { "epoch": 3.895909770349232, "grad_norm": 1.5875126123428345, "learning_rate": 0.0005131896317434434, "loss": 3.23, "step": 57340 }, { "epoch": 3.896249490419894, "grad_norm": 2.2161006927490234, "learning_rate": 0.0005131471667346107, "loss": 3.6118, "step": 57345 }, { "epoch": 3.8965892104905557, "grad_norm": 1.3159123659133911, "learning_rate": 0.000513104701725778, "loss": 3.6672, "step": 57350 }, { "epoch": 3.8969289305612174, "grad_norm": 1.6459141969680786, "learning_rate": 0.0005130622367169453, "loss": 3.6017, "step": 57355 }, { "epoch": 3.8972686506318794, "grad_norm": 1.8749366998672485, "learning_rate": 0.0005130197717081125, "loss": 3.4977, "step": 57360 }, { "epoch": 3.897608370702541, "grad_norm": 1.5797371864318848, "learning_rate": 0.0005129773066992798, "loss": 3.5714, "step": 57365 }, { "epoch": 3.8979480907732027, "grad_norm": 1.5921072959899902, "learning_rate": 0.0005129348416904471, "loss": 3.5314, "step": 57370 }, { "epoch": 3.8982878108438648, "grad_norm": 
2.6125447750091553, "learning_rate": 0.0005128923766816143, "loss": 3.6095, "step": 57375 }, { "epoch": 3.8986275309145264, "grad_norm": 2.5152101516723633, "learning_rate": 0.0005128499116727817, "loss": 3.6701, "step": 57380 }, { "epoch": 3.898967250985188, "grad_norm": 1.752758264541626, "learning_rate": 0.000512807446663949, "loss": 3.3644, "step": 57385 }, { "epoch": 3.89930697105585, "grad_norm": 1.8924732208251953, "learning_rate": 0.0005127649816551162, "loss": 3.5234, "step": 57390 }, { "epoch": 3.8996466911265117, "grad_norm": 2.302945852279663, "learning_rate": 0.0005127225166462834, "loss": 3.3458, "step": 57395 }, { "epoch": 3.8999864111971734, "grad_norm": 2.517491340637207, "learning_rate": 0.0005126800516374508, "loss": 3.3318, "step": 57400 }, { "epoch": 3.9003261312678354, "grad_norm": 1.4028021097183228, "learning_rate": 0.000512637586628618, "loss": 3.1892, "step": 57405 }, { "epoch": 3.900665851338497, "grad_norm": 1.5610827207565308, "learning_rate": 0.0005125951216197852, "loss": 3.3301, "step": 57410 }, { "epoch": 3.9010055714091587, "grad_norm": 1.8527189493179321, "learning_rate": 0.0005125526566109527, "loss": 3.1281, "step": 57415 }, { "epoch": 3.901345291479821, "grad_norm": 2.2038862705230713, "learning_rate": 0.0005125101916021199, "loss": 3.4387, "step": 57420 }, { "epoch": 3.9016850115504824, "grad_norm": 1.8362178802490234, "learning_rate": 0.0005124677265932871, "loss": 3.5228, "step": 57425 }, { "epoch": 3.902024731621144, "grad_norm": 1.7935258150100708, "learning_rate": 0.0005124252615844545, "loss": 3.3723, "step": 57430 }, { "epoch": 3.902364451691806, "grad_norm": 1.9717274904251099, "learning_rate": 0.0005123827965756217, "loss": 3.3369, "step": 57435 }, { "epoch": 3.9027041717624678, "grad_norm": 2.635434150695801, "learning_rate": 0.0005123403315667889, "loss": 3.4138, "step": 57440 }, { "epoch": 3.9030438918331294, "grad_norm": 2.2583396434783936, "learning_rate": 0.0005122978665579562, "loss": 3.1631, "step": 57445 }, { 
"epoch": 3.9033836119037915, "grad_norm": 1.6088206768035889, "learning_rate": 0.0005122554015491236, "loss": 3.2972, "step": 57450 }, { "epoch": 3.903723331974453, "grad_norm": 1.7687972784042358, "learning_rate": 0.0005122129365402908, "loss": 3.7401, "step": 57455 }, { "epoch": 3.9040630520451147, "grad_norm": 1.5413424968719482, "learning_rate": 0.0005121704715314581, "loss": 3.5071, "step": 57460 }, { "epoch": 3.904402772115777, "grad_norm": 1.4847745895385742, "learning_rate": 0.0005121280065226254, "loss": 3.406, "step": 57465 }, { "epoch": 3.9047424921864384, "grad_norm": 2.169926881790161, "learning_rate": 0.0005120855415137926, "loss": 3.4735, "step": 57470 }, { "epoch": 3.9050822122571, "grad_norm": 1.711479663848877, "learning_rate": 0.0005120430765049599, "loss": 3.3361, "step": 57475 }, { "epoch": 3.905421932327762, "grad_norm": 2.0004773139953613, "learning_rate": 0.0005120006114961271, "loss": 3.4715, "step": 57480 }, { "epoch": 3.9057616523984238, "grad_norm": 2.0894887447357178, "learning_rate": 0.0005119581464872945, "loss": 3.319, "step": 57485 }, { "epoch": 3.9061013724690854, "grad_norm": 1.738634705543518, "learning_rate": 0.0005119156814784618, "loss": 2.966, "step": 57490 }, { "epoch": 3.9064410925397475, "grad_norm": 2.0151519775390625, "learning_rate": 0.000511873216469629, "loss": 3.4845, "step": 57495 }, { "epoch": 3.906780812610409, "grad_norm": 1.854235053062439, "learning_rate": 0.0005118307514607963, "loss": 3.4287, "step": 57500 }, { "epoch": 3.9071205326810707, "grad_norm": 1.5538082122802734, "learning_rate": 0.0005117882864519636, "loss": 3.6098, "step": 57505 }, { "epoch": 3.907460252751733, "grad_norm": 1.65574049949646, "learning_rate": 0.0005117458214431308, "loss": 3.3279, "step": 57510 }, { "epoch": 3.9077999728223944, "grad_norm": 2.092219829559326, "learning_rate": 0.0005117033564342981, "loss": 3.5144, "step": 57515 }, { "epoch": 3.908139692893056, "grad_norm": 1.6568050384521484, "learning_rate": 0.0005116608914254655, 
"loss": 3.5315, "step": 57520 }, { "epoch": 3.908479412963718, "grad_norm": 2.6003901958465576, "learning_rate": 0.0005116184264166327, "loss": 3.1122, "step": 57525 }, { "epoch": 3.90881913303438, "grad_norm": 2.011430263519287, "learning_rate": 0.0005115759614078, "loss": 3.2682, "step": 57530 }, { "epoch": 3.9091588531050414, "grad_norm": 2.1224653720855713, "learning_rate": 0.0005115334963989673, "loss": 3.5019, "step": 57535 }, { "epoch": 3.9094985731757035, "grad_norm": 1.7641552686691284, "learning_rate": 0.0005114910313901345, "loss": 3.4312, "step": 57540 }, { "epoch": 3.909838293246365, "grad_norm": 1.4813203811645508, "learning_rate": 0.0005114485663813017, "loss": 3.3875, "step": 57545 }, { "epoch": 3.9101780133170267, "grad_norm": 2.265653133392334, "learning_rate": 0.0005114061013724691, "loss": 3.4352, "step": 57550 }, { "epoch": 3.910517733387689, "grad_norm": 1.6285457611083984, "learning_rate": 0.0005113636363636364, "loss": 3.3835, "step": 57555 }, { "epoch": 3.9108574534583505, "grad_norm": 1.5655994415283203, "learning_rate": 0.0005113211713548036, "loss": 3.5144, "step": 57560 }, { "epoch": 3.911197173529012, "grad_norm": 1.631917119026184, "learning_rate": 0.000511278706345971, "loss": 3.3988, "step": 57565 }, { "epoch": 3.9115368935996737, "grad_norm": 1.7586544752120972, "learning_rate": 0.0005112362413371382, "loss": 3.4522, "step": 57570 }, { "epoch": 3.911876613670336, "grad_norm": 1.8999146223068237, "learning_rate": 0.0005111937763283054, "loss": 3.416, "step": 57575 }, { "epoch": 3.9122163337409974, "grad_norm": 1.6062387228012085, "learning_rate": 0.0005111513113194728, "loss": 3.4358, "step": 57580 }, { "epoch": 3.912556053811659, "grad_norm": 1.876452088356018, "learning_rate": 0.00051110884631064, "loss": 3.3502, "step": 57585 }, { "epoch": 3.912895773882321, "grad_norm": 2.0426177978515625, "learning_rate": 0.0005110663813018073, "loss": 3.6268, "step": 57590 }, { "epoch": 3.9132354939529828, "grad_norm": 2.2611563205718994, 
"learning_rate": 0.0005110239162929746, "loss": 3.3456, "step": 57595 }, { "epoch": 3.9135752140236444, "grad_norm": 2.2030417919158936, "learning_rate": 0.0005109814512841419, "loss": 3.3718, "step": 57600 }, { "epoch": 3.913914934094306, "grad_norm": 2.0321953296661377, "learning_rate": 0.0005109389862753091, "loss": 3.5095, "step": 57605 }, { "epoch": 3.914254654164968, "grad_norm": 1.9420185089111328, "learning_rate": 0.0005108965212664764, "loss": 3.3681, "step": 57610 }, { "epoch": 3.9145943742356297, "grad_norm": 2.6362526416778564, "learning_rate": 0.0005108540562576437, "loss": 3.5358, "step": 57615 }, { "epoch": 3.9149340943062914, "grad_norm": 1.8472044467926025, "learning_rate": 0.0005108115912488109, "loss": 3.4432, "step": 57620 }, { "epoch": 3.9152738143769534, "grad_norm": 1.5523854494094849, "learning_rate": 0.0005107691262399783, "loss": 3.5383, "step": 57625 }, { "epoch": 3.915613534447615, "grad_norm": 2.506770610809326, "learning_rate": 0.0005107266612311456, "loss": 3.4056, "step": 57630 }, { "epoch": 3.9159532545182767, "grad_norm": 1.8298780918121338, "learning_rate": 0.0005106841962223129, "loss": 3.558, "step": 57635 }, { "epoch": 3.9162929745889388, "grad_norm": 1.9293732643127441, "learning_rate": 0.0005106417312134801, "loss": 3.5461, "step": 57640 }, { "epoch": 3.9166326946596004, "grad_norm": 1.6462363004684448, "learning_rate": 0.0005105992662046473, "loss": 3.5719, "step": 57645 }, { "epoch": 3.916972414730262, "grad_norm": 1.5366971492767334, "learning_rate": 0.0005105568011958147, "loss": 3.308, "step": 57650 }, { "epoch": 3.917312134800924, "grad_norm": 1.8420181274414062, "learning_rate": 0.000510514336186982, "loss": 3.209, "step": 57655 }, { "epoch": 3.9176518548715857, "grad_norm": 2.1272850036621094, "learning_rate": 0.0005104718711781492, "loss": 3.2802, "step": 57660 }, { "epoch": 3.9179915749422474, "grad_norm": 1.2226091623306274, "learning_rate": 0.0005104294061693166, "loss": 3.5357, "step": 57665 }, { "epoch": 
3.9183312950129094, "grad_norm": 2.0867319107055664, "learning_rate": 0.0005103869411604838, "loss": 3.4708, "step": 57670 }, { "epoch": 3.918671015083571, "grad_norm": 1.475280523300171, "learning_rate": 0.000510344476151651, "loss": 3.5644, "step": 57675 }, { "epoch": 3.9190107351542327, "grad_norm": 1.531124234199524, "learning_rate": 0.0005103020111428184, "loss": 3.4031, "step": 57680 }, { "epoch": 3.919350455224895, "grad_norm": 1.5928181409835815, "learning_rate": 0.0005102595461339856, "loss": 3.3889, "step": 57685 }, { "epoch": 3.9196901752955564, "grad_norm": 1.4934546947479248, "learning_rate": 0.0005102170811251529, "loss": 3.4543, "step": 57690 }, { "epoch": 3.920029895366218, "grad_norm": 1.471187710762024, "learning_rate": 0.0005101746161163202, "loss": 3.6648, "step": 57695 }, { "epoch": 3.92036961543688, "grad_norm": 1.6250571012496948, "learning_rate": 0.0005101321511074875, "loss": 3.5586, "step": 57700 }, { "epoch": 3.9207093355075417, "grad_norm": 1.536060094833374, "learning_rate": 0.0005100896860986547, "loss": 3.6704, "step": 57705 }, { "epoch": 3.9210490555782034, "grad_norm": 2.173341751098633, "learning_rate": 0.000510047221089822, "loss": 3.4092, "step": 57710 }, { "epoch": 3.9213887756488655, "grad_norm": 1.8797255754470825, "learning_rate": 0.0005100047560809893, "loss": 3.2193, "step": 57715 }, { "epoch": 3.921728495719527, "grad_norm": 2.323857069015503, "learning_rate": 0.0005099622910721565, "loss": 3.169, "step": 57720 }, { "epoch": 3.9220682157901887, "grad_norm": 2.1193926334381104, "learning_rate": 0.0005099198260633239, "loss": 3.1028, "step": 57725 }, { "epoch": 3.922407935860851, "grad_norm": 1.76458740234375, "learning_rate": 0.0005098773610544912, "loss": 3.4237, "step": 57730 }, { "epoch": 3.9227476559315124, "grad_norm": 1.6770023107528687, "learning_rate": 0.0005098348960456584, "loss": 3.4805, "step": 57735 }, { "epoch": 3.923087376002174, "grad_norm": 2.0689072608947754, "learning_rate": 0.0005097924310368257, "loss": 
3.3583, "step": 57740 }, { "epoch": 3.923427096072836, "grad_norm": 1.6327658891677856, "learning_rate": 0.0005097499660279929, "loss": 3.3259, "step": 57745 }, { "epoch": 3.9237668161434978, "grad_norm": 2.081298351287842, "learning_rate": 0.0005097075010191602, "loss": 3.6077, "step": 57750 }, { "epoch": 3.9241065362141594, "grad_norm": 2.481914758682251, "learning_rate": 0.0005096650360103275, "loss": 3.3624, "step": 57755 }, { "epoch": 3.9244462562848215, "grad_norm": 1.4678184986114502, "learning_rate": 0.0005096225710014948, "loss": 3.3843, "step": 57760 }, { "epoch": 3.924785976355483, "grad_norm": 1.4376804828643799, "learning_rate": 0.0005095801059926621, "loss": 3.4111, "step": 57765 }, { "epoch": 3.9251256964261447, "grad_norm": 2.187279462814331, "learning_rate": 0.0005095376409838294, "loss": 3.2782, "step": 57770 }, { "epoch": 3.925465416496807, "grad_norm": 1.5958460569381714, "learning_rate": 0.0005094951759749966, "loss": 3.3902, "step": 57775 }, { "epoch": 3.9258051365674684, "grad_norm": 1.6435755491256714, "learning_rate": 0.0005094527109661638, "loss": 3.2443, "step": 57780 }, { "epoch": 3.92614485663813, "grad_norm": 2.529425621032715, "learning_rate": 0.0005094102459573312, "loss": 3.4772, "step": 57785 }, { "epoch": 3.926484576708792, "grad_norm": 1.6545387506484985, "learning_rate": 0.0005093677809484984, "loss": 3.5861, "step": 57790 }, { "epoch": 3.9268242967794538, "grad_norm": 1.6536802053451538, "learning_rate": 0.0005093253159396657, "loss": 3.099, "step": 57795 }, { "epoch": 3.9271640168501154, "grad_norm": 1.8424315452575684, "learning_rate": 0.0005092828509308331, "loss": 3.476, "step": 57800 }, { "epoch": 3.9275037369207775, "grad_norm": 1.7331821918487549, "learning_rate": 0.0005092403859220003, "loss": 3.6269, "step": 57805 }, { "epoch": 3.927843456991439, "grad_norm": 2.1645472049713135, "learning_rate": 0.0005091979209131675, "loss": 3.49, "step": 57810 }, { "epoch": 3.9281831770621007, "grad_norm": 2.2487897872924805, 
"learning_rate": 0.0005091554559043349, "loss": 3.3883, "step": 57815 }, { "epoch": 3.928522897132763, "grad_norm": 1.9636189937591553, "learning_rate": 0.0005091129908955021, "loss": 3.1459, "step": 57820 }, { "epoch": 3.9288626172034244, "grad_norm": 2.3286774158477783, "learning_rate": 0.0005090705258866693, "loss": 3.5344, "step": 57825 }, { "epoch": 3.929202337274086, "grad_norm": 1.632469892501831, "learning_rate": 0.0005090280608778368, "loss": 3.4869, "step": 57830 }, { "epoch": 3.929542057344748, "grad_norm": 2.221296787261963, "learning_rate": 0.000508985595869004, "loss": 3.1875, "step": 57835 }, { "epoch": 3.92988177741541, "grad_norm": 1.814892053604126, "learning_rate": 0.0005089431308601712, "loss": 3.2841, "step": 57840 }, { "epoch": 3.9302214974860714, "grad_norm": 2.3048741817474365, "learning_rate": 0.0005089006658513385, "loss": 3.5318, "step": 57845 }, { "epoch": 3.9305612175567335, "grad_norm": 1.8817839622497559, "learning_rate": 0.0005088582008425058, "loss": 3.3494, "step": 57850 }, { "epoch": 3.930900937627395, "grad_norm": 1.4273558855056763, "learning_rate": 0.000508815735833673, "loss": 3.5336, "step": 57855 }, { "epoch": 3.9312406576980568, "grad_norm": 2.0082027912139893, "learning_rate": 0.0005087732708248403, "loss": 3.5716, "step": 57860 }, { "epoch": 3.931580377768719, "grad_norm": 1.9055250883102417, "learning_rate": 0.0005087308058160077, "loss": 3.3404, "step": 57865 }, { "epoch": 3.9319200978393805, "grad_norm": 1.7219785451889038, "learning_rate": 0.0005086883408071749, "loss": 3.2736, "step": 57870 }, { "epoch": 3.932259817910042, "grad_norm": 1.8815370798110962, "learning_rate": 0.0005086458757983422, "loss": 3.5931, "step": 57875 }, { "epoch": 3.932599537980704, "grad_norm": 1.6211597919464111, "learning_rate": 0.0005086034107895094, "loss": 3.4277, "step": 57880 }, { "epoch": 3.932939258051366, "grad_norm": 2.2997794151306152, "learning_rate": 0.0005085609457806767, "loss": 3.5313, "step": 57885 }, { "epoch": 
3.9332789781220274, "grad_norm": 1.3698185682296753, "learning_rate": 0.000508518480771844, "loss": 3.2864, "step": 57890 }, { "epoch": 3.9336186981926895, "grad_norm": 1.6002880334854126, "learning_rate": 0.0005084760157630112, "loss": 3.3537, "step": 57895 }, { "epoch": 3.933958418263351, "grad_norm": 1.44249427318573, "learning_rate": 0.0005084335507541786, "loss": 3.3194, "step": 57900 }, { "epoch": 3.9342981383340128, "grad_norm": 2.1902964115142822, "learning_rate": 0.0005083910857453459, "loss": 3.5304, "step": 57905 }, { "epoch": 3.9346378584046744, "grad_norm": 1.6125507354736328, "learning_rate": 0.0005083486207365131, "loss": 3.6567, "step": 57910 }, { "epoch": 3.9349775784753365, "grad_norm": 1.6724737882614136, "learning_rate": 0.0005083061557276804, "loss": 3.4568, "step": 57915 }, { "epoch": 3.935317298545998, "grad_norm": 2.545165777206421, "learning_rate": 0.0005082636907188477, "loss": 3.2813, "step": 57920 }, { "epoch": 3.9356570186166597, "grad_norm": 2.45694637298584, "learning_rate": 0.0005082212257100149, "loss": 3.5443, "step": 57925 }, { "epoch": 3.935996738687322, "grad_norm": 2.2800347805023193, "learning_rate": 0.0005081787607011821, "loss": 3.2124, "step": 57930 }, { "epoch": 3.9363364587579834, "grad_norm": 1.9504477977752686, "learning_rate": 0.0005081362956923496, "loss": 3.5444, "step": 57935 }, { "epoch": 3.936676178828645, "grad_norm": 2.228757619857788, "learning_rate": 0.0005080938306835168, "loss": 3.361, "step": 57940 }, { "epoch": 3.9370158988993067, "grad_norm": 1.544216513633728, "learning_rate": 0.000508051365674684, "loss": 3.1271, "step": 57945 }, { "epoch": 3.9373556189699688, "grad_norm": 1.9789602756500244, "learning_rate": 0.0005080089006658514, "loss": 3.4352, "step": 57950 }, { "epoch": 3.9376953390406304, "grad_norm": 1.5811784267425537, "learning_rate": 0.0005079664356570186, "loss": 3.4615, "step": 57955 }, { "epoch": 3.938035059111292, "grad_norm": 1.870060682296753, "learning_rate": 0.0005079239706481858, 
"loss": 3.1634, "step": 57960 }, { "epoch": 3.938374779181954, "grad_norm": 1.83116614818573, "learning_rate": 0.0005078815056393532, "loss": 3.4923, "step": 57965 }, { "epoch": 3.9387144992526157, "grad_norm": 2.2768473625183105, "learning_rate": 0.0005078390406305205, "loss": 3.7467, "step": 57970 }, { "epoch": 3.9390542193232774, "grad_norm": 1.8062533140182495, "learning_rate": 0.0005077965756216878, "loss": 3.4347, "step": 57975 }, { "epoch": 3.9393939393939394, "grad_norm": 1.8054033517837524, "learning_rate": 0.000507754110612855, "loss": 3.2096, "step": 57980 }, { "epoch": 3.939733659464601, "grad_norm": 1.7632313966751099, "learning_rate": 0.0005077116456040223, "loss": 3.479, "step": 57985 }, { "epoch": 3.9400733795352627, "grad_norm": 1.837581992149353, "learning_rate": 0.0005076691805951896, "loss": 3.3694, "step": 57990 }, { "epoch": 3.940413099605925, "grad_norm": 1.7515244483947754, "learning_rate": 0.0005076267155863568, "loss": 3.311, "step": 57995 }, { "epoch": 3.9407528196765864, "grad_norm": 2.046194314956665, "learning_rate": 0.0005075842505775241, "loss": 3.2961, "step": 58000 }, { "epoch": 3.941092539747248, "grad_norm": 2.1073708534240723, "learning_rate": 0.0005075417855686915, "loss": 3.1636, "step": 58005 }, { "epoch": 3.94143225981791, "grad_norm": 1.9505887031555176, "learning_rate": 0.0005074993205598587, "loss": 3.4783, "step": 58010 }, { "epoch": 3.9417719798885718, "grad_norm": 1.8829123973846436, "learning_rate": 0.000507456855551026, "loss": 3.4321, "step": 58015 }, { "epoch": 3.9421116999592334, "grad_norm": 1.657309889793396, "learning_rate": 0.0005074143905421933, "loss": 3.4219, "step": 58020 }, { "epoch": 3.9424514200298955, "grad_norm": 1.9641224145889282, "learning_rate": 0.0005073719255333605, "loss": 3.3172, "step": 58025 }, { "epoch": 3.942791140100557, "grad_norm": 2.1640336513519287, "learning_rate": 0.0005073294605245277, "loss": 3.3815, "step": 58030 }, { "epoch": 3.9431308601712187, "grad_norm": 1.585364580154419, 
"learning_rate": 0.0005072869955156951, "loss": 3.3023, "step": 58035 }, { "epoch": 3.943470580241881, "grad_norm": 1.7280381917953491, "learning_rate": 0.0005072445305068624, "loss": 3.464, "step": 58040 }, { "epoch": 3.9438103003125424, "grad_norm": 1.4984689950942993, "learning_rate": 0.0005072020654980296, "loss": 3.1987, "step": 58045 }, { "epoch": 3.944150020383204, "grad_norm": 1.871848225593567, "learning_rate": 0.000507159600489197, "loss": 3.3004, "step": 58050 }, { "epoch": 3.944489740453866, "grad_norm": 1.6107380390167236, "learning_rate": 0.0005071171354803642, "loss": 3.5653, "step": 58055 }, { "epoch": 3.9448294605245278, "grad_norm": 1.8242355585098267, "learning_rate": 0.0005070746704715314, "loss": 3.573, "step": 58060 }, { "epoch": 3.9451691805951894, "grad_norm": 2.125203847885132, "learning_rate": 0.0005070322054626988, "loss": 3.3474, "step": 58065 }, { "epoch": 3.9455089006658515, "grad_norm": 1.7287122011184692, "learning_rate": 0.000506989740453866, "loss": 3.4377, "step": 58070 }, { "epoch": 3.945848620736513, "grad_norm": 1.6732354164123535, "learning_rate": 0.0005069472754450333, "loss": 3.6497, "step": 58075 }, { "epoch": 3.9461883408071747, "grad_norm": 1.5511280298233032, "learning_rate": 0.0005069048104362006, "loss": 3.4639, "step": 58080 }, { "epoch": 3.946528060877837, "grad_norm": 1.5452390909194946, "learning_rate": 0.0005068623454273679, "loss": 3.4112, "step": 58085 }, { "epoch": 3.9468677809484984, "grad_norm": 2.0350253582000732, "learning_rate": 0.0005068198804185351, "loss": 3.3193, "step": 58090 }, { "epoch": 3.94720750101916, "grad_norm": 1.622273325920105, "learning_rate": 0.0005067774154097024, "loss": 3.4504, "step": 58095 }, { "epoch": 3.947547221089822, "grad_norm": 1.811416745185852, "learning_rate": 0.0005067349504008697, "loss": 3.5865, "step": 58100 }, { "epoch": 3.9478869411604838, "grad_norm": 2.4905683994293213, "learning_rate": 0.0005066924853920369, "loss": 3.7001, "step": 58105 }, { "epoch": 
3.9482266612311454, "grad_norm": 2.1692440509796143, "learning_rate": 0.0005066500203832043, "loss": 3.2128, "step": 58110 }, { "epoch": 3.9485663813018075, "grad_norm": 2.0796072483062744, "learning_rate": 0.0005066075553743716, "loss": 3.5329, "step": 58115 }, { "epoch": 3.948906101372469, "grad_norm": 1.8860726356506348, "learning_rate": 0.0005065650903655388, "loss": 3.2319, "step": 58120 }, { "epoch": 3.9492458214431307, "grad_norm": 2.0374796390533447, "learning_rate": 0.0005065226253567061, "loss": 3.3233, "step": 58125 }, { "epoch": 3.949585541513793, "grad_norm": 2.023101329803467, "learning_rate": 0.0005064801603478733, "loss": 3.7752, "step": 58130 }, { "epoch": 3.9499252615844545, "grad_norm": 1.8558121919631958, "learning_rate": 0.0005064376953390406, "loss": 3.4758, "step": 58135 }, { "epoch": 3.950264981655116, "grad_norm": 2.1160030364990234, "learning_rate": 0.000506395230330208, "loss": 3.4492, "step": 58140 }, { "epoch": 3.950604701725778, "grad_norm": 1.7332805395126343, "learning_rate": 0.0005063527653213752, "loss": 3.4796, "step": 58145 }, { "epoch": 3.95094442179644, "grad_norm": 1.9893503189086914, "learning_rate": 0.0005063103003125425, "loss": 3.47, "step": 58150 }, { "epoch": 3.9512841418671014, "grad_norm": 1.6637026071548462, "learning_rate": 0.0005062678353037098, "loss": 3.4292, "step": 58155 }, { "epoch": 3.9516238619377635, "grad_norm": 1.726328730583191, "learning_rate": 0.000506225370294877, "loss": 3.6159, "step": 58160 }, { "epoch": 3.951963582008425, "grad_norm": 1.8860381841659546, "learning_rate": 0.0005061829052860442, "loss": 3.7289, "step": 58165 }, { "epoch": 3.9523033020790868, "grad_norm": 2.0078797340393066, "learning_rate": 0.0005061404402772116, "loss": 3.3269, "step": 58170 }, { "epoch": 3.952643022149749, "grad_norm": 1.8565313816070557, "learning_rate": 0.0005060979752683789, "loss": 3.2868, "step": 58175 }, { "epoch": 3.9529827422204105, "grad_norm": 1.778277039527893, "learning_rate": 0.0005060555102595461, 
"loss": 3.3676, "step": 58180 }, { "epoch": 3.953322462291072, "grad_norm": 2.0769076347351074, "learning_rate": 0.0005060130452507135, "loss": 3.4207, "step": 58185 }, { "epoch": 3.953662182361734, "grad_norm": 1.4929087162017822, "learning_rate": 0.0005059705802418807, "loss": 3.3551, "step": 58190 }, { "epoch": 3.954001902432396, "grad_norm": 1.9898772239685059, "learning_rate": 0.0005059281152330479, "loss": 3.3566, "step": 58195 }, { "epoch": 3.9543416225030574, "grad_norm": 1.7104365825653076, "learning_rate": 0.0005058856502242153, "loss": 3.2543, "step": 58200 }, { "epoch": 3.9546813425737195, "grad_norm": 2.1225593090057373, "learning_rate": 0.0005058431852153825, "loss": 3.5387, "step": 58205 }, { "epoch": 3.955021062644381, "grad_norm": 2.385911464691162, "learning_rate": 0.0005058007202065498, "loss": 3.2505, "step": 58210 }, { "epoch": 3.9553607827150428, "grad_norm": 1.9052082300186157, "learning_rate": 0.0005057582551977172, "loss": 3.2419, "step": 58215 }, { "epoch": 3.955700502785705, "grad_norm": 2.165283203125, "learning_rate": 0.0005057157901888844, "loss": 3.7085, "step": 58220 }, { "epoch": 3.9560402228563665, "grad_norm": 1.6268450021743774, "learning_rate": 0.0005056733251800516, "loss": 3.3156, "step": 58225 }, { "epoch": 3.956379942927028, "grad_norm": 1.7484723329544067, "learning_rate": 0.0005056308601712189, "loss": 3.3438, "step": 58230 }, { "epoch": 3.95671966299769, "grad_norm": 1.7575709819793701, "learning_rate": 0.0005055883951623862, "loss": 3.5981, "step": 58235 }, { "epoch": 3.957059383068352, "grad_norm": 1.9363148212432861, "learning_rate": 0.0005055459301535534, "loss": 3.4644, "step": 58240 }, { "epoch": 3.9573991031390134, "grad_norm": 1.8952656984329224, "learning_rate": 0.0005055034651447208, "loss": 3.5636, "step": 58245 }, { "epoch": 3.957738823209675, "grad_norm": 2.033689260482788, "learning_rate": 0.0005054610001358881, "loss": 3.3512, "step": 58250 }, { "epoch": 3.958078543280337, "grad_norm": 1.9480912685394287, 
"learning_rate": 0.0005054270281288219, "loss": 3.2332, "step": 58255 }, { "epoch": 3.958418263350999, "grad_norm": 1.9327378273010254, "learning_rate": 0.0005053845631199891, "loss": 3.383, "step": 58260 }, { "epoch": 3.9587579834216604, "grad_norm": 1.8159458637237549, "learning_rate": 0.0005053420981111563, "loss": 3.5601, "step": 58265 }, { "epoch": 3.9590977034923225, "grad_norm": 1.9443714618682861, "learning_rate": 0.0005052996331023238, "loss": 3.3867, "step": 58270 }, { "epoch": 3.959437423562984, "grad_norm": 2.015096426010132, "learning_rate": 0.000505257168093491, "loss": 3.1543, "step": 58275 }, { "epoch": 3.9597771436336457, "grad_norm": 1.8046987056732178, "learning_rate": 0.0005052147030846582, "loss": 3.5111, "step": 58280 }, { "epoch": 3.9601168637043074, "grad_norm": 2.037053108215332, "learning_rate": 0.0005051722380758256, "loss": 3.5401, "step": 58285 }, { "epoch": 3.9604565837749695, "grad_norm": 1.5774478912353516, "learning_rate": 0.0005051297730669928, "loss": 3.6529, "step": 58290 }, { "epoch": 3.960796303845631, "grad_norm": 1.5336580276489258, "learning_rate": 0.00050508730805816, "loss": 3.4116, "step": 58295 }, { "epoch": 3.9611360239162927, "grad_norm": 3.0413451194763184, "learning_rate": 0.0005050448430493274, "loss": 3.4372, "step": 58300 }, { "epoch": 3.961475743986955, "grad_norm": 1.9394663572311401, "learning_rate": 0.0005050023780404947, "loss": 3.5024, "step": 58305 }, { "epoch": 3.9618154640576164, "grad_norm": 1.6504771709442139, "learning_rate": 0.0005049599130316619, "loss": 3.2804, "step": 58310 }, { "epoch": 3.962155184128278, "grad_norm": 1.917162299156189, "learning_rate": 0.0005049174480228292, "loss": 3.7459, "step": 58315 }, { "epoch": 3.96249490419894, "grad_norm": 1.9850852489471436, "learning_rate": 0.0005048749830139965, "loss": 3.4291, "step": 58320 }, { "epoch": 3.9628346242696018, "grad_norm": 1.6055864095687866, "learning_rate": 0.0005048325180051637, "loss": 3.41, "step": 58325 }, { "epoch": 
3.9631743443402634, "grad_norm": 2.57232928276062, "learning_rate": 0.000504790052996331, "loss": 3.2615, "step": 58330 }, { "epoch": 3.9635140644109255, "grad_norm": 2.5671603679656982, "learning_rate": 0.0005047475879874983, "loss": 3.4399, "step": 58335 }, { "epoch": 3.963853784481587, "grad_norm": 1.4535350799560547, "learning_rate": 0.0005047051229786656, "loss": 3.4816, "step": 58340 }, { "epoch": 3.9641935045522487, "grad_norm": 2.14787220954895, "learning_rate": 0.0005046626579698329, "loss": 3.2181, "step": 58345 }, { "epoch": 3.964533224622911, "grad_norm": 1.5833797454833984, "learning_rate": 0.0005046201929610002, "loss": 3.6373, "step": 58350 }, { "epoch": 3.9648729446935724, "grad_norm": 2.0482470989227295, "learning_rate": 0.0005045777279521674, "loss": 3.1815, "step": 58355 }, { "epoch": 3.965212664764234, "grad_norm": 1.771802306175232, "learning_rate": 0.0005045352629433347, "loss": 3.3553, "step": 58360 }, { "epoch": 3.965552384834896, "grad_norm": 1.8100745677947998, "learning_rate": 0.0005044927979345019, "loss": 3.593, "step": 58365 }, { "epoch": 3.9658921049055578, "grad_norm": 2.1447534561157227, "learning_rate": 0.0005044503329256692, "loss": 3.4083, "step": 58370 }, { "epoch": 3.9662318249762194, "grad_norm": 1.9535845518112183, "learning_rate": 0.0005044078679168366, "loss": 3.4021, "step": 58375 }, { "epoch": 3.9665715450468815, "grad_norm": 1.991908073425293, "learning_rate": 0.0005043654029080038, "loss": 3.4634, "step": 58380 }, { "epoch": 3.966911265117543, "grad_norm": 1.8702709674835205, "learning_rate": 0.0005043229378991711, "loss": 3.5964, "step": 58385 }, { "epoch": 3.9672509851882047, "grad_norm": 1.8846485614776611, "learning_rate": 0.0005042804728903384, "loss": 3.5499, "step": 58390 }, { "epoch": 3.967590705258867, "grad_norm": 1.629486083984375, "learning_rate": 0.0005042380078815056, "loss": 3.525, "step": 58395 }, { "epoch": 3.9679304253295284, "grad_norm": 1.766053557395935, "learning_rate": 0.0005041955428726728, 
"loss": 3.3453, "step": 58400 }, { "epoch": 3.96827014540019, "grad_norm": 1.5108788013458252, "learning_rate": 0.0005041530778638403, "loss": 3.3331, "step": 58405 }, { "epoch": 3.968609865470852, "grad_norm": 1.735444188117981, "learning_rate": 0.0005041106128550075, "loss": 3.3753, "step": 58410 }, { "epoch": 3.968949585541514, "grad_norm": 1.8957947492599487, "learning_rate": 0.0005040681478461747, "loss": 3.3123, "step": 58415 }, { "epoch": 3.9692893056121754, "grad_norm": 2.2556121349334717, "learning_rate": 0.0005040256828373421, "loss": 3.3949, "step": 58420 }, { "epoch": 3.9696290256828375, "grad_norm": 1.8582119941711426, "learning_rate": 0.0005039832178285093, "loss": 3.6451, "step": 58425 }, { "epoch": 3.969968745753499, "grad_norm": 2.7050397396087646, "learning_rate": 0.0005039407528196765, "loss": 3.4759, "step": 58430 }, { "epoch": 3.9703084658241607, "grad_norm": 1.7056947946548462, "learning_rate": 0.0005038982878108439, "loss": 3.238, "step": 58435 }, { "epoch": 3.970648185894823, "grad_norm": 1.9623956680297852, "learning_rate": 0.0005038558228020112, "loss": 3.4176, "step": 58440 }, { "epoch": 3.9709879059654845, "grad_norm": 2.0634567737579346, "learning_rate": 0.0005038133577931784, "loss": 3.3784, "step": 58445 }, { "epoch": 3.971327626036146, "grad_norm": 2.2676494121551514, "learning_rate": 0.0005037708927843458, "loss": 3.0426, "step": 58450 }, { "epoch": 3.971667346106808, "grad_norm": 2.0606698989868164, "learning_rate": 0.000503728427775513, "loss": 3.4723, "step": 58455 }, { "epoch": 3.97200706617747, "grad_norm": 2.0799405574798584, "learning_rate": 0.0005036859627666802, "loss": 3.4055, "step": 58460 }, { "epoch": 3.9723467862481314, "grad_norm": 2.1716766357421875, "learning_rate": 0.0005036434977578475, "loss": 3.4261, "step": 58465 }, { "epoch": 3.9726865063187935, "grad_norm": 2.2849438190460205, "learning_rate": 0.0005036010327490148, "loss": 3.3159, "step": 58470 }, { "epoch": 3.973026226389455, "grad_norm": 
1.7616909742355347, "learning_rate": 0.0005035585677401821, "loss": 3.3186, "step": 58475 }, { "epoch": 3.9733659464601168, "grad_norm": 1.9803451299667358, "learning_rate": 0.0005035161027313494, "loss": 3.5041, "step": 58480 }, { "epoch": 3.973705666530779, "grad_norm": 1.786329746246338, "learning_rate": 0.0005034736377225167, "loss": 3.167, "step": 58485 }, { "epoch": 3.9740453866014405, "grad_norm": 1.4700994491577148, "learning_rate": 0.0005034311727136839, "loss": 3.4917, "step": 58490 }, { "epoch": 3.974385106672102, "grad_norm": 1.7044674158096313, "learning_rate": 0.0005033887077048512, "loss": 3.284, "step": 58495 }, { "epoch": 3.974724826742764, "grad_norm": 2.164936065673828, "learning_rate": 0.0005033462426960184, "loss": 3.3764, "step": 58500 }, { "epoch": 3.975064546813426, "grad_norm": 1.9518840312957764, "learning_rate": 0.0005033037776871857, "loss": 3.2344, "step": 58505 }, { "epoch": 3.9754042668840874, "grad_norm": 1.619116187095642, "learning_rate": 0.0005032613126783531, "loss": 3.5441, "step": 58510 }, { "epoch": 3.9757439869547495, "grad_norm": 1.831943392753601, "learning_rate": 0.0005032188476695203, "loss": 3.4871, "step": 58515 }, { "epoch": 3.976083707025411, "grad_norm": 2.153390645980835, "learning_rate": 0.0005031763826606877, "loss": 3.4369, "step": 58520 }, { "epoch": 3.9764234270960728, "grad_norm": 1.7720527648925781, "learning_rate": 0.0005031339176518549, "loss": 3.5558, "step": 58525 }, { "epoch": 3.976763147166735, "grad_norm": 1.9388600587844849, "learning_rate": 0.0005030914526430221, "loss": 3.3048, "step": 58530 }, { "epoch": 3.9771028672373965, "grad_norm": 2.3433837890625, "learning_rate": 0.0005030489876341895, "loss": 3.4056, "step": 58535 }, { "epoch": 3.977442587308058, "grad_norm": 1.989569902420044, "learning_rate": 0.0005030065226253567, "loss": 3.4638, "step": 58540 }, { "epoch": 3.97778230737872, "grad_norm": 2.3658814430236816, "learning_rate": 0.000502964057616524, "loss": 3.3015, "step": 58545 }, { 
"epoch": 3.978122027449382, "grad_norm": 2.1984171867370605, "learning_rate": 0.0005029215926076914, "loss": 3.5829, "step": 58550 }, { "epoch": 3.9784617475200434, "grad_norm": 1.6993647813796997, "learning_rate": 0.0005028791275988586, "loss": 3.6836, "step": 58555 }, { "epoch": 3.9788014675907055, "grad_norm": 1.7059122323989868, "learning_rate": 0.0005028366625900258, "loss": 3.4591, "step": 58560 }, { "epoch": 3.979141187661367, "grad_norm": 1.3912248611450195, "learning_rate": 0.0005027941975811931, "loss": 3.4191, "step": 58565 }, { "epoch": 3.979480907732029, "grad_norm": 1.952986478805542, "learning_rate": 0.0005027517325723604, "loss": 3.4476, "step": 58570 }, { "epoch": 3.979820627802691, "grad_norm": 2.2954370975494385, "learning_rate": 0.0005027092675635276, "loss": 3.3607, "step": 58575 }, { "epoch": 3.9801603478733525, "grad_norm": 1.479718804359436, "learning_rate": 0.000502666802554695, "loss": 3.4224, "step": 58580 }, { "epoch": 3.980500067944014, "grad_norm": 1.7202385663986206, "learning_rate": 0.0005026243375458623, "loss": 3.4984, "step": 58585 }, { "epoch": 3.9808397880146758, "grad_norm": 2.1667356491088867, "learning_rate": 0.0005025818725370295, "loss": 3.3254, "step": 58590 }, { "epoch": 3.981179508085338, "grad_norm": 2.4082305431365967, "learning_rate": 0.0005025394075281968, "loss": 3.4359, "step": 58595 }, { "epoch": 3.9815192281559995, "grad_norm": 1.510634422302246, "learning_rate": 0.000502496942519364, "loss": 3.5365, "step": 58600 }, { "epoch": 3.981858948226661, "grad_norm": 1.4106686115264893, "learning_rate": 0.0005024544775105313, "loss": 3.3865, "step": 58605 }, { "epoch": 3.982198668297323, "grad_norm": 1.8724315166473389, "learning_rate": 0.0005024120125016986, "loss": 3.4087, "step": 58610 }, { "epoch": 3.982538388367985, "grad_norm": 1.6285817623138428, "learning_rate": 0.0005023695474928659, "loss": 3.3214, "step": 58615 }, { "epoch": 3.9828781084386464, "grad_norm": 1.6795151233673096, "learning_rate": 
0.0005023270824840332, "loss": 3.4154, "step": 58620 }, { "epoch": 3.983217828509308, "grad_norm": 1.5072542428970337, "learning_rate": 0.0005022846174752005, "loss": 3.2693, "step": 58625 }, { "epoch": 3.98355754857997, "grad_norm": 1.4363523721694946, "learning_rate": 0.0005022421524663677, "loss": 3.4533, "step": 58630 }, { "epoch": 3.9838972686506318, "grad_norm": 1.6468067169189453, "learning_rate": 0.000502199687457535, "loss": 3.2606, "step": 58635 }, { "epoch": 3.9842369887212934, "grad_norm": 1.754982352256775, "learning_rate": 0.0005021572224487023, "loss": 3.5696, "step": 58640 }, { "epoch": 3.9845767087919555, "grad_norm": 1.8945062160491943, "learning_rate": 0.0005021147574398695, "loss": 3.3397, "step": 58645 }, { "epoch": 3.984916428862617, "grad_norm": 1.8077661991119385, "learning_rate": 0.0005020722924310369, "loss": 3.5431, "step": 58650 }, { "epoch": 3.9852561489332787, "grad_norm": 1.447519063949585, "learning_rate": 0.0005020298274222042, "loss": 3.3623, "step": 58655 }, { "epoch": 3.985595869003941, "grad_norm": 1.6732162237167358, "learning_rate": 0.0005019873624133714, "loss": 3.2838, "step": 58660 }, { "epoch": 3.9859355890746024, "grad_norm": 1.872635841369629, "learning_rate": 0.0005019448974045386, "loss": 3.4347, "step": 58665 }, { "epoch": 3.986275309145264, "grad_norm": 2.2488110065460205, "learning_rate": 0.000501902432395706, "loss": 3.2638, "step": 58670 }, { "epoch": 3.986615029215926, "grad_norm": 1.682426929473877, "learning_rate": 0.0005018599673868732, "loss": 3.5327, "step": 58675 }, { "epoch": 3.9869547492865878, "grad_norm": 2.295100450515747, "learning_rate": 0.0005018175023780404, "loss": 3.3524, "step": 58680 }, { "epoch": 3.9872944693572494, "grad_norm": 2.373892068862915, "learning_rate": 0.0005017750373692079, "loss": 3.1149, "step": 58685 }, { "epoch": 3.9876341894279115, "grad_norm": 2.017136335372925, "learning_rate": 0.0005017325723603751, "loss": 3.5162, "step": 58690 }, { "epoch": 3.987973909498573, 
"grad_norm": 1.3209644556045532, "learning_rate": 0.0005016901073515423, "loss": 3.5442, "step": 58695 }, { "epoch": 3.9883136295692347, "grad_norm": 1.6571155786514282, "learning_rate": 0.0005016476423427097, "loss": 3.2732, "step": 58700 }, { "epoch": 3.988653349639897, "grad_norm": 1.5672557353973389, "learning_rate": 0.0005016051773338769, "loss": 3.512, "step": 58705 }, { "epoch": 3.9889930697105584, "grad_norm": 1.6973483562469482, "learning_rate": 0.0005015627123250441, "loss": 3.3347, "step": 58710 }, { "epoch": 3.98933278978122, "grad_norm": 1.769219160079956, "learning_rate": 0.0005015202473162114, "loss": 3.5363, "step": 58715 }, { "epoch": 3.989672509851882, "grad_norm": 1.6240588426589966, "learning_rate": 0.0005014777823073788, "loss": 3.6738, "step": 58720 }, { "epoch": 3.990012229922544, "grad_norm": 2.1809639930725098, "learning_rate": 0.000501435317298546, "loss": 3.1856, "step": 58725 }, { "epoch": 3.9903519499932054, "grad_norm": 2.1799979209899902, "learning_rate": 0.0005013928522897133, "loss": 3.2831, "step": 58730 }, { "epoch": 3.9906916700638675, "grad_norm": 1.9402096271514893, "learning_rate": 0.0005013503872808806, "loss": 3.2155, "step": 58735 }, { "epoch": 3.991031390134529, "grad_norm": 1.5087884664535522, "learning_rate": 0.0005013079222720478, "loss": 3.3868, "step": 58740 }, { "epoch": 3.9913711102051908, "grad_norm": 1.7007052898406982, "learning_rate": 0.0005012654572632151, "loss": 3.3224, "step": 58745 }, { "epoch": 3.991710830275853, "grad_norm": 1.7570006847381592, "learning_rate": 0.0005012229922543823, "loss": 3.3218, "step": 58750 }, { "epoch": 3.9920505503465145, "grad_norm": 2.075120687484741, "learning_rate": 0.0005011805272455497, "loss": 3.4851, "step": 58755 }, { "epoch": 3.992390270417176, "grad_norm": 2.043477773666382, "learning_rate": 0.000501138062236717, "loss": 3.289, "step": 58760 }, { "epoch": 3.992729990487838, "grad_norm": 2.373725652694702, "learning_rate": 0.0005010955972278842, "loss": 3.7145, "step": 
58765 }, { "epoch": 3.9930697105585, "grad_norm": 2.088200807571411, "learning_rate": 0.0005010531322190515, "loss": 3.2483, "step": 58770 }, { "epoch": 3.9934094306291614, "grad_norm": 2.145493745803833, "learning_rate": 0.0005010106672102188, "loss": 3.1514, "step": 58775 }, { "epoch": 3.9937491506998235, "grad_norm": 1.8995417356491089, "learning_rate": 0.000500968202201386, "loss": 3.2799, "step": 58780 }, { "epoch": 3.994088870770485, "grad_norm": 1.8358880281448364, "learning_rate": 0.0005009257371925533, "loss": 3.4776, "step": 58785 }, { "epoch": 3.9944285908411468, "grad_norm": 1.9961907863616943, "learning_rate": 0.0005008832721837207, "loss": 3.2433, "step": 58790 }, { "epoch": 3.994768310911809, "grad_norm": 1.9928187131881714, "learning_rate": 0.0005008408071748879, "loss": 3.4841, "step": 58795 }, { "epoch": 3.9951080309824705, "grad_norm": 2.166734457015991, "learning_rate": 0.0005007983421660551, "loss": 3.5253, "step": 58800 }, { "epoch": 3.995447751053132, "grad_norm": 2.2625482082366943, "learning_rate": 0.0005007558771572225, "loss": 3.6126, "step": 58805 }, { "epoch": 3.995787471123794, "grad_norm": 1.5080978870391846, "learning_rate": 0.0005007134121483897, "loss": 3.3599, "step": 58810 }, { "epoch": 3.996127191194456, "grad_norm": 1.9079408645629883, "learning_rate": 0.0005006709471395569, "loss": 3.2418, "step": 58815 }, { "epoch": 3.9964669112651174, "grad_norm": 2.3855104446411133, "learning_rate": 0.0005006284821307243, "loss": 3.6585, "step": 58820 }, { "epoch": 3.9968066313357795, "grad_norm": 1.702027440071106, "learning_rate": 0.0005005860171218916, "loss": 3.3628, "step": 58825 }, { "epoch": 3.997146351406441, "grad_norm": 1.988870620727539, "learning_rate": 0.0005005435521130588, "loss": 3.486, "step": 58830 }, { "epoch": 3.9974860714771028, "grad_norm": 1.7623016834259033, "learning_rate": 0.0005005010871042262, "loss": 3.2561, "step": 58835 }, { "epoch": 3.997825791547765, "grad_norm": 2.184257984161377, "learning_rate": 
0.0005004586220953934, "loss": 3.398, "step": 58840 }, { "epoch": 3.9981655116184265, "grad_norm": 1.3158745765686035, "learning_rate": 0.0005004161570865606, "loss": 3.3876, "step": 58845 }, { "epoch": 3.998505231689088, "grad_norm": 1.9084044694900513, "learning_rate": 0.0005003736920777279, "loss": 3.3927, "step": 58850 }, { "epoch": 3.99884495175975, "grad_norm": 1.4599671363830566, "learning_rate": 0.0005003312270688952, "loss": 3.4094, "step": 58855 }, { "epoch": 3.999184671830412, "grad_norm": 2.0745158195495605, "learning_rate": 0.0005002887620600626, "loss": 3.6027, "step": 58860 }, { "epoch": 3.9995243919010735, "grad_norm": 2.2420120239257812, "learning_rate": 0.0005002462970512298, "loss": 3.6999, "step": 58865 }, { "epoch": 3.9998641119717355, "grad_norm": 2.663557767868042, "learning_rate": 0.0005002038320423971, "loss": 3.3203, "step": 58870 }, { "epoch": 4.0, "eval_bertscore": { "f1": 0.819475436218719, "precision": 0.8228954839687938, "recall": 0.8170868782112679 }, "eval_bleu_4": 0.008924493686938772, "eval_exact_match": 9.690861517588914e-05, "eval_loss": 3.418769359588623, "eval_meteor": 0.0893660911583074, "eval_rouge": { "rouge1": 0.12071674353284298, "rouge2": 0.012535990201015666, "rougeL": 0.10101264182190366, "rougeLsum": 0.10102530398544304 }, "eval_runtime": 1315.0011, "eval_samples_per_second": 7.847, "eval_steps_per_second": 0.981, "step": 58872 }, { "epoch": 4.000203832042397, "grad_norm": 1.993991494178772, "learning_rate": 0.0005001613670335644, "loss": 3.6283, "step": 58875 }, { "epoch": 4.000543552113059, "grad_norm": 2.1039206981658936, "learning_rate": 0.0005001189020247316, "loss": 3.4205, "step": 58880 }, { "epoch": 4.000883272183721, "grad_norm": 1.789146900177002, "learning_rate": 0.0005000764370158989, "loss": 3.2621, "step": 58885 }, { "epoch": 4.001222992254382, "grad_norm": 1.9819302558898926, "learning_rate": 0.0005000339720070662, "loss": 3.2931, "step": 58890 }, { "epoch": 4.001562712325044, "grad_norm": 
1.6624367237091064, "learning_rate": 0.0004999915069982335, "loss": 3.3205, "step": 58895 }, { "epoch": 4.001902432395706, "grad_norm": 1.7999123334884644, "learning_rate": 0.0004999490419894007, "loss": 3.3882, "step": 58900 }, { "epoch": 4.002242152466367, "grad_norm": 1.7676951885223389, "learning_rate": 0.000499906576980568, "loss": 3.2305, "step": 58905 }, { "epoch": 4.0025818725370295, "grad_norm": 1.3641835451126099, "learning_rate": 0.0004998641119717353, "loss": 3.2607, "step": 58910 }, { "epoch": 4.0029215926076915, "grad_norm": 2.6148555278778076, "learning_rate": 0.0004998216469629025, "loss": 3.2816, "step": 58915 }, { "epoch": 4.003261312678353, "grad_norm": 1.9744110107421875, "learning_rate": 0.0004997791819540699, "loss": 3.5325, "step": 58920 }, { "epoch": 4.003601032749015, "grad_norm": 1.5822240114212036, "learning_rate": 0.0004997367169452371, "loss": 3.2421, "step": 58925 }, { "epoch": 4.003940752819677, "grad_norm": 2.208583354949951, "learning_rate": 0.0004996942519364044, "loss": 3.6087, "step": 58930 }, { "epoch": 4.004280472890338, "grad_norm": 2.1080987453460693, "learning_rate": 0.0004996517869275717, "loss": 3.3373, "step": 58935 }, { "epoch": 4.004620192961, "grad_norm": 1.549288272857666, "learning_rate": 0.000499609321918739, "loss": 3.4716, "step": 58940 }, { "epoch": 4.004959913031662, "grad_norm": 1.5371408462524414, "learning_rate": 0.0004995668569099062, "loss": 3.3225, "step": 58945 }, { "epoch": 4.005299633102323, "grad_norm": 1.3884999752044678, "learning_rate": 0.0004995243919010735, "loss": 3.316, "step": 58950 }, { "epoch": 4.0056393531729855, "grad_norm": 2.1580073833465576, "learning_rate": 0.0004994819268922408, "loss": 3.2802, "step": 58955 }, { "epoch": 4.0059790732436475, "grad_norm": 2.112473726272583, "learning_rate": 0.0004994394618834081, "loss": 3.3893, "step": 58960 }, { "epoch": 4.006318793314309, "grad_norm": 1.7176121473312378, "learning_rate": 0.0004993969968745753, "loss": 3.1843, "step": 58965 }, { 
"epoch": 4.006658513384971, "grad_norm": 1.6041663885116577, "learning_rate": 0.0004993545318657427, "loss": 3.1705, "step": 58970 }, { "epoch": 4.006998233455633, "grad_norm": 2.0912368297576904, "learning_rate": 0.0004993120668569099, "loss": 3.4973, "step": 58975 }, { "epoch": 4.007337953526294, "grad_norm": 1.9197882413864136, "learning_rate": 0.0004992696018480772, "loss": 3.4456, "step": 58980 }, { "epoch": 4.007677673596956, "grad_norm": 1.9488123655319214, "learning_rate": 0.0004992271368392445, "loss": 3.394, "step": 58985 }, { "epoch": 4.008017393667618, "grad_norm": 1.6788676977157593, "learning_rate": 0.0004991846718304118, "loss": 3.378, "step": 58990 }, { "epoch": 4.008357113738279, "grad_norm": 1.7644860744476318, "learning_rate": 0.000499142206821579, "loss": 3.3378, "step": 58995 }, { "epoch": 4.0086968338089415, "grad_norm": 1.761858582496643, "learning_rate": 0.0004990997418127463, "loss": 3.5871, "step": 59000 }, { "epoch": 4.009036553879604, "grad_norm": 1.6232753992080688, "learning_rate": 0.0004990572768039136, "loss": 3.2811, "step": 59005 }, { "epoch": 4.009376273950265, "grad_norm": 1.6488051414489746, "learning_rate": 0.0004990148117950808, "loss": 3.3735, "step": 59010 }, { "epoch": 4.009715994020927, "grad_norm": 2.4527652263641357, "learning_rate": 0.0004989723467862481, "loss": 3.1839, "step": 59015 }, { "epoch": 4.010055714091589, "grad_norm": 1.414353370666504, "learning_rate": 0.0004989298817774155, "loss": 3.6705, "step": 59020 }, { "epoch": 4.01039543416225, "grad_norm": 2.092477560043335, "learning_rate": 0.0004988874167685827, "loss": 3.4381, "step": 59025 }, { "epoch": 4.010735154232912, "grad_norm": 1.7357650995254517, "learning_rate": 0.00049884495175975, "loss": 3.2538, "step": 59030 }, { "epoch": 4.011074874303574, "grad_norm": 1.76509428024292, "learning_rate": 0.0004988024867509173, "loss": 3.2837, "step": 59035 }, { "epoch": 4.011414594374235, "grad_norm": 1.9218990802764893, "learning_rate": 0.0004987600217420845, 
"loss": 3.2105, "step": 59040 }, { "epoch": 4.0117543144448975, "grad_norm": 1.9223743677139282, "learning_rate": 0.0004987175567332518, "loss": 3.5192, "step": 59045 }, { "epoch": 4.01209403451556, "grad_norm": 1.9378780126571655, "learning_rate": 0.0004986750917244191, "loss": 3.347, "step": 59050 }, { "epoch": 4.012433754586221, "grad_norm": 2.1289093494415283, "learning_rate": 0.0004986326267155864, "loss": 3.3459, "step": 59055 }, { "epoch": 4.012773474656883, "grad_norm": 1.8348671197891235, "learning_rate": 0.0004985901617067536, "loss": 3.4398, "step": 59060 }, { "epoch": 4.013113194727545, "grad_norm": 1.768241047859192, "learning_rate": 0.0004985476966979209, "loss": 3.4831, "step": 59065 }, { "epoch": 4.013452914798206, "grad_norm": 1.8566452264785767, "learning_rate": 0.0004985052316890882, "loss": 3.3136, "step": 59070 }, { "epoch": 4.013792634868868, "grad_norm": 1.6128654479980469, "learning_rate": 0.0004984627666802555, "loss": 3.1468, "step": 59075 }, { "epoch": 4.01413235493953, "grad_norm": 2.067619562149048, "learning_rate": 0.0004984203016714227, "loss": 3.4145, "step": 59080 }, { "epoch": 4.014472075010191, "grad_norm": 2.1641056537628174, "learning_rate": 0.0004983778366625901, "loss": 3.3944, "step": 59085 }, { "epoch": 4.0148117950808535, "grad_norm": 1.6611675024032593, "learning_rate": 0.0004983353716537573, "loss": 3.3481, "step": 59090 }, { "epoch": 4.015151515151516, "grad_norm": 1.5659812688827515, "learning_rate": 0.0004982929066449246, "loss": 3.423, "step": 59095 }, { "epoch": 4.015491235222177, "grad_norm": 1.7261886596679688, "learning_rate": 0.0004982504416360918, "loss": 3.4243, "step": 59100 }, { "epoch": 4.015830955292839, "grad_norm": 1.7681808471679688, "learning_rate": 0.0004982079766272592, "loss": 3.2718, "step": 59105 }, { "epoch": 4.016170675363501, "grad_norm": 1.8472890853881836, "learning_rate": 0.0004981655116184264, "loss": 3.3802, "step": 59110 }, { "epoch": 4.016510395434162, "grad_norm": 2.273470640182495, 
"learning_rate": 0.0004981230466095936, "loss": 3.2208, "step": 59115 }, { "epoch": 4.016850115504824, "grad_norm": 2.1271374225616455, "learning_rate": 0.000498080581600761, "loss": 3.3632, "step": 59120 }, { "epoch": 4.017189835575485, "grad_norm": 2.2581021785736084, "learning_rate": 0.0004980381165919283, "loss": 3.1632, "step": 59125 }, { "epoch": 4.017529555646147, "grad_norm": 1.3678228855133057, "learning_rate": 0.0004979956515830955, "loss": 3.5183, "step": 59130 }, { "epoch": 4.0178692757168095, "grad_norm": 2.360706090927124, "learning_rate": 0.0004979531865742629, "loss": 3.5396, "step": 59135 }, { "epoch": 4.018208995787471, "grad_norm": 2.2899200916290283, "learning_rate": 0.0004979107215654301, "loss": 3.3804, "step": 59140 }, { "epoch": 4.018548715858133, "grad_norm": 1.8226630687713623, "learning_rate": 0.0004978682565565974, "loss": 3.3198, "step": 59145 }, { "epoch": 4.018888435928795, "grad_norm": 1.7100951671600342, "learning_rate": 0.0004978257915477646, "loss": 3.3642, "step": 59150 }, { "epoch": 4.019228155999456, "grad_norm": 1.2341402769088745, "learning_rate": 0.000497783326538932, "loss": 3.5497, "step": 59155 }, { "epoch": 4.019567876070118, "grad_norm": 1.740242838859558, "learning_rate": 0.0004977408615300992, "loss": 3.1822, "step": 59160 }, { "epoch": 4.01990759614078, "grad_norm": 1.6463303565979004, "learning_rate": 0.0004976983965212664, "loss": 3.4484, "step": 59165 }, { "epoch": 4.020247316211441, "grad_norm": 1.4280318021774292, "learning_rate": 0.0004976559315124338, "loss": 3.2674, "step": 59170 }, { "epoch": 4.0205870362821035, "grad_norm": 2.4192535877227783, "learning_rate": 0.0004976134665036011, "loss": 3.5926, "step": 59175 }, { "epoch": 4.0209267563527655, "grad_norm": 1.8494932651519775, "learning_rate": 0.0004975710014947683, "loss": 3.199, "step": 59180 }, { "epoch": 4.021266476423427, "grad_norm": 2.319608211517334, "learning_rate": 0.0004975285364859355, "loss": 3.4644, "step": 59185 }, { "epoch": 
4.021606196494089, "grad_norm": 1.414857029914856, "learning_rate": 0.0004974860714771029, "loss": 3.5753, "step": 59190 }, { "epoch": 4.021945916564751, "grad_norm": 1.507441520690918, "learning_rate": 0.0004974436064682701, "loss": 3.4696, "step": 59195 }, { "epoch": 4.022285636635412, "grad_norm": 1.8758220672607422, "learning_rate": 0.0004974011414594374, "loss": 3.4681, "step": 59200 }, { "epoch": 4.022625356706074, "grad_norm": 1.8487558364868164, "learning_rate": 0.0004973586764506048, "loss": 3.5087, "step": 59205 }, { "epoch": 4.022965076776736, "grad_norm": 1.7996782064437866, "learning_rate": 0.000497316211441772, "loss": 3.5079, "step": 59210 }, { "epoch": 4.023304796847397, "grad_norm": 1.9071885347366333, "learning_rate": 0.0004972737464329392, "loss": 3.3679, "step": 59215 }, { "epoch": 4.0236445169180595, "grad_norm": 1.828254222869873, "learning_rate": 0.0004972312814241066, "loss": 3.3903, "step": 59220 }, { "epoch": 4.0239842369887215, "grad_norm": 2.2144832611083984, "learning_rate": 0.0004971888164152738, "loss": 3.3676, "step": 59225 }, { "epoch": 4.024323957059383, "grad_norm": 1.3344557285308838, "learning_rate": 0.0004971463514064411, "loss": 3.5581, "step": 59230 }, { "epoch": 4.024663677130045, "grad_norm": 1.6497802734375, "learning_rate": 0.0004971038863976083, "loss": 3.4415, "step": 59235 }, { "epoch": 4.025003397200707, "grad_norm": 1.5569651126861572, "learning_rate": 0.0004970614213887757, "loss": 3.3711, "step": 59240 }, { "epoch": 4.025343117271368, "grad_norm": 1.8042292594909668, "learning_rate": 0.0004970189563799429, "loss": 3.5323, "step": 59245 }, { "epoch": 4.02568283734203, "grad_norm": 1.6315510272979736, "learning_rate": 0.0004969764913711102, "loss": 3.47, "step": 59250 }, { "epoch": 4.026022557412692, "grad_norm": 1.6252825260162354, "learning_rate": 0.0004969340263622775, "loss": 3.3687, "step": 59255 }, { "epoch": 4.026362277483353, "grad_norm": 1.558652639389038, "learning_rate": 0.0004968915613534448, "loss": 
3.3536, "step": 59260 }, { "epoch": 4.0267019975540155, "grad_norm": 1.6208709478378296, "learning_rate": 0.000496849096344612, "loss": 3.5589, "step": 59265 }, { "epoch": 4.0270417176246776, "grad_norm": 1.7768884897232056, "learning_rate": 0.0004968066313357793, "loss": 3.4575, "step": 59270 }, { "epoch": 4.027381437695339, "grad_norm": 2.114823579788208, "learning_rate": 0.0004967641663269466, "loss": 3.5297, "step": 59275 }, { "epoch": 4.027721157766001, "grad_norm": 1.839556097984314, "learning_rate": 0.0004967217013181139, "loss": 3.2793, "step": 59280 }, { "epoch": 4.028060877836663, "grad_norm": 2.1142423152923584, "learning_rate": 0.0004966792363092811, "loss": 3.469, "step": 59285 }, { "epoch": 4.028400597907324, "grad_norm": 1.5260629653930664, "learning_rate": 0.0004966367713004485, "loss": 3.2308, "step": 59290 }, { "epoch": 4.028740317977986, "grad_norm": 2.2351062297821045, "learning_rate": 0.0004965943062916157, "loss": 3.3777, "step": 59295 }, { "epoch": 4.029080038048648, "grad_norm": 1.4859565496444702, "learning_rate": 0.000496551841282783, "loss": 3.3304, "step": 59300 }, { "epoch": 4.029419758119309, "grad_norm": 1.6617647409439087, "learning_rate": 0.0004965093762739503, "loss": 3.4195, "step": 59305 }, { "epoch": 4.0297594781899715, "grad_norm": 1.83713960647583, "learning_rate": 0.0004964669112651176, "loss": 3.2, "step": 59310 }, { "epoch": 4.030099198260634, "grad_norm": 1.8123712539672852, "learning_rate": 0.0004964244462562848, "loss": 3.2886, "step": 59315 }, { "epoch": 4.030438918331295, "grad_norm": 1.7204588651657104, "learning_rate": 0.0004963819812474521, "loss": 3.4857, "step": 59320 }, { "epoch": 4.030778638401957, "grad_norm": 2.017787218093872, "learning_rate": 0.0004963395162386194, "loss": 3.3416, "step": 59325 }, { "epoch": 4.031118358472619, "grad_norm": 1.4988703727722168, "learning_rate": 0.0004962970512297867, "loss": 3.4991, "step": 59330 }, { "epoch": 4.03145807854328, "grad_norm": 2.0062777996063232, "learning_rate": 
0.000496254586220954, "loss": 3.4579, "step": 59335 }, { "epoch": 4.031797798613942, "grad_norm": 1.6878724098205566, "learning_rate": 0.0004962121212121212, "loss": 3.2894, "step": 59340 }, { "epoch": 4.032137518684604, "grad_norm": 2.1917881965637207, "learning_rate": 0.0004961696562032885, "loss": 3.4686, "step": 59345 }, { "epoch": 4.032477238755265, "grad_norm": 1.8084602355957031, "learning_rate": 0.0004961271911944557, "loss": 3.3549, "step": 59350 }, { "epoch": 4.0328169588259275, "grad_norm": 2.1087772846221924, "learning_rate": 0.0004960847261856231, "loss": 3.459, "step": 59355 }, { "epoch": 4.03315667889659, "grad_norm": 2.375896692276001, "learning_rate": 0.0004960422611767904, "loss": 3.2462, "step": 59360 }, { "epoch": 4.033496398967251, "grad_norm": 1.9529855251312256, "learning_rate": 0.0004959997961679576, "loss": 3.3606, "step": 59365 }, { "epoch": 4.033836119037913, "grad_norm": 1.7962007522583008, "learning_rate": 0.0004959573311591249, "loss": 3.4651, "step": 59370 }, { "epoch": 4.034175839108575, "grad_norm": 1.5211737155914307, "learning_rate": 0.0004959148661502922, "loss": 3.2873, "step": 59375 }, { "epoch": 4.034515559179236, "grad_norm": 1.6640504598617554, "learning_rate": 0.0004958724011414594, "loss": 3.5874, "step": 59380 }, { "epoch": 4.034855279249898, "grad_norm": 1.5710439682006836, "learning_rate": 0.0004958299361326268, "loss": 3.3721, "step": 59385 }, { "epoch": 4.03519499932056, "grad_norm": 1.7602494955062866, "learning_rate": 0.000495787471123794, "loss": 3.2746, "step": 59390 }, { "epoch": 4.035534719391221, "grad_norm": 2.2486867904663086, "learning_rate": 0.0004957450061149613, "loss": 3.2999, "step": 59395 }, { "epoch": 4.0358744394618835, "grad_norm": 2.083045482635498, "learning_rate": 0.0004957025411061285, "loss": 3.2753, "step": 59400 }, { "epoch": 4.036214159532546, "grad_norm": 1.897786021232605, "learning_rate": 0.0004956600760972959, "loss": 3.2885, "step": 59405 }, { "epoch": 4.036553879603207, "grad_norm": 
2.391528844833374, "learning_rate": 0.0004956176110884631, "loss": 3.1174, "step": 59410 }, { "epoch": 4.036893599673869, "grad_norm": 1.9192010164260864, "learning_rate": 0.0004955751460796304, "loss": 3.2886, "step": 59415 }, { "epoch": 4.037233319744531, "grad_norm": 1.9626001119613647, "learning_rate": 0.0004955326810707977, "loss": 3.3298, "step": 59420 }, { "epoch": 4.037573039815192, "grad_norm": 1.7832223176956177, "learning_rate": 0.0004954902160619649, "loss": 3.294, "step": 59425 }, { "epoch": 4.037912759885854, "grad_norm": 2.273798942565918, "learning_rate": 0.0004954477510531322, "loss": 3.41, "step": 59430 }, { "epoch": 4.038252479956516, "grad_norm": 1.8175690174102783, "learning_rate": 0.0004954052860442996, "loss": 3.3706, "step": 59435 }, { "epoch": 4.0385922000271774, "grad_norm": 1.9540610313415527, "learning_rate": 0.0004953628210354668, "loss": 3.2243, "step": 59440 }, { "epoch": 4.0389319200978395, "grad_norm": 2.370222806930542, "learning_rate": 0.000495320356026634, "loss": 3.4407, "step": 59445 }, { "epoch": 4.039271640168501, "grad_norm": 2.005084276199341, "learning_rate": 0.0004952778910178013, "loss": 3.3478, "step": 59450 }, { "epoch": 4.039611360239163, "grad_norm": 1.7650060653686523, "learning_rate": 0.0004952354260089686, "loss": 3.5425, "step": 59455 }, { "epoch": 4.039951080309825, "grad_norm": 1.7376317977905273, "learning_rate": 0.0004951929610001359, "loss": 3.4607, "step": 59460 }, { "epoch": 4.040290800380486, "grad_norm": 2.1652538776397705, "learning_rate": 0.0004951504959913032, "loss": 3.2784, "step": 59465 }, { "epoch": 4.040630520451148, "grad_norm": 1.478109359741211, "learning_rate": 0.0004951080309824705, "loss": 3.3712, "step": 59470 }, { "epoch": 4.04097024052181, "grad_norm": 1.6215746402740479, "learning_rate": 0.0004950655659736377, "loss": 3.2846, "step": 59475 }, { "epoch": 4.041309960592471, "grad_norm": 1.844590663909912, "learning_rate": 0.000495023100964805, "loss": 3.3472, "step": 59480 }, { "epoch": 
4.0416496806631335, "grad_norm": 1.7868213653564453, "learning_rate": 0.0004949806359559724, "loss": 3.3366, "step": 59485 }, { "epoch": 4.0419894007337955, "grad_norm": 2.1045193672180176, "learning_rate": 0.0004949381709471396, "loss": 3.4948, "step": 59490 }, { "epoch": 4.042329120804457, "grad_norm": 2.1004505157470703, "learning_rate": 0.0004948957059383068, "loss": 3.5646, "step": 59495 }, { "epoch": 4.042668840875119, "grad_norm": 1.9152185916900635, "learning_rate": 0.0004948532409294741, "loss": 3.5012, "step": 59500 }, { "epoch": 4.043008560945781, "grad_norm": 1.438382625579834, "learning_rate": 0.0004948107759206414, "loss": 3.5213, "step": 59505 }, { "epoch": 4.043348281016442, "grad_norm": 1.904005527496338, "learning_rate": 0.0004947683109118087, "loss": 3.3513, "step": 59510 }, { "epoch": 4.043688001087104, "grad_norm": 2.108377695083618, "learning_rate": 0.000494725845902976, "loss": 3.3879, "step": 59515 }, { "epoch": 4.044027721157766, "grad_norm": 2.009507417678833, "learning_rate": 0.0004946833808941433, "loss": 3.451, "step": 59520 }, { "epoch": 4.044367441228427, "grad_norm": 1.6680647134780884, "learning_rate": 0.0004946409158853105, "loss": 3.2744, "step": 59525 }, { "epoch": 4.0447071612990895, "grad_norm": 1.6273709535598755, "learning_rate": 0.0004945984508764778, "loss": 3.3093, "step": 59530 }, { "epoch": 4.0450468813697515, "grad_norm": 1.9703646898269653, "learning_rate": 0.000494555985867645, "loss": 3.5816, "step": 59535 }, { "epoch": 4.045386601440413, "grad_norm": 1.4631634950637817, "learning_rate": 0.0004945135208588124, "loss": 3.5389, "step": 59540 }, { "epoch": 4.045726321511075, "grad_norm": 1.7705535888671875, "learning_rate": 0.0004944710558499796, "loss": 3.2249, "step": 59545 }, { "epoch": 4.046066041581737, "grad_norm": 1.9207873344421387, "learning_rate": 0.0004944285908411469, "loss": 3.3904, "step": 59550 }, { "epoch": 4.046405761652398, "grad_norm": 1.5881309509277344, "learning_rate": 0.0004943861258323142, 
"loss": 3.189, "step": 59555 }, { "epoch": 4.04674548172306, "grad_norm": 1.8356029987335205, "learning_rate": 0.0004943436608234815, "loss": 3.6893, "step": 59560 }, { "epoch": 4.047085201793722, "grad_norm": 1.9935393333435059, "learning_rate": 0.0004943011958146487, "loss": 3.4532, "step": 59565 }, { "epoch": 4.047424921864383, "grad_norm": 2.2958998680114746, "learning_rate": 0.0004942587308058161, "loss": 3.3559, "step": 59570 }, { "epoch": 4.0477646419350455, "grad_norm": 1.6456587314605713, "learning_rate": 0.0004942162657969833, "loss": 3.0353, "step": 59575 }, { "epoch": 4.048104362005708, "grad_norm": 1.798695683479309, "learning_rate": 0.0004941738007881505, "loss": 3.4969, "step": 59580 }, { "epoch": 4.048444082076369, "grad_norm": 1.787828803062439, "learning_rate": 0.0004941313357793178, "loss": 3.41, "step": 59585 }, { "epoch": 4.048783802147031, "grad_norm": 2.0154647827148438, "learning_rate": 0.0004940888707704852, "loss": 3.2847, "step": 59590 }, { "epoch": 4.049123522217693, "grad_norm": 2.1556320190429688, "learning_rate": 0.0004940464057616524, "loss": 3.3846, "step": 59595 }, { "epoch": 4.049463242288354, "grad_norm": 1.6187412738800049, "learning_rate": 0.0004940039407528196, "loss": 3.3915, "step": 59600 }, { "epoch": 4.049802962359016, "grad_norm": 1.8947455883026123, "learning_rate": 0.000493961475743987, "loss": 3.4123, "step": 59605 }, { "epoch": 4.050142682429678, "grad_norm": 1.806564211845398, "learning_rate": 0.0004939190107351542, "loss": 3.498, "step": 59610 }, { "epoch": 4.050482402500339, "grad_norm": 2.127149820327759, "learning_rate": 0.0004938765457263215, "loss": 3.3753, "step": 59615 }, { "epoch": 4.0508221225710015, "grad_norm": 1.9116795063018799, "learning_rate": 0.0004938340807174889, "loss": 3.4294, "step": 59620 }, { "epoch": 4.051161842641664, "grad_norm": 2.0534000396728516, "learning_rate": 0.0004937916157086561, "loss": 3.2755, "step": 59625 }, { "epoch": 4.051501562712325, "grad_norm": 1.671410322189331, 
"learning_rate": 0.0004937491506998233, "loss": 3.2779, "step": 59630 }, { "epoch": 4.051841282782987, "grad_norm": 2.281747817993164, "learning_rate": 0.0004937066856909906, "loss": 3.6698, "step": 59635 }, { "epoch": 4.052181002853649, "grad_norm": 1.367194652557373, "learning_rate": 0.000493664220682158, "loss": 3.2091, "step": 59640 }, { "epoch": 4.05252072292431, "grad_norm": 1.5244135856628418, "learning_rate": 0.0004936217556733252, "loss": 3.3641, "step": 59645 }, { "epoch": 4.052860442994972, "grad_norm": 2.230506658554077, "learning_rate": 0.0004935792906644924, "loss": 3.5611, "step": 59650 }, { "epoch": 4.053200163065634, "grad_norm": 1.8914183378219604, "learning_rate": 0.0004935368256556598, "loss": 3.3084, "step": 59655 }, { "epoch": 4.053539883136295, "grad_norm": 1.8037034273147583, "learning_rate": 0.000493494360646827, "loss": 3.2844, "step": 59660 }, { "epoch": 4.0538796032069575, "grad_norm": 2.3338394165039062, "learning_rate": 0.0004934518956379943, "loss": 3.4819, "step": 59665 }, { "epoch": 4.05421932327762, "grad_norm": 1.9145169258117676, "learning_rate": 0.0004934094306291616, "loss": 3.66, "step": 59670 }, { "epoch": 4.054559043348281, "grad_norm": 1.9047795534133911, "learning_rate": 0.0004933669656203289, "loss": 3.3582, "step": 59675 }, { "epoch": 4.054898763418943, "grad_norm": 1.6684781312942505, "learning_rate": 0.0004933245006114961, "loss": 3.46, "step": 59680 }, { "epoch": 4.055238483489605, "grad_norm": 1.7031161785125732, "learning_rate": 0.0004932820356026634, "loss": 3.3217, "step": 59685 }, { "epoch": 4.055578203560266, "grad_norm": 1.9054566621780396, "learning_rate": 0.0004932395705938307, "loss": 3.333, "step": 59690 }, { "epoch": 4.055917923630928, "grad_norm": 1.969916820526123, "learning_rate": 0.000493197105584998, "loss": 3.3489, "step": 59695 }, { "epoch": 4.05625764370159, "grad_norm": 1.8401293754577637, "learning_rate": 0.0004931546405761652, "loss": 3.2222, "step": 59700 }, { "epoch": 4.056597363772251, 
"grad_norm": 1.6272108554840088, "learning_rate": 0.0004931121755673325, "loss": 3.3021, "step": 59705 }, { "epoch": 4.0569370838429135, "grad_norm": 2.0692734718322754, "learning_rate": 0.0004930697105584998, "loss": 3.4877, "step": 59710 }, { "epoch": 4.057276803913576, "grad_norm": 1.6578717231750488, "learning_rate": 0.0004930272455496671, "loss": 3.4657, "step": 59715 }, { "epoch": 4.057616523984237, "grad_norm": 1.3676793575286865, "learning_rate": 0.0004929847805408344, "loss": 3.4194, "step": 59720 }, { "epoch": 4.057956244054899, "grad_norm": 1.5712491273880005, "learning_rate": 0.0004929423155320017, "loss": 3.4917, "step": 59725 }, { "epoch": 4.058295964125561, "grad_norm": 1.9688960313796997, "learning_rate": 0.0004928998505231689, "loss": 3.4374, "step": 59730 }, { "epoch": 4.058635684196222, "grad_norm": 1.5582629442214966, "learning_rate": 0.0004928573855143361, "loss": 3.3437, "step": 59735 }, { "epoch": 4.058975404266884, "grad_norm": 2.1638224124908447, "learning_rate": 0.0004928149205055035, "loss": 3.5105, "step": 59740 }, { "epoch": 4.059315124337546, "grad_norm": 1.637573003768921, "learning_rate": 0.0004927724554966708, "loss": 3.4723, "step": 59745 }, { "epoch": 4.0596548444082075, "grad_norm": 1.8428452014923096, "learning_rate": 0.000492729990487838, "loss": 3.4435, "step": 59750 }, { "epoch": 4.0599945644788695, "grad_norm": 1.5445822477340698, "learning_rate": 0.0004926875254790053, "loss": 3.4302, "step": 59755 }, { "epoch": 4.060334284549532, "grad_norm": 1.715213418006897, "learning_rate": 0.0004926450604701726, "loss": 3.3148, "step": 59760 }, { "epoch": 4.060674004620193, "grad_norm": 2.117852210998535, "learning_rate": 0.0004926025954613398, "loss": 3.3757, "step": 59765 }, { "epoch": 4.061013724690855, "grad_norm": 1.4706878662109375, "learning_rate": 0.0004925601304525072, "loss": 3.3267, "step": 59770 }, { "epoch": 4.061353444761517, "grad_norm": 1.7118568420410156, "learning_rate": 0.0004925176654436744, "loss": 3.413, "step": 
59775 }, { "epoch": 4.061693164832178, "grad_norm": 1.7806752920150757, "learning_rate": 0.0004924752004348417, "loss": 3.3254, "step": 59780 }, { "epoch": 4.06203288490284, "grad_norm": 2.3596694469451904, "learning_rate": 0.0004924327354260089, "loss": 3.3432, "step": 59785 }, { "epoch": 4.062372604973502, "grad_norm": 1.619323968887329, "learning_rate": 0.0004923902704171763, "loss": 3.4619, "step": 59790 }, { "epoch": 4.0627123250441635, "grad_norm": 2.302086114883423, "learning_rate": 0.0004923478054083435, "loss": 3.576, "step": 59795 }, { "epoch": 4.0630520451148255, "grad_norm": 1.8818902969360352, "learning_rate": 0.0004923053403995108, "loss": 3.4115, "step": 59800 }, { "epoch": 4.063391765185487, "grad_norm": 1.4821735620498657, "learning_rate": 0.0004922628753906781, "loss": 3.4595, "step": 59805 }, { "epoch": 4.063731485256149, "grad_norm": 1.757299542427063, "learning_rate": 0.0004922204103818454, "loss": 3.4546, "step": 59810 }, { "epoch": 4.064071205326811, "grad_norm": 1.962043046951294, "learning_rate": 0.0004921779453730126, "loss": 3.3336, "step": 59815 }, { "epoch": 4.064410925397472, "grad_norm": 1.4484996795654297, "learning_rate": 0.00049213548036418, "loss": 3.366, "step": 59820 }, { "epoch": 4.064750645468134, "grad_norm": 2.1378655433654785, "learning_rate": 0.0004920930153553472, "loss": 3.4338, "step": 59825 }, { "epoch": 4.065090365538796, "grad_norm": 1.8606088161468506, "learning_rate": 0.0004920505503465145, "loss": 3.3274, "step": 59830 }, { "epoch": 4.065430085609457, "grad_norm": 2.0282721519470215, "learning_rate": 0.0004920080853376817, "loss": 3.2027, "step": 59835 }, { "epoch": 4.0657698056801195, "grad_norm": 1.7833900451660156, "learning_rate": 0.0004919656203288491, "loss": 3.5538, "step": 59840 }, { "epoch": 4.0661095257507816, "grad_norm": 1.725012183189392, "learning_rate": 0.0004919231553200163, "loss": 3.5363, "step": 59845 }, { "epoch": 4.066449245821443, "grad_norm": 1.868731141090393, "learning_rate": 
0.0004918806903111836, "loss": 3.3508, "step": 59850 }, { "epoch": 4.066788965892105, "grad_norm": 2.4221372604370117, "learning_rate": 0.0004918382253023509, "loss": 3.1087, "step": 59855 }, { "epoch": 4.067128685962767, "grad_norm": 1.7905402183532715, "learning_rate": 0.0004917957602935181, "loss": 3.3198, "step": 59860 }, { "epoch": 4.067468406033428, "grad_norm": 1.6823121309280396, "learning_rate": 0.0004917532952846854, "loss": 3.36, "step": 59865 }, { "epoch": 4.06780812610409, "grad_norm": 1.7341206073760986, "learning_rate": 0.0004917108302758528, "loss": 3.4457, "step": 59870 }, { "epoch": 4.068147846174752, "grad_norm": 2.082798957824707, "learning_rate": 0.00049166836526702, "loss": 3.496, "step": 59875 }, { "epoch": 4.068487566245413, "grad_norm": 1.6574466228485107, "learning_rate": 0.0004916259002581873, "loss": 3.5806, "step": 59880 }, { "epoch": 4.0688272863160755, "grad_norm": 1.9593571424484253, "learning_rate": 0.0004915834352493545, "loss": 3.4351, "step": 59885 }, { "epoch": 4.069167006386738, "grad_norm": 2.0075230598449707, "learning_rate": 0.0004915409702405218, "loss": 3.3243, "step": 59890 }, { "epoch": 4.069506726457399, "grad_norm": 1.9656463861465454, "learning_rate": 0.0004914985052316891, "loss": 3.4626, "step": 59895 }, { "epoch": 4.069846446528061, "grad_norm": 1.7965508699417114, "learning_rate": 0.0004914560402228564, "loss": 3.5029, "step": 59900 }, { "epoch": 4.070186166598723, "grad_norm": 1.4579434394836426, "learning_rate": 0.0004914135752140237, "loss": 3.5526, "step": 59905 }, { "epoch": 4.070525886669384, "grad_norm": 1.7364760637283325, "learning_rate": 0.0004913711102051909, "loss": 3.2655, "step": 59910 }, { "epoch": 4.070865606740046, "grad_norm": 2.292192220687866, "learning_rate": 0.0004913286451963582, "loss": 3.4754, "step": 59915 }, { "epoch": 4.071205326810708, "grad_norm": 2.289841651916504, "learning_rate": 0.0004912861801875254, "loss": 3.4857, "step": 59920 }, { "epoch": 4.071545046881369, "grad_norm": 
1.7491071224212646, "learning_rate": 0.0004912437151786928, "loss": 3.4596, "step": 59925 }, { "epoch": 4.0718847669520315, "grad_norm": 1.9110558032989502, "learning_rate": 0.00049120125016986, "loss": 3.5331, "step": 59930 }, { "epoch": 4.072224487022694, "grad_norm": 1.604244351387024, "learning_rate": 0.0004911587851610273, "loss": 3.2703, "step": 59935 }, { "epoch": 4.072564207093355, "grad_norm": 1.7197027206420898, "learning_rate": 0.0004911163201521946, "loss": 3.5308, "step": 59940 }, { "epoch": 4.072903927164017, "grad_norm": 1.8252149820327759, "learning_rate": 0.0004910738551433619, "loss": 3.6992, "step": 59945 }, { "epoch": 4.073243647234679, "grad_norm": 1.8140825033187866, "learning_rate": 0.0004910313901345291, "loss": 3.3918, "step": 59950 }, { "epoch": 4.07358336730534, "grad_norm": 1.9269367456436157, "learning_rate": 0.0004909889251256965, "loss": 3.2844, "step": 59955 }, { "epoch": 4.073923087376002, "grad_norm": 2.078331470489502, "learning_rate": 0.0004909464601168637, "loss": 3.2919, "step": 59960 }, { "epoch": 4.074262807446664, "grad_norm": 2.1689443588256836, "learning_rate": 0.0004909039951080309, "loss": 3.1375, "step": 59965 }, { "epoch": 4.074602527517325, "grad_norm": 1.8254830837249756, "learning_rate": 0.0004908615300991982, "loss": 3.3673, "step": 59970 }, { "epoch": 4.0749422475879875, "grad_norm": 1.817759394645691, "learning_rate": 0.0004908190650903656, "loss": 3.3736, "step": 59975 }, { "epoch": 4.07528196765865, "grad_norm": 1.5814197063446045, "learning_rate": 0.0004907766000815328, "loss": 3.3359, "step": 59980 }, { "epoch": 4.075621687729311, "grad_norm": 1.7559959888458252, "learning_rate": 0.0004907341350727001, "loss": 3.2324, "step": 59985 }, { "epoch": 4.075961407799973, "grad_norm": 1.79584538936615, "learning_rate": 0.0004906916700638674, "loss": 3.3314, "step": 59990 }, { "epoch": 4.076301127870635, "grad_norm": 2.196969509124756, "learning_rate": 0.0004906492050550347, "loss": 3.2267, "step": 59995 }, { "epoch": 
4.076640847941296, "grad_norm": 1.970947504043579, "learning_rate": 0.0004906067400462019, "loss": 3.4247, "step": 60000 }, { "epoch": 4.076980568011958, "grad_norm": 1.7827051877975464, "learning_rate": 0.0004905642750373693, "loss": 3.3936, "step": 60005 }, { "epoch": 4.07732028808262, "grad_norm": 1.7887659072875977, "learning_rate": 0.0004905218100285365, "loss": 3.3136, "step": 60010 }, { "epoch": 4.0776600081532814, "grad_norm": 2.2495675086975098, "learning_rate": 0.0004904793450197037, "loss": 3.1265, "step": 60015 }, { "epoch": 4.0779997282239435, "grad_norm": 2.9074671268463135, "learning_rate": 0.000490436880010871, "loss": 3.4271, "step": 60020 }, { "epoch": 4.078339448294606, "grad_norm": 1.7652173042297363, "learning_rate": 0.0004903944150020384, "loss": 3.2766, "step": 60025 }, { "epoch": 4.078679168365267, "grad_norm": 1.6308510303497314, "learning_rate": 0.0004903519499932056, "loss": 3.3501, "step": 60030 }, { "epoch": 4.079018888435929, "grad_norm": 1.7320678234100342, "learning_rate": 0.0004903094849843728, "loss": 3.4376, "step": 60035 }, { "epoch": 4.079358608506591, "grad_norm": 3.5199708938598633, "learning_rate": 0.0004902670199755402, "loss": 3.4767, "step": 60040 }, { "epoch": 4.079698328577252, "grad_norm": 2.3477590084075928, "learning_rate": 0.0004902245549667074, "loss": 3.3282, "step": 60045 }, { "epoch": 4.080038048647914, "grad_norm": 1.5316070318222046, "learning_rate": 0.0004901820899578747, "loss": 3.2858, "step": 60050 }, { "epoch": 4.080377768718576, "grad_norm": 1.5755774974822998, "learning_rate": 0.0004901396249490421, "loss": 3.3512, "step": 60055 }, { "epoch": 4.0807174887892375, "grad_norm": 1.5548995733261108, "learning_rate": 0.0004900971599402093, "loss": 3.3524, "step": 60060 }, { "epoch": 4.0810572088598995, "grad_norm": 1.9203760623931885, "learning_rate": 0.0004900546949313765, "loss": 3.1713, "step": 60065 }, { "epoch": 4.081396928930562, "grad_norm": 2.477482557296753, "learning_rate": 0.0004900122299225439, 
"loss": 3.3598, "step": 60070 }, { "epoch": 4.081736649001223, "grad_norm": 1.994563341140747, "learning_rate": 0.0004899697649137111, "loss": 3.3714, "step": 60075 }, { "epoch": 4.082076369071885, "grad_norm": 2.3512306213378906, "learning_rate": 0.0004899272999048784, "loss": 3.4983, "step": 60080 }, { "epoch": 4.082416089142547, "grad_norm": 1.8946993350982666, "learning_rate": 0.0004898848348960456, "loss": 3.501, "step": 60085 }, { "epoch": 4.082755809213208, "grad_norm": 1.75563383102417, "learning_rate": 0.000489842369887213, "loss": 3.4585, "step": 60090 }, { "epoch": 4.08309552928387, "grad_norm": 2.0283031463623047, "learning_rate": 0.0004897999048783802, "loss": 3.8258, "step": 60095 }, { "epoch": 4.083435249354532, "grad_norm": 1.723699927330017, "learning_rate": 0.0004897574398695475, "loss": 3.3076, "step": 60100 }, { "epoch": 4.0837749694251935, "grad_norm": 1.7277249097824097, "learning_rate": 0.0004897149748607148, "loss": 3.1265, "step": 60105 }, { "epoch": 4.0841146894958555, "grad_norm": 1.6719449758529663, "learning_rate": 0.0004896725098518821, "loss": 3.4651, "step": 60110 }, { "epoch": 4.084454409566518, "grad_norm": 1.893649697303772, "learning_rate": 0.0004896300448430493, "loss": 3.5885, "step": 60115 }, { "epoch": 4.084794129637179, "grad_norm": 1.6876636743545532, "learning_rate": 0.0004895875798342165, "loss": 3.3395, "step": 60120 }, { "epoch": 4.085133849707841, "grad_norm": 2.0660908222198486, "learning_rate": 0.0004895451148253839, "loss": 3.5227, "step": 60125 }, { "epoch": 4.085473569778502, "grad_norm": 2.0138700008392334, "learning_rate": 0.0004895026498165512, "loss": 3.3853, "step": 60130 }, { "epoch": 4.085813289849164, "grad_norm": 2.0304179191589355, "learning_rate": 0.0004894601848077184, "loss": 3.753, "step": 60135 }, { "epoch": 4.086153009919826, "grad_norm": 1.808819055557251, "learning_rate": 0.0004894177197988858, "loss": 3.2066, "step": 60140 }, { "epoch": 4.086492729990487, "grad_norm": 1.678234577178955, 
"learning_rate": 0.000489375254790053, "loss": 3.3632, "step": 60145 }, { "epoch": 4.0868324500611495, "grad_norm": 1.6988402605056763, "learning_rate": 0.0004893327897812203, "loss": 3.4611, "step": 60150 }, { "epoch": 4.0871721701318116, "grad_norm": 1.4129925966262817, "learning_rate": 0.0004892903247723876, "loss": 3.6121, "step": 60155 }, { "epoch": 4.087511890202473, "grad_norm": 2.0340516567230225, "learning_rate": 0.0004892478597635549, "loss": 3.3992, "step": 60160 }, { "epoch": 4.087851610273135, "grad_norm": 1.643910527229309, "learning_rate": 0.0004892053947547221, "loss": 3.5823, "step": 60165 }, { "epoch": 4.088191330343797, "grad_norm": 2.007070541381836, "learning_rate": 0.0004891629297458893, "loss": 3.3061, "step": 60170 }, { "epoch": 4.088531050414458, "grad_norm": 1.8176881074905396, "learning_rate": 0.0004891204647370567, "loss": 3.4661, "step": 60175 }, { "epoch": 4.08887077048512, "grad_norm": 1.9543884992599487, "learning_rate": 0.000489077999728224, "loss": 3.6335, "step": 60180 }, { "epoch": 4.089210490555782, "grad_norm": 1.9550668001174927, "learning_rate": 0.0004890355347193912, "loss": 3.2285, "step": 60185 }, { "epoch": 4.089550210626443, "grad_norm": 2.3812036514282227, "learning_rate": 0.0004889930697105585, "loss": 3.3748, "step": 60190 }, { "epoch": 4.0898899306971055, "grad_norm": 2.0053255558013916, "learning_rate": 0.0004889506047017258, "loss": 3.6244, "step": 60195 }, { "epoch": 4.090229650767768, "grad_norm": 1.7043262720108032, "learning_rate": 0.000488908139692893, "loss": 3.3623, "step": 60200 }, { "epoch": 4.090569370838429, "grad_norm": 3.035824775695801, "learning_rate": 0.0004888656746840604, "loss": 3.5909, "step": 60205 }, { "epoch": 4.090909090909091, "grad_norm": 1.9972409009933472, "learning_rate": 0.0004888232096752277, "loss": 3.211, "step": 60210 }, { "epoch": 4.091248810979753, "grad_norm": 1.8565239906311035, "learning_rate": 0.0004887807446663949, "loss": 3.3764, "step": 60215 }, { "epoch": 
4.091588531050414, "grad_norm": 2.0868818759918213, "learning_rate": 0.0004887382796575621, "loss": 3.4274, "step": 60220 }, { "epoch": 4.091928251121076, "grad_norm": 2.948996067047119, "learning_rate": 0.0004886958146487295, "loss": 3.5181, "step": 60225 }, { "epoch": 4.092267971191738, "grad_norm": 1.4606399536132812, "learning_rate": 0.0004886533496398967, "loss": 3.582, "step": 60230 }, { "epoch": 4.092607691262399, "grad_norm": 1.6169168949127197, "learning_rate": 0.000488610884631064, "loss": 3.3259, "step": 60235 }, { "epoch": 4.0929474113330615, "grad_norm": 2.5352346897125244, "learning_rate": 0.0004885684196222313, "loss": 3.2348, "step": 60240 }, { "epoch": 4.093287131403724, "grad_norm": 2.118159770965576, "learning_rate": 0.0004885259546133986, "loss": 3.2975, "step": 60245 }, { "epoch": 4.093626851474385, "grad_norm": 2.0746278762817383, "learning_rate": 0.0004884834896045658, "loss": 3.1575, "step": 60250 }, { "epoch": 4.093966571545047, "grad_norm": 1.9328498840332031, "learning_rate": 0.0004884410245957332, "loss": 3.4642, "step": 60255 }, { "epoch": 4.094306291615709, "grad_norm": 2.0420820713043213, "learning_rate": 0.0004883985595869004, "loss": 3.5219, "step": 60260 }, { "epoch": 4.09464601168637, "grad_norm": 1.7957148551940918, "learning_rate": 0.0004883560945780677, "loss": 3.499, "step": 60265 }, { "epoch": 4.094985731757032, "grad_norm": 2.2350850105285645, "learning_rate": 0.0004883136295692349, "loss": 3.3778, "step": 60270 }, { "epoch": 4.095325451827694, "grad_norm": 1.91860032081604, "learning_rate": 0.0004882711645604022, "loss": 3.1594, "step": 60275 }, { "epoch": 4.095665171898355, "grad_norm": 1.7672467231750488, "learning_rate": 0.0004882286995515695, "loss": 3.4059, "step": 60280 }, { "epoch": 4.0960048919690175, "grad_norm": 1.641132116317749, "learning_rate": 0.0004881862345427368, "loss": 3.2646, "step": 60285 }, { "epoch": 4.09634461203968, "grad_norm": 2.0046546459198, "learning_rate": 0.00048814376953390407, "loss": 
3.3327, "step": 60290 }, { "epoch": 4.096684332110341, "grad_norm": 1.87942373752594, "learning_rate": 0.00048810130452507135, "loss": 3.3551, "step": 60295 }, { "epoch": 4.097024052181003, "grad_norm": 2.079958915710449, "learning_rate": 0.0004880588395162386, "loss": 3.543, "step": 60300 }, { "epoch": 4.097363772251665, "grad_norm": 1.7626216411590576, "learning_rate": 0.0004880163745074059, "loss": 3.4521, "step": 60305 }, { "epoch": 4.097703492322326, "grad_norm": 1.5881942510604858, "learning_rate": 0.0004879739094985732, "loss": 3.1219, "step": 60310 }, { "epoch": 4.098043212392988, "grad_norm": 1.9254851341247559, "learning_rate": 0.00048793144448974047, "loss": 3.4647, "step": 60315 }, { "epoch": 4.09838293246365, "grad_norm": 1.6564881801605225, "learning_rate": 0.00048788897948090775, "loss": 3.2532, "step": 60320 }, { "epoch": 4.0987226525343115, "grad_norm": 2.2509899139404297, "learning_rate": 0.000487846514472075, "loss": 3.428, "step": 60325 }, { "epoch": 4.0990623726049735, "grad_norm": 1.6035407781600952, "learning_rate": 0.0004878040494632423, "loss": 3.334, "step": 60330 }, { "epoch": 4.099402092675636, "grad_norm": 2.1123132705688477, "learning_rate": 0.0004877615844544096, "loss": 3.4658, "step": 60335 }, { "epoch": 4.099741812746297, "grad_norm": 1.9338181018829346, "learning_rate": 0.00048771911944557687, "loss": 3.4502, "step": 60340 }, { "epoch": 4.100081532816959, "grad_norm": 1.6513895988464355, "learning_rate": 0.00048767665443674415, "loss": 3.3213, "step": 60345 }, { "epoch": 4.100421252887621, "grad_norm": 1.576535701751709, "learning_rate": 0.0004876341894279114, "loss": 3.4427, "step": 60350 }, { "epoch": 4.100760972958282, "grad_norm": 1.4713237285614014, "learning_rate": 0.00048759172441907865, "loss": 3.4897, "step": 60355 }, { "epoch": 4.101100693028944, "grad_norm": 1.8792202472686768, "learning_rate": 0.000487549259410246, "loss": 3.3159, "step": 60360 }, { "epoch": 4.101440413099606, "grad_norm": 1.7174755334854126, 
"learning_rate": 0.00048750679440141327, "loss": 3.4226, "step": 60365 }, { "epoch": 4.1017801331702675, "grad_norm": 1.8895093202590942, "learning_rate": 0.0004874643293925805, "loss": 3.458, "step": 60370 }, { "epoch": 4.1021198532409295, "grad_norm": 1.9813097715377808, "learning_rate": 0.00048742186438374783, "loss": 3.3971, "step": 60375 }, { "epoch": 4.102459573311592, "grad_norm": 1.5909180641174316, "learning_rate": 0.0004873793993749151, "loss": 3.4324, "step": 60380 }, { "epoch": 4.102799293382253, "grad_norm": 1.8762253522872925, "learning_rate": 0.00048733693436608233, "loss": 3.3963, "step": 60385 }, { "epoch": 4.103139013452915, "grad_norm": 1.9598256349563599, "learning_rate": 0.0004872944693572496, "loss": 3.2251, "step": 60390 }, { "epoch": 4.103478733523577, "grad_norm": 1.6154308319091797, "learning_rate": 0.00048725200434841695, "loss": 3.1934, "step": 60395 }, { "epoch": 4.103818453594238, "grad_norm": 1.6406766176223755, "learning_rate": 0.0004872095393395842, "loss": 3.259, "step": 60400 }, { "epoch": 4.1041581736649, "grad_norm": 2.333040475845337, "learning_rate": 0.00048716707433075145, "loss": 3.3997, "step": 60405 }, { "epoch": 4.104497893735562, "grad_norm": 1.8011099100112915, "learning_rate": 0.0004871246093219188, "loss": 3.2984, "step": 60410 }, { "epoch": 4.1048376138062235, "grad_norm": 2.0304067134857178, "learning_rate": 0.000487082144313086, "loss": 3.3728, "step": 60415 }, { "epoch": 4.1051773338768855, "grad_norm": 2.0671684741973877, "learning_rate": 0.0004870396793042533, "loss": 3.4366, "step": 60420 }, { "epoch": 4.105517053947548, "grad_norm": 1.9813882112503052, "learning_rate": 0.0004869972142954206, "loss": 3.4355, "step": 60425 }, { "epoch": 4.105856774018209, "grad_norm": 1.7134602069854736, "learning_rate": 0.00048695474928658785, "loss": 3.3313, "step": 60430 }, { "epoch": 4.106196494088871, "grad_norm": 1.7568143606185913, "learning_rate": 0.00048691228427775513, "loss": 3.0513, "step": 60435 }, { "epoch": 
4.106536214159533, "grad_norm": 1.883142113685608, "learning_rate": 0.0004868698192689224, "loss": 3.4535, "step": 60440 }, { "epoch": 4.106875934230194, "grad_norm": 1.6762166023254395, "learning_rate": 0.0004868273542600897, "loss": 3.3698, "step": 60445 }, { "epoch": 4.107215654300856, "grad_norm": 1.8943079710006714, "learning_rate": 0.000486784889251257, "loss": 3.5257, "step": 60450 }, { "epoch": 4.107555374371518, "grad_norm": 1.85199773311615, "learning_rate": 0.00048674242424242425, "loss": 3.3205, "step": 60455 }, { "epoch": 4.1078950944421795, "grad_norm": 1.777312994003296, "learning_rate": 0.0004866999592335915, "loss": 3.3068, "step": 60460 }, { "epoch": 4.108234814512842, "grad_norm": 1.8947104215621948, "learning_rate": 0.0004866574942247588, "loss": 3.2941, "step": 60465 }, { "epoch": 4.108574534583504, "grad_norm": 1.725339651107788, "learning_rate": 0.0004866150292159261, "loss": 3.2919, "step": 60470 }, { "epoch": 4.108914254654165, "grad_norm": 2.1941845417022705, "learning_rate": 0.0004865725642070933, "loss": 3.5114, "step": 60475 }, { "epoch": 4.109253974724827, "grad_norm": 1.597952127456665, "learning_rate": 0.00048653009919826065, "loss": 3.3367, "step": 60480 }, { "epoch": 4.109593694795488, "grad_norm": 1.702436923980713, "learning_rate": 0.00048648763418942793, "loss": 3.3582, "step": 60485 }, { "epoch": 4.10993341486615, "grad_norm": 1.6043709516525269, "learning_rate": 0.0004864451691805952, "loss": 3.5825, "step": 60490 }, { "epoch": 4.110273134936812, "grad_norm": 2.122208833694458, "learning_rate": 0.00048640270417176244, "loss": 3.4279, "step": 60495 }, { "epoch": 4.110612855007473, "grad_norm": 2.0070066452026367, "learning_rate": 0.0004863602391629298, "loss": 3.0957, "step": 60500 }, { "epoch": 4.1109525750781355, "grad_norm": 1.8070175647735596, "learning_rate": 0.00048631777415409705, "loss": 3.3751, "step": 60505 }, { "epoch": 4.111292295148798, "grad_norm": 1.922191858291626, "learning_rate": 0.0004862753091452643, "loss": 
3.4894, "step": 60510 }, { "epoch": 4.111632015219459, "grad_norm": 1.4952672719955444, "learning_rate": 0.0004862328441364316, "loss": 3.3798, "step": 60515 }, { "epoch": 4.111971735290121, "grad_norm": 2.1874821186065674, "learning_rate": 0.0004861903791275989, "loss": 3.3515, "step": 60520 }, { "epoch": 4.112311455360783, "grad_norm": 1.9481499195098877, "learning_rate": 0.0004861479141187661, "loss": 3.3685, "step": 60525 }, { "epoch": 4.112651175431444, "grad_norm": 2.1840763092041016, "learning_rate": 0.00048610544910993346, "loss": 3.2272, "step": 60530 }, { "epoch": 4.112990895502106, "grad_norm": 2.0792758464813232, "learning_rate": 0.00048606298410110074, "loss": 3.0947, "step": 60535 }, { "epoch": 4.113330615572768, "grad_norm": 1.8596428632736206, "learning_rate": 0.00048602051909226796, "loss": 3.2625, "step": 60540 }, { "epoch": 4.113670335643429, "grad_norm": 1.6249405145645142, "learning_rate": 0.00048597805408343524, "loss": 3.34, "step": 60545 }, { "epoch": 4.1140100557140915, "grad_norm": 1.9119313955307007, "learning_rate": 0.0004859355890746026, "loss": 3.1653, "step": 60550 }, { "epoch": 4.114349775784754, "grad_norm": 1.8234617710113525, "learning_rate": 0.0004858931240657698, "loss": 3.2778, "step": 60555 }, { "epoch": 4.114689495855415, "grad_norm": 2.4764506816864014, "learning_rate": 0.0004858506590569371, "loss": 3.2832, "step": 60560 }, { "epoch": 4.115029215926077, "grad_norm": 1.8517574071884155, "learning_rate": 0.0004858081940481044, "loss": 3.4494, "step": 60565 }, { "epoch": 4.115368935996739, "grad_norm": 2.1201529502868652, "learning_rate": 0.00048576572903927164, "loss": 3.3688, "step": 60570 }, { "epoch": 4.1157086560674, "grad_norm": 1.7934504747390747, "learning_rate": 0.0004857232640304389, "loss": 3.1401, "step": 60575 }, { "epoch": 4.116048376138062, "grad_norm": 2.7126424312591553, "learning_rate": 0.0004856807990216062, "loss": 3.5729, "step": 60580 }, { "epoch": 4.116388096208724, "grad_norm": 1.717152714729309, 
"learning_rate": 0.0004856383340127735, "loss": 3.3345, "step": 60585 }, { "epoch": 4.116727816279385, "grad_norm": 1.8116801977157593, "learning_rate": 0.00048559586900394076, "loss": 3.3641, "step": 60590 }, { "epoch": 4.1170675363500475, "grad_norm": 1.9468929767608643, "learning_rate": 0.00048555340399510804, "loss": 3.5169, "step": 60595 }, { "epoch": 4.11740725642071, "grad_norm": 1.8708568811416626, "learning_rate": 0.0004855109389862753, "loss": 3.5925, "step": 60600 }, { "epoch": 4.117746976491371, "grad_norm": 2.0646517276763916, "learning_rate": 0.0004854684739774426, "loss": 3.3138, "step": 60605 }, { "epoch": 4.118086696562033, "grad_norm": 1.9751980304718018, "learning_rate": 0.0004854260089686099, "loss": 3.2414, "step": 60610 }, { "epoch": 4.118426416632695, "grad_norm": 2.0608298778533936, "learning_rate": 0.0004853835439597771, "loss": 3.3366, "step": 60615 }, { "epoch": 4.118766136703356, "grad_norm": 1.5009738206863403, "learning_rate": 0.00048534107895094444, "loss": 3.3969, "step": 60620 }, { "epoch": 4.119105856774018, "grad_norm": 1.6562659740447998, "learning_rate": 0.0004852986139421117, "loss": 3.4534, "step": 60625 }, { "epoch": 4.11944557684468, "grad_norm": 1.7650082111358643, "learning_rate": 0.00048526464193504555, "loss": 3.1476, "step": 60630 }, { "epoch": 4.1197852969153415, "grad_norm": 1.6812400817871094, "learning_rate": 0.00048522217692621277, "loss": 3.6977, "step": 60635 }, { "epoch": 4.1201250169860035, "grad_norm": 1.6551364660263062, "learning_rate": 0.0004851797119173801, "loss": 3.2697, "step": 60640 }, { "epoch": 4.120464737056666, "grad_norm": 1.906916618347168, "learning_rate": 0.0004851372469085474, "loss": 3.4243, "step": 60645 }, { "epoch": 4.120804457127327, "grad_norm": 1.9593793153762817, "learning_rate": 0.0004850947818997146, "loss": 3.4679, "step": 60650 }, { "epoch": 4.121144177197989, "grad_norm": 1.590056300163269, "learning_rate": 0.0004850523168908819, "loss": 3.4585, "step": 60655 }, { "epoch": 
4.121483897268651, "grad_norm": 1.625127911567688, "learning_rate": 0.00048500985188204923, "loss": 3.2347, "step": 60660 }, { "epoch": 4.121823617339312, "grad_norm": 2.0768680572509766, "learning_rate": 0.00048496738687321645, "loss": 3.2506, "step": 60665 }, { "epoch": 4.122163337409974, "grad_norm": 2.2041146755218506, "learning_rate": 0.00048492492186438373, "loss": 3.3061, "step": 60670 }, { "epoch": 4.122503057480636, "grad_norm": 2.278345823287964, "learning_rate": 0.00048488245685555107, "loss": 3.2103, "step": 60675 }, { "epoch": 4.1228427775512975, "grad_norm": 2.417304515838623, "learning_rate": 0.0004848399918467183, "loss": 3.1861, "step": 60680 }, { "epoch": 4.1231824976219595, "grad_norm": 3.0031070709228516, "learning_rate": 0.0004847975268378856, "loss": 3.3523, "step": 60685 }, { "epoch": 4.123522217692622, "grad_norm": 1.5867854356765747, "learning_rate": 0.00048475506182905285, "loss": 3.7041, "step": 60690 }, { "epoch": 4.123861937763283, "grad_norm": 1.850716233253479, "learning_rate": 0.0004847125968202202, "loss": 3.1765, "step": 60695 }, { "epoch": 4.124201657833945, "grad_norm": 1.8252235651016235, "learning_rate": 0.0004846701318113874, "loss": 3.0792, "step": 60700 }, { "epoch": 4.124541377904607, "grad_norm": 1.9128468036651611, "learning_rate": 0.0004846276668025547, "loss": 3.3758, "step": 60705 }, { "epoch": 4.124881097975268, "grad_norm": 1.8915722370147705, "learning_rate": 0.00048458520179372203, "loss": 3.2861, "step": 60710 }, { "epoch": 4.12522081804593, "grad_norm": 1.7563883066177368, "learning_rate": 0.00048454273678488925, "loss": 3.3261, "step": 60715 }, { "epoch": 4.125560538116592, "grad_norm": 1.6824835538864136, "learning_rate": 0.00048450027177605653, "loss": 3.3143, "step": 60720 }, { "epoch": 4.1259002581872535, "grad_norm": 1.6523269414901733, "learning_rate": 0.0004844578067672238, "loss": 3.155, "step": 60725 }, { "epoch": 4.1262399782579156, "grad_norm": 2.4433321952819824, "learning_rate": 
0.0004844153417583911, "loss": 3.3217, "step": 60730 }, { "epoch": 4.126579698328578, "grad_norm": 2.136667490005493, "learning_rate": 0.0004843728767495584, "loss": 3.2996, "step": 60735 }, { "epoch": 4.126919418399239, "grad_norm": 1.6631959676742554, "learning_rate": 0.00048433041174072565, "loss": 3.3047, "step": 60740 }, { "epoch": 4.127259138469901, "grad_norm": 2.2824740409851074, "learning_rate": 0.00048428794673189293, "loss": 3.0076, "step": 60745 }, { "epoch": 4.127598858540563, "grad_norm": 1.9434919357299805, "learning_rate": 0.0004842454817230602, "loss": 3.6157, "step": 60750 }, { "epoch": 4.127938578611224, "grad_norm": 1.8348795175552368, "learning_rate": 0.0004842030167142275, "loss": 3.3235, "step": 60755 }, { "epoch": 4.128278298681886, "grad_norm": 1.865661382675171, "learning_rate": 0.0004841605517053947, "loss": 3.4126, "step": 60760 }, { "epoch": 4.128618018752548, "grad_norm": 1.8344238996505737, "learning_rate": 0.00048411808669656205, "loss": 3.2552, "step": 60765 }, { "epoch": 4.1289577388232095, "grad_norm": 2.265775680541992, "learning_rate": 0.00048407562168772933, "loss": 3.347, "step": 60770 }, { "epoch": 4.129297458893872, "grad_norm": 1.5283757448196411, "learning_rate": 0.00048403315667889656, "loss": 3.2711, "step": 60775 }, { "epoch": 4.129637178964534, "grad_norm": 2.0364344120025635, "learning_rate": 0.0004839906916700639, "loss": 3.514, "step": 60780 }, { "epoch": 4.129976899035195, "grad_norm": 1.4146357774734497, "learning_rate": 0.0004839482266612312, "loss": 3.4849, "step": 60785 }, { "epoch": 4.130316619105857, "grad_norm": 2.314563512802124, "learning_rate": 0.0004839057616523984, "loss": 3.4452, "step": 60790 }, { "epoch": 4.130656339176519, "grad_norm": 1.9548051357269287, "learning_rate": 0.00048386329664356574, "loss": 3.3753, "step": 60795 }, { "epoch": 4.13099605924718, "grad_norm": 1.6733289957046509, "learning_rate": 0.000483820831634733, "loss": 3.2883, "step": 60800 }, { "epoch": 4.131335779317842, 
"grad_norm": 1.4876779317855835, "learning_rate": 0.00048377836662590024, "loss": 3.2214, "step": 60805 }, { "epoch": 4.131675499388503, "grad_norm": 2.49955153465271, "learning_rate": 0.0004837359016170675, "loss": 3.457, "step": 60810 }, { "epoch": 4.1320152194591655, "grad_norm": 2.1172525882720947, "learning_rate": 0.00048369343660823486, "loss": 3.105, "step": 60815 }, { "epoch": 4.132354939529828, "grad_norm": 1.8299798965454102, "learning_rate": 0.0004836509715994021, "loss": 3.4015, "step": 60820 }, { "epoch": 4.132694659600489, "grad_norm": 1.8277558088302612, "learning_rate": 0.00048360850659056936, "loss": 3.306, "step": 60825 }, { "epoch": 4.133034379671151, "grad_norm": 2.17913556098938, "learning_rate": 0.0004835660415817367, "loss": 3.3898, "step": 60830 }, { "epoch": 4.133374099741813, "grad_norm": 1.9685362577438354, "learning_rate": 0.0004835235765729039, "loss": 3.4497, "step": 60835 }, { "epoch": 4.133713819812474, "grad_norm": 1.9092811346054077, "learning_rate": 0.0004834811115640712, "loss": 3.2572, "step": 60840 }, { "epoch": 4.134053539883136, "grad_norm": 2.8664777278900146, "learning_rate": 0.0004834386465552385, "loss": 3.1905, "step": 60845 }, { "epoch": 4.134393259953798, "grad_norm": 1.7609442472457886, "learning_rate": 0.00048339618154640576, "loss": 3.4074, "step": 60850 }, { "epoch": 4.134732980024459, "grad_norm": 1.8686013221740723, "learning_rate": 0.00048335371653757304, "loss": 3.3642, "step": 60855 }, { "epoch": 4.1350727000951215, "grad_norm": 2.0271012783050537, "learning_rate": 0.0004833112515287403, "loss": 3.3883, "step": 60860 }, { "epoch": 4.135412420165784, "grad_norm": 1.8683146238327026, "learning_rate": 0.00048326878651990766, "loss": 3.4316, "step": 60865 }, { "epoch": 4.135752140236445, "grad_norm": 1.859028935432434, "learning_rate": 0.0004832263215110749, "loss": 3.351, "step": 60870 }, { "epoch": 4.136091860307107, "grad_norm": 1.6059482097625732, "learning_rate": 0.00048318385650224216, "loss": 3.3708, 
"step": 60875 }, { "epoch": 4.136431580377769, "grad_norm": 1.9582599401474, "learning_rate": 0.00048314139149340944, "loss": 3.4173, "step": 60880 }, { "epoch": 4.13677130044843, "grad_norm": 1.9627962112426758, "learning_rate": 0.0004830989264845767, "loss": 3.4314, "step": 60885 }, { "epoch": 4.137111020519092, "grad_norm": 2.0311832427978516, "learning_rate": 0.000483056461475744, "loss": 3.4839, "step": 60890 }, { "epoch": 4.137450740589754, "grad_norm": 1.8362809419631958, "learning_rate": 0.0004830139964669113, "loss": 3.2716, "step": 60895 }, { "epoch": 4.1377904606604154, "grad_norm": 2.012545347213745, "learning_rate": 0.00048297153145807856, "loss": 3.3465, "step": 60900 }, { "epoch": 4.1381301807310775, "grad_norm": 1.8697340488433838, "learning_rate": 0.00048292906644924584, "loss": 3.2257, "step": 60905 }, { "epoch": 4.13846990080174, "grad_norm": 2.123215913772583, "learning_rate": 0.0004828866014404131, "loss": 3.3272, "step": 60910 }, { "epoch": 4.138809620872401, "grad_norm": 2.0382015705108643, "learning_rate": 0.00048284413643158035, "loss": 3.582, "step": 60915 }, { "epoch": 4.139149340943063, "grad_norm": 2.0468268394470215, "learning_rate": 0.0004828016714227477, "loss": 3.2835, "step": 60920 }, { "epoch": 4.139489061013725, "grad_norm": 1.8593060970306396, "learning_rate": 0.00048275920641391496, "loss": 3.2439, "step": 60925 }, { "epoch": 4.139828781084386, "grad_norm": 2.0923635959625244, "learning_rate": 0.0004827167414050822, "loss": 3.7362, "step": 60930 }, { "epoch": 4.140168501155048, "grad_norm": 1.8295753002166748, "learning_rate": 0.0004826742763962495, "loss": 3.3342, "step": 60935 }, { "epoch": 4.14050822122571, "grad_norm": 1.4892802238464355, "learning_rate": 0.0004826318113874168, "loss": 3.2857, "step": 60940 }, { "epoch": 4.1408479412963715, "grad_norm": 1.894958734512329, "learning_rate": 0.00048258934637858403, "loss": 3.5949, "step": 60945 }, { "epoch": 4.1411876613670335, "grad_norm": 1.5730942487716675, "learning_rate": 
0.0004825468813697513, "loss": 3.4677, "step": 60950 }, { "epoch": 4.141527381437696, "grad_norm": 1.6580049991607666, "learning_rate": 0.00048250441636091864, "loss": 3.0708, "step": 60955 }, { "epoch": 4.141867101508357, "grad_norm": 1.7242121696472168, "learning_rate": 0.00048246195135208587, "loss": 3.2217, "step": 60960 }, { "epoch": 4.142206821579019, "grad_norm": 1.8843815326690674, "learning_rate": 0.00048241948634325315, "loss": 3.3483, "step": 60965 }, { "epoch": 4.142546541649681, "grad_norm": 2.185161590576172, "learning_rate": 0.0004823770213344205, "loss": 3.4341, "step": 60970 }, { "epoch": 4.142886261720342, "grad_norm": 1.968928337097168, "learning_rate": 0.0004823345563255877, "loss": 3.3064, "step": 60975 }, { "epoch": 4.143225981791004, "grad_norm": 1.8459643125534058, "learning_rate": 0.000482292091316755, "loss": 3.2343, "step": 60980 }, { "epoch": 4.143565701861666, "grad_norm": 1.9763920307159424, "learning_rate": 0.00048224962630792227, "loss": 3.4866, "step": 60985 }, { "epoch": 4.1439054219323275, "grad_norm": 2.3316004276275635, "learning_rate": 0.00048220716129908955, "loss": 3.3267, "step": 60990 }, { "epoch": 4.1442451420029895, "grad_norm": 1.9499934911727905, "learning_rate": 0.00048216469629025683, "loss": 3.2771, "step": 60995 }, { "epoch": 4.144584862073652, "grad_norm": 1.6418520212173462, "learning_rate": 0.0004821222312814241, "loss": 3.0367, "step": 61000 }, { "epoch": 4.144924582144313, "grad_norm": 1.7641489505767822, "learning_rate": 0.0004820797662725914, "loss": 3.2251, "step": 61005 }, { "epoch": 4.145264302214975, "grad_norm": 1.7034047842025757, "learning_rate": 0.00048203730126375867, "loss": 3.3854, "step": 61010 }, { "epoch": 4.145604022285637, "grad_norm": 1.9203999042510986, "learning_rate": 0.00048199483625492595, "loss": 3.4116, "step": 61015 }, { "epoch": 4.145943742356298, "grad_norm": 1.8460745811462402, "learning_rate": 0.0004819523712460932, "loss": 3.346, "step": 61020 }, { "epoch": 4.14628346242696, 
"grad_norm": 2.0995423793792725, "learning_rate": 0.0004819099062372605, "loss": 3.4917, "step": 61025 }, { "epoch": 4.146623182497622, "grad_norm": 1.8831093311309814, "learning_rate": 0.0004818674412284278, "loss": 3.4585, "step": 61030 }, { "epoch": 4.1469629025682835, "grad_norm": 1.7754555940628052, "learning_rate": 0.00048182497621959507, "loss": 3.4249, "step": 61035 }, { "epoch": 4.147302622638946, "grad_norm": 1.6182258129119873, "learning_rate": 0.00048178251121076235, "loss": 3.422, "step": 61040 }, { "epoch": 4.147642342709608, "grad_norm": 1.9612795114517212, "learning_rate": 0.00048174004620192963, "loss": 3.5898, "step": 61045 }, { "epoch": 4.147982062780269, "grad_norm": 2.210153102874756, "learning_rate": 0.0004816975811930969, "loss": 3.354, "step": 61050 }, { "epoch": 4.148321782850931, "grad_norm": 1.7200103998184204, "learning_rate": 0.0004816551161842642, "loss": 3.4875, "step": 61055 }, { "epoch": 4.148661502921593, "grad_norm": 1.3595695495605469, "learning_rate": 0.00048161265117543147, "loss": 3.4169, "step": 61060 }, { "epoch": 4.149001222992254, "grad_norm": 1.775814175605774, "learning_rate": 0.00048157018616659875, "loss": 3.3418, "step": 61065 }, { "epoch": 4.149340943062916, "grad_norm": 1.7004001140594482, "learning_rate": 0.000481527721157766, "loss": 3.4319, "step": 61070 }, { "epoch": 4.149680663133578, "grad_norm": 1.8813523054122925, "learning_rate": 0.0004814852561489333, "loss": 3.4075, "step": 61075 }, { "epoch": 4.1500203832042395, "grad_norm": 1.6944419145584106, "learning_rate": 0.0004814427911401006, "loss": 3.3656, "step": 61080 }, { "epoch": 4.150360103274902, "grad_norm": 2.2287137508392334, "learning_rate": 0.0004814003261312678, "loss": 3.2284, "step": 61085 }, { "epoch": 4.150699823345564, "grad_norm": 1.9331369400024414, "learning_rate": 0.00048135786112243515, "loss": 3.3736, "step": 61090 }, { "epoch": 4.151039543416225, "grad_norm": 1.8928322792053223, "learning_rate": 0.00048131539611360243, "loss": 3.5913, 
"step": 61095 }, { "epoch": 4.151379263486887, "grad_norm": 1.533021330833435, "learning_rate": 0.00048127293110476966, "loss": 3.3514, "step": 61100 }, { "epoch": 4.151718983557549, "grad_norm": 1.5453964471817017, "learning_rate": 0.00048123046609593694, "loss": 3.2583, "step": 61105 }, { "epoch": 4.15205870362821, "grad_norm": 2.0716047286987305, "learning_rate": 0.00048118800108710427, "loss": 3.5218, "step": 61110 }, { "epoch": 4.152398423698872, "grad_norm": 2.3371987342834473, "learning_rate": 0.0004811455360782715, "loss": 3.5297, "step": 61115 }, { "epoch": 4.152738143769534, "grad_norm": 1.6257816553115845, "learning_rate": 0.0004811030710694388, "loss": 3.3843, "step": 61120 }, { "epoch": 4.1530778638401955, "grad_norm": 1.690600872039795, "learning_rate": 0.0004810606060606061, "loss": 3.3868, "step": 61125 }, { "epoch": 4.153417583910858, "grad_norm": 1.724847674369812, "learning_rate": 0.00048101814105177334, "loss": 3.1955, "step": 61130 }, { "epoch": 4.15375730398152, "grad_norm": 1.685431718826294, "learning_rate": 0.0004809756760429406, "loss": 3.3842, "step": 61135 }, { "epoch": 4.154097024052181, "grad_norm": 2.3685519695281982, "learning_rate": 0.0004809332110341079, "loss": 3.4702, "step": 61140 }, { "epoch": 4.154436744122843, "grad_norm": 1.7901302576065063, "learning_rate": 0.0004808907460252752, "loss": 3.3758, "step": 61145 }, { "epoch": 4.154776464193505, "grad_norm": 1.9585648775100708, "learning_rate": 0.00048084828101644246, "loss": 3.4259, "step": 61150 }, { "epoch": 4.155116184264166, "grad_norm": 1.6355968713760376, "learning_rate": 0.00048080581600760974, "loss": 3.5213, "step": 61155 }, { "epoch": 4.155455904334828, "grad_norm": 2.389187812805176, "learning_rate": 0.000480763350998777, "loss": 3.3231, "step": 61160 }, { "epoch": 4.15579562440549, "grad_norm": 1.9292737245559692, "learning_rate": 0.0004807208859899443, "loss": 3.551, "step": 61165 }, { "epoch": 4.1561353444761515, "grad_norm": 1.6704293489456177, "learning_rate": 
0.0004806784209811116, "loss": 3.5609, "step": 61170 }, { "epoch": 4.156475064546814, "grad_norm": 1.7757701873779297, "learning_rate": 0.0004806359559722788, "loss": 3.2747, "step": 61175 }, { "epoch": 4.156814784617475, "grad_norm": 2.2971036434173584, "learning_rate": 0.00048059349096344614, "loss": 3.396, "step": 61180 }, { "epoch": 4.157154504688137, "grad_norm": 2.1127147674560547, "learning_rate": 0.0004805510259546134, "loss": 3.4025, "step": 61185 }, { "epoch": 4.157494224758799, "grad_norm": 1.946179747581482, "learning_rate": 0.00048050856094578064, "loss": 3.3744, "step": 61190 }, { "epoch": 4.15783394482946, "grad_norm": 1.5050177574157715, "learning_rate": 0.000480466095936948, "loss": 3.3835, "step": 61195 }, { "epoch": 4.158173664900122, "grad_norm": 2.2235569953918457, "learning_rate": 0.00048042363092811526, "loss": 3.3943, "step": 61200 }, { "epoch": 4.158513384970784, "grad_norm": 2.132695436477661, "learning_rate": 0.00048038116591928254, "loss": 3.2769, "step": 61205 }, { "epoch": 4.1588531050414455, "grad_norm": 2.0415151119232178, "learning_rate": 0.00048033870091044976, "loss": 3.6082, "step": 61210 }, { "epoch": 4.1591928251121075, "grad_norm": 1.5999412536621094, "learning_rate": 0.0004802962359016171, "loss": 3.3752, "step": 61215 }, { "epoch": 4.15953254518277, "grad_norm": 1.8458318710327148, "learning_rate": 0.0004802537708927844, "loss": 3.5441, "step": 61220 }, { "epoch": 4.159872265253431, "grad_norm": 2.2668821811676025, "learning_rate": 0.0004802113058839516, "loss": 3.3783, "step": 61225 }, { "epoch": 4.160211985324093, "grad_norm": 1.6191421747207642, "learning_rate": 0.00048016884087511894, "loss": 3.3188, "step": 61230 }, { "epoch": 4.160551705394755, "grad_norm": 1.7977752685546875, "learning_rate": 0.0004801263758662862, "loss": 3.3867, "step": 61235 }, { "epoch": 4.160891425465416, "grad_norm": 2.00815749168396, "learning_rate": 0.00048008391085745345, "loss": 3.2362, "step": 61240 }, { "epoch": 4.161231145536078, 
"grad_norm": 1.6474279165267944, "learning_rate": 0.0004800414458486207, "loss": 3.4228, "step": 61245 }, { "epoch": 4.16157086560674, "grad_norm": 2.7535974979400635, "learning_rate": 0.00047999898083978806, "loss": 3.3043, "step": 61250 }, { "epoch": 4.1619105856774015, "grad_norm": 2.077695846557617, "learning_rate": 0.0004799565158309553, "loss": 3.419, "step": 61255 }, { "epoch": 4.1622503057480635, "grad_norm": 1.672211766242981, "learning_rate": 0.00047991405082212257, "loss": 3.4692, "step": 61260 }, { "epoch": 4.162590025818726, "grad_norm": 1.8072094917297363, "learning_rate": 0.0004798715858132899, "loss": 3.4449, "step": 61265 }, { "epoch": 4.162929745889387, "grad_norm": 2.052213191986084, "learning_rate": 0.0004798291208044571, "loss": 3.2295, "step": 61270 }, { "epoch": 4.163269465960049, "grad_norm": 1.6737806797027588, "learning_rate": 0.0004797866557956244, "loss": 3.4549, "step": 61275 }, { "epoch": 4.163609186030711, "grad_norm": 2.067131519317627, "learning_rate": 0.0004797441907867917, "loss": 3.5371, "step": 61280 }, { "epoch": 4.163948906101372, "grad_norm": 1.532370924949646, "learning_rate": 0.00047970172577795897, "loss": 3.3862, "step": 61285 }, { "epoch": 4.164288626172034, "grad_norm": 2.0166409015655518, "learning_rate": 0.00047965926076912625, "loss": 3.3794, "step": 61290 }, { "epoch": 4.164628346242696, "grad_norm": 1.7751067876815796, "learning_rate": 0.0004796167957602935, "loss": 3.3593, "step": 61295 }, { "epoch": 4.1649680663133575, "grad_norm": 2.035414457321167, "learning_rate": 0.0004795743307514608, "loss": 3.4468, "step": 61300 }, { "epoch": 4.1653077863840196, "grad_norm": 1.7132453918457031, "learning_rate": 0.0004795318657426281, "loss": 3.4589, "step": 61305 }, { "epoch": 4.165647506454682, "grad_norm": 1.6527477502822876, "learning_rate": 0.00047948940073379537, "loss": 3.2807, "step": 61310 }, { "epoch": 4.165987226525343, "grad_norm": 2.030463695526123, "learning_rate": 0.0004794469357249626, "loss": 3.5026, 
"step": 61315 }, { "epoch": 4.166326946596005, "grad_norm": 1.8513214588165283, "learning_rate": 0.0004794044707161299, "loss": 3.2403, "step": 61320 }, { "epoch": 4.166666666666667, "grad_norm": 1.8062292337417603, "learning_rate": 0.0004793620057072972, "loss": 3.2637, "step": 61325 }, { "epoch": 4.167006386737328, "grad_norm": 1.888967514038086, "learning_rate": 0.00047931954069846443, "loss": 3.5108, "step": 61330 }, { "epoch": 4.16734610680799, "grad_norm": 1.7688068151474, "learning_rate": 0.00047927707568963177, "loss": 3.4567, "step": 61335 }, { "epoch": 4.167685826878652, "grad_norm": 1.9667704105377197, "learning_rate": 0.00047923461068079905, "loss": 3.6468, "step": 61340 }, { "epoch": 4.1680255469493135, "grad_norm": 1.7122758626937866, "learning_rate": 0.00047919214567196627, "loss": 3.0874, "step": 61345 }, { "epoch": 4.168365267019976, "grad_norm": 2.1505815982818604, "learning_rate": 0.0004791496806631336, "loss": 3.2495, "step": 61350 }, { "epoch": 4.168704987090638, "grad_norm": 1.8666729927062988, "learning_rate": 0.0004791072156543009, "loss": 3.5134, "step": 61355 }, { "epoch": 4.169044707161299, "grad_norm": 1.6612181663513184, "learning_rate": 0.0004790647506454681, "loss": 3.4601, "step": 61360 }, { "epoch": 4.169384427231961, "grad_norm": 1.9497464895248413, "learning_rate": 0.0004790222856366354, "loss": 3.118, "step": 61365 }, { "epoch": 4.169724147302623, "grad_norm": 2.0681982040405273, "learning_rate": 0.0004789798206278027, "loss": 3.2509, "step": 61370 }, { "epoch": 4.170063867373284, "grad_norm": 2.0049123764038086, "learning_rate": 0.00047893735561897, "loss": 3.2882, "step": 61375 }, { "epoch": 4.170403587443946, "grad_norm": 2.318103790283203, "learning_rate": 0.00047889489061013723, "loss": 3.4499, "step": 61380 }, { "epoch": 4.170743307514608, "grad_norm": 1.8602869510650635, "learning_rate": 0.00047885242560130457, "loss": 3.4668, "step": 61385 }, { "epoch": 4.1710830275852695, "grad_norm": 1.9098588228225708, "learning_rate": 
0.00047880996059247185, "loss": 3.0306, "step": 61390 }, { "epoch": 4.171422747655932, "grad_norm": 2.126398801803589, "learning_rate": 0.0004787674955836391, "loss": 3.613, "step": 61395 }, { "epoch": 4.171762467726594, "grad_norm": 2.162661075592041, "learning_rate": 0.00047872503057480635, "loss": 3.3917, "step": 61400 }, { "epoch": 4.172102187797255, "grad_norm": 1.56696355342865, "learning_rate": 0.0004786825655659737, "loss": 3.2505, "step": 61405 }, { "epoch": 4.172441907867917, "grad_norm": 2.3206512928009033, "learning_rate": 0.0004786401005571409, "loss": 3.5168, "step": 61410 }, { "epoch": 4.172781627938579, "grad_norm": 2.1557724475860596, "learning_rate": 0.0004785976355483082, "loss": 3.3477, "step": 61415 }, { "epoch": 4.17312134800924, "grad_norm": 1.9327479600906372, "learning_rate": 0.00047855517053947553, "loss": 3.3816, "step": 61420 }, { "epoch": 4.173461068079902, "grad_norm": 1.9061800241470337, "learning_rate": 0.00047851270553064275, "loss": 3.2534, "step": 61425 }, { "epoch": 4.173800788150564, "grad_norm": 1.5615953207015991, "learning_rate": 0.00047847024052181003, "loss": 3.3769, "step": 61430 }, { "epoch": 4.1741405082212255, "grad_norm": 2.1454124450683594, "learning_rate": 0.0004784277755129773, "loss": 3.4774, "step": 61435 }, { "epoch": 4.174480228291888, "grad_norm": 1.7411134243011475, "learning_rate": 0.0004783853105041446, "loss": 3.5937, "step": 61440 }, { "epoch": 4.17481994836255, "grad_norm": 1.7512203454971313, "learning_rate": 0.0004783428454953119, "loss": 3.3395, "step": 61445 }, { "epoch": 4.175159668433211, "grad_norm": 1.3527860641479492, "learning_rate": 0.00047830038048647915, "loss": 3.4766, "step": 61450 }, { "epoch": 4.175499388503873, "grad_norm": 1.7402321100234985, "learning_rate": 0.00047825791547764643, "loss": 3.4046, "step": 61455 }, { "epoch": 4.175839108574535, "grad_norm": 1.7788046598434448, "learning_rate": 0.0004782154504688137, "loss": 3.3, "step": 61460 }, { "epoch": 4.176178828645196, 
"grad_norm": 1.8727202415466309, "learning_rate": 0.000478172985459981, "loss": 3.4252, "step": 61465 }, { "epoch": 4.176518548715858, "grad_norm": 2.247021436691284, "learning_rate": 0.0004781305204511482, "loss": 3.4016, "step": 61470 }, { "epoch": 4.17685826878652, "grad_norm": 1.422570824623108, "learning_rate": 0.00047808805544231555, "loss": 3.3695, "step": 61475 }, { "epoch": 4.1771979888571815, "grad_norm": 1.5765643119812012, "learning_rate": 0.00047804559043348283, "loss": 3.4324, "step": 61480 }, { "epoch": 4.177537708927844, "grad_norm": 2.0157864093780518, "learning_rate": 0.00047800312542465006, "loss": 3.1043, "step": 61485 }, { "epoch": 4.177877428998505, "grad_norm": 1.7649749517440796, "learning_rate": 0.0004779606604158174, "loss": 3.3456, "step": 61490 }, { "epoch": 4.178217149069167, "grad_norm": 2.007145643234253, "learning_rate": 0.0004779181954069847, "loss": 3.3186, "step": 61495 }, { "epoch": 4.178556869139829, "grad_norm": 1.8991931676864624, "learning_rate": 0.0004778757303981519, "loss": 3.2548, "step": 61500 }, { "epoch": 4.17889658921049, "grad_norm": 2.4483399391174316, "learning_rate": 0.0004778332653893192, "loss": 3.1773, "step": 61505 }, { "epoch": 4.179236309281152, "grad_norm": 1.8684208393096924, "learning_rate": 0.0004777908003804865, "loss": 3.4032, "step": 61510 }, { "epoch": 4.179576029351814, "grad_norm": 2.378878593444824, "learning_rate": 0.00047774833537165374, "loss": 3.0915, "step": 61515 }, { "epoch": 4.1799157494224755, "grad_norm": 1.82306706905365, "learning_rate": 0.000477705870362821, "loss": 3.5075, "step": 61520 }, { "epoch": 4.1802554694931375, "grad_norm": 2.0046329498291016, "learning_rate": 0.00047766340535398836, "loss": 3.2385, "step": 61525 }, { "epoch": 4.1805951895638, "grad_norm": 1.3301563262939453, "learning_rate": 0.0004776209403451556, "loss": 3.5701, "step": 61530 }, { "epoch": 4.180934909634461, "grad_norm": 2.0612425804138184, "learning_rate": 0.00047757847533632286, "loss": 3.6447, "step": 
61535 }, { "epoch": 4.181274629705123, "grad_norm": 1.980616807937622, "learning_rate": 0.00047753601032749014, "loss": 3.3822, "step": 61540 }, { "epoch": 4.181614349775785, "grad_norm": 2.0230982303619385, "learning_rate": 0.0004774935453186575, "loss": 3.4967, "step": 61545 }, { "epoch": 4.181954069846446, "grad_norm": 1.7978545427322388, "learning_rate": 0.0004774510803098247, "loss": 3.3006, "step": 61550 }, { "epoch": 4.182293789917108, "grad_norm": 1.5651545524597168, "learning_rate": 0.000477408615300992, "loss": 3.2972, "step": 61555 }, { "epoch": 4.18263350998777, "grad_norm": 1.6814746856689453, "learning_rate": 0.0004773661502921593, "loss": 3.1601, "step": 61560 }, { "epoch": 4.1829732300584315, "grad_norm": 1.542611837387085, "learning_rate": 0.00047732368528332654, "loss": 3.5528, "step": 61565 }, { "epoch": 4.1833129501290935, "grad_norm": 2.004862070083618, "learning_rate": 0.0004772812202744938, "loss": 3.3799, "step": 61570 }, { "epoch": 4.183652670199756, "grad_norm": 1.6434799432754517, "learning_rate": 0.0004772387552656611, "loss": 3.4621, "step": 61575 }, { "epoch": 4.183992390270417, "grad_norm": 1.526911973953247, "learning_rate": 0.0004771962902568284, "loss": 3.5327, "step": 61580 }, { "epoch": 4.184332110341079, "grad_norm": 1.3053293228149414, "learning_rate": 0.00047715382524799566, "loss": 3.4769, "step": 61585 }, { "epoch": 4.184671830411741, "grad_norm": 1.564003586769104, "learning_rate": 0.00047711136023916294, "loss": 3.4163, "step": 61590 }, { "epoch": 4.185011550482402, "grad_norm": 1.7282130718231201, "learning_rate": 0.0004770688952303302, "loss": 3.5096, "step": 61595 }, { "epoch": 4.185351270553064, "grad_norm": 1.506221055984497, "learning_rate": 0.0004770264302214975, "loss": 3.5878, "step": 61600 }, { "epoch": 4.185690990623726, "grad_norm": 1.8676317930221558, "learning_rate": 0.0004769839652126648, "loss": 3.42, "step": 61605 }, { "epoch": 4.1860307106943875, "grad_norm": 2.2492637634277344, "learning_rate": 
0.000476941500203832, "loss": 3.4676, "step": 61610 }, { "epoch": 4.1863704307650496, "grad_norm": 1.9695357084274292, "learning_rate": 0.00047689903519499934, "loss": 3.3755, "step": 61615 }, { "epoch": 4.186710150835712, "grad_norm": 2.490736961364746, "learning_rate": 0.0004768565701861666, "loss": 3.4759, "step": 61620 }, { "epoch": 4.187049870906373, "grad_norm": 1.7272204160690308, "learning_rate": 0.00047681410517733385, "loss": 3.333, "step": 61625 }, { "epoch": 4.187389590977035, "grad_norm": 1.9941428899765015, "learning_rate": 0.0004767716401685012, "loss": 3.4826, "step": 61630 }, { "epoch": 4.187729311047697, "grad_norm": 2.0939252376556396, "learning_rate": 0.00047672917515966846, "loss": 3.2653, "step": 61635 }, { "epoch": 4.188069031118358, "grad_norm": 1.7787641286849976, "learning_rate": 0.0004766867101508357, "loss": 3.5318, "step": 61640 }, { "epoch": 4.18840875118902, "grad_norm": 1.539757251739502, "learning_rate": 0.000476644245142003, "loss": 3.3114, "step": 61645 }, { "epoch": 4.188748471259682, "grad_norm": 2.382080554962158, "learning_rate": 0.0004766017801331703, "loss": 3.2966, "step": 61650 }, { "epoch": 4.1890881913303435, "grad_norm": 2.154664993286133, "learning_rate": 0.00047655931512433753, "loss": 3.4769, "step": 61655 }, { "epoch": 4.189427911401006, "grad_norm": 2.11958646774292, "learning_rate": 0.0004765168501155048, "loss": 3.2074, "step": 61660 }, { "epoch": 4.189767631471668, "grad_norm": 1.649657130241394, "learning_rate": 0.00047647438510667214, "loss": 3.3545, "step": 61665 }, { "epoch": 4.190107351542329, "grad_norm": 1.7747390270233154, "learning_rate": 0.00047643192009783937, "loss": 3.3629, "step": 61670 }, { "epoch": 4.190447071612991, "grad_norm": 2.0383760929107666, "learning_rate": 0.00047638945508900665, "loss": 3.4406, "step": 61675 }, { "epoch": 4.190786791683653, "grad_norm": 1.9563453197479248, "learning_rate": 0.000476346990080174, "loss": 3.2818, "step": 61680 }, { "epoch": 4.191126511754314, "grad_norm": 
1.8708176612854004, "learning_rate": 0.0004763045250713412, "loss": 3.1809, "step": 61685 }, { "epoch": 4.191466231824976, "grad_norm": 1.7513659000396729, "learning_rate": 0.0004762620600625085, "loss": 3.5309, "step": 61690 }, { "epoch": 4.191805951895638, "grad_norm": 1.634667992591858, "learning_rate": 0.00047621959505367577, "loss": 3.6522, "step": 61695 }, { "epoch": 4.1921456719662995, "grad_norm": 1.9081780910491943, "learning_rate": 0.00047617713004484305, "loss": 3.4982, "step": 61700 }, { "epoch": 4.192485392036962, "grad_norm": 1.5698705911636353, "learning_rate": 0.00047613466503601033, "loss": 3.2196, "step": 61705 }, { "epoch": 4.192825112107624, "grad_norm": 1.987583041191101, "learning_rate": 0.0004760922000271776, "loss": 3.2666, "step": 61710 }, { "epoch": 4.193164832178285, "grad_norm": 1.7054845094680786, "learning_rate": 0.00047604973501834494, "loss": 3.1255, "step": 61715 }, { "epoch": 4.193504552248947, "grad_norm": 1.7565838098526, "learning_rate": 0.00047600727000951217, "loss": 3.3283, "step": 61720 }, { "epoch": 4.193844272319609, "grad_norm": 1.61762273311615, "learning_rate": 0.00047596480500067945, "loss": 3.2654, "step": 61725 }, { "epoch": 4.19418399239027, "grad_norm": 1.783613920211792, "learning_rate": 0.00047592233999184673, "loss": 3.4846, "step": 61730 }, { "epoch": 4.194523712460932, "grad_norm": 1.7275304794311523, "learning_rate": 0.000475879874983014, "loss": 3.3706, "step": 61735 }, { "epoch": 4.194863432531594, "grad_norm": 1.832132339477539, "learning_rate": 0.0004758374099741813, "loss": 3.3956, "step": 61740 }, { "epoch": 4.1952031526022555, "grad_norm": 1.8739197254180908, "learning_rate": 0.00047579494496534857, "loss": 3.4428, "step": 61745 }, { "epoch": 4.195542872672918, "grad_norm": 1.4139676094055176, "learning_rate": 0.00047575247995651585, "loss": 3.4442, "step": 61750 }, { "epoch": 4.19588259274358, "grad_norm": 1.6134235858917236, "learning_rate": 0.00047571001494768313, "loss": 3.435, "step": 61755 }, { 
"epoch": 4.196222312814241, "grad_norm": 2.1089284420013428, "learning_rate": 0.0004756675499388504, "loss": 3.3596, "step": 61760 }, { "epoch": 4.196562032884903, "grad_norm": 1.6293705701828003, "learning_rate": 0.00047562508493001764, "loss": 3.2155, "step": 61765 }, { "epoch": 4.196901752955565, "grad_norm": 2.119520664215088, "learning_rate": 0.00047558261992118497, "loss": 3.538, "step": 61770 }, { "epoch": 4.197241473026226, "grad_norm": 1.9850735664367676, "learning_rate": 0.00047554015491235225, "loss": 3.4843, "step": 61775 }, { "epoch": 4.197581193096888, "grad_norm": 1.663564682006836, "learning_rate": 0.0004754976899035195, "loss": 3.3653, "step": 61780 }, { "epoch": 4.19792091316755, "grad_norm": 2.4450461864471436, "learning_rate": 0.0004754552248946868, "loss": 3.3412, "step": 61785 }, { "epoch": 4.1982606332382115, "grad_norm": 1.486989140510559, "learning_rate": 0.0004754127598858541, "loss": 3.4045, "step": 61790 }, { "epoch": 4.198600353308874, "grad_norm": 1.9039175510406494, "learning_rate": 0.0004753702948770213, "loss": 3.2832, "step": 61795 }, { "epoch": 4.198940073379536, "grad_norm": 1.7304837703704834, "learning_rate": 0.0004753278298681886, "loss": 3.6003, "step": 61800 }, { "epoch": 4.199279793450197, "grad_norm": 2.201659679412842, "learning_rate": 0.00047528536485935593, "loss": 3.5318, "step": 61805 }, { "epoch": 4.199619513520859, "grad_norm": 1.5009766817092896, "learning_rate": 0.00047524289985052316, "loss": 3.4485, "step": 61810 }, { "epoch": 4.199959233591521, "grad_norm": 1.5618270635604858, "learning_rate": 0.00047520043484169044, "loss": 3.4618, "step": 61815 }, { "epoch": 4.200298953662182, "grad_norm": 2.2482569217681885, "learning_rate": 0.00047515796983285777, "loss": 3.4449, "step": 61820 }, { "epoch": 4.200638673732844, "grad_norm": 2.0208983421325684, "learning_rate": 0.000475115504824025, "loss": 3.3535, "step": 61825 }, { "epoch": 4.200978393803506, "grad_norm": 1.691094160079956, "learning_rate": 
0.0004750730398151923, "loss": 3.5175, "step": 61830 }, { "epoch": 4.2013181138741675, "grad_norm": 2.857753276824951, "learning_rate": 0.00047503057480635956, "loss": 3.4506, "step": 61835 }, { "epoch": 4.20165783394483, "grad_norm": 1.549081802368164, "learning_rate": 0.00047498810979752684, "loss": 3.3592, "step": 61840 }, { "epoch": 4.201997554015492, "grad_norm": 2.3644120693206787, "learning_rate": 0.0004749456447886941, "loss": 3.3604, "step": 61845 }, { "epoch": 4.202337274086153, "grad_norm": 1.7925916910171509, "learning_rate": 0.0004749031797798614, "loss": 3.4735, "step": 61850 }, { "epoch": 4.202676994156815, "grad_norm": 2.412064790725708, "learning_rate": 0.0004748607147710287, "loss": 3.2666, "step": 61855 }, { "epoch": 4.203016714227476, "grad_norm": 1.7765583992004395, "learning_rate": 0.00047481824976219596, "loss": 3.2209, "step": 61860 }, { "epoch": 4.203356434298138, "grad_norm": 1.8771846294403076, "learning_rate": 0.00047477578475336324, "loss": 3.2793, "step": 61865 }, { "epoch": 4.2036961543688, "grad_norm": 2.03704571723938, "learning_rate": 0.00047473331974453046, "loss": 3.3569, "step": 61870 }, { "epoch": 4.2040358744394615, "grad_norm": 2.556033134460449, "learning_rate": 0.0004746908547356978, "loss": 3.0325, "step": 61875 }, { "epoch": 4.2043755945101235, "grad_norm": 1.578757643699646, "learning_rate": 0.0004746483897268651, "loss": 3.4516, "step": 61880 }, { "epoch": 4.204715314580786, "grad_norm": 1.6886727809906006, "learning_rate": 0.00047460592471803236, "loss": 3.4343, "step": 61885 }, { "epoch": 4.205055034651447, "grad_norm": 2.112330198287964, "learning_rate": 0.00047456345970919964, "loss": 3.4194, "step": 61890 }, { "epoch": 4.205394754722109, "grad_norm": 1.9730126857757568, "learning_rate": 0.0004745209947003669, "loss": 3.4714, "step": 61895 }, { "epoch": 4.205734474792771, "grad_norm": 2.048161029815674, "learning_rate": 0.0004744785296915342, "loss": 3.3566, "step": 61900 }, { "epoch": 4.206074194863432, 
"grad_norm": 2.060548782348633, "learning_rate": 0.0004744360646827015, "loss": 3.3179, "step": 61905 }, { "epoch": 4.206413914934094, "grad_norm": 1.6535358428955078, "learning_rate": 0.00047439359967386876, "loss": 3.3923, "step": 61910 }, { "epoch": 4.206753635004756, "grad_norm": 1.4585051536560059, "learning_rate": 0.00047435113466503604, "loss": 3.4965, "step": 61915 }, { "epoch": 4.2070933550754175, "grad_norm": 2.3878366947174072, "learning_rate": 0.00047430866965620326, "loss": 3.5573, "step": 61920 }, { "epoch": 4.20743307514608, "grad_norm": 1.5808767080307007, "learning_rate": 0.0004742662046473706, "loss": 3.1959, "step": 61925 }, { "epoch": 4.207772795216742, "grad_norm": 1.5411475896835327, "learning_rate": 0.0004742237396385379, "loss": 3.3279, "step": 61930 }, { "epoch": 4.208112515287403, "grad_norm": 1.4050145149230957, "learning_rate": 0.0004741812746297051, "loss": 3.3332, "step": 61935 }, { "epoch": 4.208452235358065, "grad_norm": 2.40731143951416, "learning_rate": 0.00047413880962087244, "loss": 3.4532, "step": 61940 }, { "epoch": 4.208791955428727, "grad_norm": 1.5673322677612305, "learning_rate": 0.0004740963446120397, "loss": 3.2356, "step": 61945 }, { "epoch": 4.209131675499388, "grad_norm": 1.8382058143615723, "learning_rate": 0.00047405387960320694, "loss": 3.5172, "step": 61950 }, { "epoch": 4.20947139557005, "grad_norm": 1.9831256866455078, "learning_rate": 0.0004740114145943742, "loss": 3.4161, "step": 61955 }, { "epoch": 4.209811115640712, "grad_norm": 2.1683032512664795, "learning_rate": 0.00047396894958554156, "loss": 3.3881, "step": 61960 }, { "epoch": 4.2101508357113735, "grad_norm": 2.0752012729644775, "learning_rate": 0.0004739264845767088, "loss": 3.428, "step": 61965 }, { "epoch": 4.210490555782036, "grad_norm": 1.6276277303695679, "learning_rate": 0.00047388401956787607, "loss": 3.2458, "step": 61970 }, { "epoch": 4.210830275852698, "grad_norm": 1.60543954372406, "learning_rate": 0.0004738415545590434, "loss": 3.411, 
"step": 61975 }, { "epoch": 4.211169995923359, "grad_norm": 1.7956349849700928, "learning_rate": 0.0004737990895502106, "loss": 3.3421, "step": 61980 }, { "epoch": 4.211509715994021, "grad_norm": 1.8576586246490479, "learning_rate": 0.0004737566245413779, "loss": 3.3579, "step": 61985 }, { "epoch": 4.211849436064683, "grad_norm": 1.8716503381729126, "learning_rate": 0.0004737141595325452, "loss": 3.3337, "step": 61990 }, { "epoch": 4.212189156135344, "grad_norm": 2.505608320236206, "learning_rate": 0.00047367169452371247, "loss": 3.4176, "step": 61995 }, { "epoch": 4.212528876206006, "grad_norm": 1.5698143243789673, "learning_rate": 0.00047362922951487975, "loss": 3.4657, "step": 62000 }, { "epoch": 4.212868596276668, "grad_norm": 2.292057752609253, "learning_rate": 0.000473586764506047, "loss": 3.4062, "step": 62005 }, { "epoch": 4.2132083163473295, "grad_norm": 2.4009039402008057, "learning_rate": 0.0004735442994972143, "loss": 3.2805, "step": 62010 }, { "epoch": 4.213548036417992, "grad_norm": 1.3531360626220703, "learning_rate": 0.0004735018344883816, "loss": 3.4359, "step": 62015 }, { "epoch": 4.213887756488654, "grad_norm": 1.8986833095550537, "learning_rate": 0.00047345936947954887, "loss": 3.4864, "step": 62020 }, { "epoch": 4.214227476559315, "grad_norm": 1.6414539813995361, "learning_rate": 0.0004734169044707161, "loss": 3.5233, "step": 62025 }, { "epoch": 4.214567196629977, "grad_norm": 1.7750457525253296, "learning_rate": 0.0004733744394618834, "loss": 3.3673, "step": 62030 }, { "epoch": 4.214906916700639, "grad_norm": 2.655618190765381, "learning_rate": 0.0004733319744530507, "loss": 3.5746, "step": 62035 }, { "epoch": 4.2152466367713, "grad_norm": 2.157228469848633, "learning_rate": 0.00047328950944421793, "loss": 3.147, "step": 62040 }, { "epoch": 4.215586356841962, "grad_norm": 1.8007817268371582, "learning_rate": 0.00047324704443538527, "loss": 3.5463, "step": 62045 }, { "epoch": 4.215926076912624, "grad_norm": 1.771966814994812, "learning_rate": 
0.00047320457942655255, "loss": 3.306, "step": 62050 }, { "epoch": 4.2162657969832855, "grad_norm": 2.1667349338531494, "learning_rate": 0.0004731621144177198, "loss": 3.2707, "step": 62055 }, { "epoch": 4.216605517053948, "grad_norm": 1.7575732469558716, "learning_rate": 0.00047311964940888705, "loss": 3.3188, "step": 62060 }, { "epoch": 4.21694523712461, "grad_norm": 1.506422758102417, "learning_rate": 0.0004730771844000544, "loss": 3.3133, "step": 62065 }, { "epoch": 4.217284957195271, "grad_norm": 1.800232172012329, "learning_rate": 0.00047303471939122167, "loss": 3.5095, "step": 62070 }, { "epoch": 4.217624677265933, "grad_norm": 1.781307578086853, "learning_rate": 0.0004729922543823889, "loss": 3.2881, "step": 62075 }, { "epoch": 4.217964397336595, "grad_norm": 1.6820707321166992, "learning_rate": 0.0004729497893735562, "loss": 3.3083, "step": 62080 }, { "epoch": 4.218304117407256, "grad_norm": 2.0847084522247314, "learning_rate": 0.0004729073243647235, "loss": 3.4214, "step": 62085 }, { "epoch": 4.218643837477918, "grad_norm": 2.4155426025390625, "learning_rate": 0.00047286485935589073, "loss": 3.4599, "step": 62090 }, { "epoch": 4.21898355754858, "grad_norm": 1.8508599996566772, "learning_rate": 0.000472822394347058, "loss": 3.3064, "step": 62095 }, { "epoch": 4.2193232776192415, "grad_norm": 1.873642086982727, "learning_rate": 0.00047277992933822535, "loss": 3.438, "step": 62100 }, { "epoch": 4.219662997689904, "grad_norm": 1.5995442867279053, "learning_rate": 0.0004727374643293926, "loss": 3.6308, "step": 62105 }, { "epoch": 4.220002717760566, "grad_norm": 2.1176178455352783, "learning_rate": 0.00047269499932055985, "loss": 3.0362, "step": 62110 }, { "epoch": 4.220342437831227, "grad_norm": 1.785484790802002, "learning_rate": 0.0004726525343117272, "loss": 3.3596, "step": 62115 }, { "epoch": 4.220682157901889, "grad_norm": 1.5968358516693115, "learning_rate": 0.0004726100693028944, "loss": 3.4116, "step": 62120 }, { "epoch": 4.221021877972551, 
"grad_norm": 1.8304444551467896, "learning_rate": 0.0004725676042940617, "loss": 3.3862, "step": 62125 }, { "epoch": 4.221361598043212, "grad_norm": 2.0169386863708496, "learning_rate": 0.000472525139285229, "loss": 3.3943, "step": 62130 }, { "epoch": 4.221701318113874, "grad_norm": 1.8429290056228638, "learning_rate": 0.00047248267427639625, "loss": 3.2219, "step": 62135 }, { "epoch": 4.222041038184536, "grad_norm": 1.674924373626709, "learning_rate": 0.00047244020926756353, "loss": 3.3479, "step": 62140 }, { "epoch": 4.2223807582551975, "grad_norm": 1.5193599462509155, "learning_rate": 0.0004723977442587308, "loss": 3.4347, "step": 62145 }, { "epoch": 4.22272047832586, "grad_norm": 1.8565621376037598, "learning_rate": 0.0004723552792498981, "loss": 3.5303, "step": 62150 }, { "epoch": 4.223060198396522, "grad_norm": 2.0968692302703857, "learning_rate": 0.0004723128142410654, "loss": 3.5872, "step": 62155 }, { "epoch": 4.223399918467183, "grad_norm": 1.9589896202087402, "learning_rate": 0.00047227034923223265, "loss": 3.3235, "step": 62160 }, { "epoch": 4.223739638537845, "grad_norm": 1.8851367235183716, "learning_rate": 0.0004722278842233999, "loss": 3.1562, "step": 62165 }, { "epoch": 4.224079358608506, "grad_norm": 2.0455667972564697, "learning_rate": 0.0004721854192145672, "loss": 3.1519, "step": 62170 }, { "epoch": 4.224419078679168, "grad_norm": 1.561319351196289, "learning_rate": 0.0004721429542057345, "loss": 3.21, "step": 62175 }, { "epoch": 4.22475879874983, "grad_norm": 2.571866512298584, "learning_rate": 0.0004721004891969017, "loss": 3.5898, "step": 62180 }, { "epoch": 4.2250985188204915, "grad_norm": 2.23659348487854, "learning_rate": 0.00047205802418806905, "loss": 3.3585, "step": 62185 }, { "epoch": 4.2254382388911536, "grad_norm": 2.4929778575897217, "learning_rate": 0.00047201555917923633, "loss": 3.3501, "step": 62190 }, { "epoch": 4.225777958961816, "grad_norm": 1.8531193733215332, "learning_rate": 0.00047197309417040356, "loss": 3.2631, "step": 
62195 }, { "epoch": 4.226117679032477, "grad_norm": 1.920162558555603, "learning_rate": 0.0004719306291615709, "loss": 3.4947, "step": 62200 }, { "epoch": 4.226457399103139, "grad_norm": 2.166461229324341, "learning_rate": 0.0004718881641527382, "loss": 3.4442, "step": 62205 }, { "epoch": 4.226797119173801, "grad_norm": 1.7164989709854126, "learning_rate": 0.0004718456991439054, "loss": 3.1885, "step": 62210 }, { "epoch": 4.227136839244462, "grad_norm": 1.9163691997528076, "learning_rate": 0.0004718032341350727, "loss": 3.214, "step": 62215 }, { "epoch": 4.227476559315124, "grad_norm": 1.7249705791473389, "learning_rate": 0.00047176076912624, "loss": 3.4517, "step": 62220 }, { "epoch": 4.227816279385786, "grad_norm": 1.8376911878585815, "learning_rate": 0.0004717183041174073, "loss": 3.2576, "step": 62225 }, { "epoch": 4.2281559994564475, "grad_norm": 2.4914956092834473, "learning_rate": 0.0004716758391085745, "loss": 3.414, "step": 62230 }, { "epoch": 4.22849571952711, "grad_norm": 1.9699549674987793, "learning_rate": 0.00047163337409974186, "loss": 3.3928, "step": 62235 }, { "epoch": 4.228835439597772, "grad_norm": 1.9612239599227905, "learning_rate": 0.00047159090909090914, "loss": 3.4164, "step": 62240 }, { "epoch": 4.229175159668433, "grad_norm": 2.0442497730255127, "learning_rate": 0.00047154844408207636, "loss": 3.4707, "step": 62245 }, { "epoch": 4.229514879739095, "grad_norm": 1.7530429363250732, "learning_rate": 0.00047150597907324364, "loss": 3.2238, "step": 62250 }, { "epoch": 4.229854599809757, "grad_norm": 1.9506205320358276, "learning_rate": 0.000471463514064411, "loss": 3.3111, "step": 62255 }, { "epoch": 4.230194319880418, "grad_norm": 1.3388348817825317, "learning_rate": 0.0004714210490555782, "loss": 3.5369, "step": 62260 }, { "epoch": 4.23053403995108, "grad_norm": 1.987797498703003, "learning_rate": 0.0004713785840467455, "loss": 3.4364, "step": 62265 }, { "epoch": 4.230873760021742, "grad_norm": 2.161240816116333, "learning_rate": 
0.0004713361190379128, "loss": 3.1955, "step": 62270 }, { "epoch": 4.2312134800924035, "grad_norm": 1.7126981019973755, "learning_rate": 0.00047129365402908004, "loss": 3.483, "step": 62275 }, { "epoch": 4.231553200163066, "grad_norm": 1.9184211492538452, "learning_rate": 0.0004712511890202473, "loss": 3.5481, "step": 62280 }, { "epoch": 4.231892920233728, "grad_norm": 1.5442558526992798, "learning_rate": 0.0004712087240114146, "loss": 3.4649, "step": 62285 }, { "epoch": 4.232232640304389, "grad_norm": 1.9089487791061401, "learning_rate": 0.0004711662590025819, "loss": 3.4298, "step": 62290 }, { "epoch": 4.232572360375051, "grad_norm": 1.9615426063537598, "learning_rate": 0.00047112379399374916, "loss": 3.4855, "step": 62295 }, { "epoch": 4.232912080445713, "grad_norm": 1.9002822637557983, "learning_rate": 0.00047108132898491644, "loss": 3.3722, "step": 62300 }, { "epoch": 4.233251800516374, "grad_norm": 2.3604581356048584, "learning_rate": 0.0004710388639760837, "loss": 3.2431, "step": 62305 }, { "epoch": 4.233591520587036, "grad_norm": 1.8939454555511475, "learning_rate": 0.000470996398967251, "loss": 3.1103, "step": 62310 }, { "epoch": 4.233931240657698, "grad_norm": 1.787095069885254, "learning_rate": 0.0004709539339584183, "loss": 3.5223, "step": 62315 }, { "epoch": 4.2342709607283595, "grad_norm": 1.888991117477417, "learning_rate": 0.0004709114689495855, "loss": 3.1003, "step": 62320 }, { "epoch": 4.234610680799022, "grad_norm": 2.3835980892181396, "learning_rate": 0.00047086900394075284, "loss": 3.1733, "step": 62325 }, { "epoch": 4.234950400869684, "grad_norm": 2.009629964828491, "learning_rate": 0.0004708265389319201, "loss": 3.2583, "step": 62330 }, { "epoch": 4.235290120940345, "grad_norm": 1.384881854057312, "learning_rate": 0.00047078407392308735, "loss": 3.5066, "step": 62335 }, { "epoch": 4.235629841011007, "grad_norm": 1.7204118967056274, "learning_rate": 0.0004707416089142547, "loss": 3.3821, "step": 62340 }, { "epoch": 4.235969561081669, 
"grad_norm": 2.1280527114868164, "learning_rate": 0.00047069914390542196, "loss": 3.1847, "step": 62345 }, { "epoch": 4.23630928115233, "grad_norm": 2.100398302078247, "learning_rate": 0.0004706566788965892, "loss": 3.3649, "step": 62350 }, { "epoch": 4.236649001222992, "grad_norm": 1.5576071739196777, "learning_rate": 0.00047061421388775647, "loss": 3.2629, "step": 62355 }, { "epoch": 4.236988721293654, "grad_norm": 1.9427049160003662, "learning_rate": 0.0004705717488789238, "loss": 3.396, "step": 62360 }, { "epoch": 4.2373284413643155, "grad_norm": 1.4163222312927246, "learning_rate": 0.00047052928387009103, "loss": 3.3929, "step": 62365 }, { "epoch": 4.237668161434978, "grad_norm": 1.6741472482681274, "learning_rate": 0.0004704868188612583, "loss": 3.4072, "step": 62370 }, { "epoch": 4.23800788150564, "grad_norm": 1.8923612833023071, "learning_rate": 0.00047044435385242564, "loss": 3.2468, "step": 62375 }, { "epoch": 4.238347601576301, "grad_norm": 1.9334667921066284, "learning_rate": 0.00047040188884359287, "loss": 3.4552, "step": 62380 }, { "epoch": 4.238687321646963, "grad_norm": 1.9985549449920654, "learning_rate": 0.00047035942383476015, "loss": 3.3952, "step": 62385 }, { "epoch": 4.239027041717625, "grad_norm": 1.8077671527862549, "learning_rate": 0.00047031695882592743, "loss": 3.4371, "step": 62390 }, { "epoch": 4.239366761788286, "grad_norm": 2.1963627338409424, "learning_rate": 0.00047027449381709476, "loss": 3.5457, "step": 62395 }, { "epoch": 4.239706481858948, "grad_norm": 2.051811456680298, "learning_rate": 0.000470232028808262, "loss": 3.3684, "step": 62400 }, { "epoch": 4.24004620192961, "grad_norm": 1.9946792125701904, "learning_rate": 0.00047018956379942927, "loss": 3.4423, "step": 62405 }, { "epoch": 4.2403859220002715, "grad_norm": 1.8407624959945679, "learning_rate": 0.0004701470987905966, "loss": 3.5478, "step": 62410 }, { "epoch": 4.240725642070934, "grad_norm": 1.9300545454025269, "learning_rate": 0.00047010463378176383, "loss": 3.4989, 
"step": 62415 }, { "epoch": 4.241065362141596, "grad_norm": 1.469043493270874, "learning_rate": 0.0004700621687729311, "loss": 3.5731, "step": 62420 }, { "epoch": 4.241405082212257, "grad_norm": 1.9679899215698242, "learning_rate": 0.0004700197037640984, "loss": 3.4219, "step": 62425 }, { "epoch": 4.241744802282919, "grad_norm": 1.9101409912109375, "learning_rate": 0.00046997723875526567, "loss": 3.1856, "step": 62430 }, { "epoch": 4.242084522353581, "grad_norm": 1.8131464719772339, "learning_rate": 0.00046993477374643295, "loss": 3.2771, "step": 62435 }, { "epoch": 4.242424242424242, "grad_norm": 1.7916024923324585, "learning_rate": 0.00046989230873760023, "loss": 3.3045, "step": 62440 }, { "epoch": 4.242763962494904, "grad_norm": 1.9056122303009033, "learning_rate": 0.0004698498437287675, "loss": 3.6103, "step": 62445 }, { "epoch": 4.243103682565566, "grad_norm": 1.8638241291046143, "learning_rate": 0.0004698073787199348, "loss": 3.4085, "step": 62450 }, { "epoch": 4.2434434026362275, "grad_norm": 1.7914588451385498, "learning_rate": 0.00046976491371110207, "loss": 3.4629, "step": 62455 }, { "epoch": 4.24378312270689, "grad_norm": 1.1414388418197632, "learning_rate": 0.0004697224487022693, "loss": 3.4809, "step": 62460 }, { "epoch": 4.244122842777552, "grad_norm": 1.6465905904769897, "learning_rate": 0.00046967998369343663, "loss": 3.3181, "step": 62465 }, { "epoch": 4.244462562848213, "grad_norm": 1.8201732635498047, "learning_rate": 0.0004696375186846039, "loss": 3.0618, "step": 62470 }, { "epoch": 4.244802282918875, "grad_norm": 1.6826171875, "learning_rate": 0.00046959505367577114, "loss": 3.2755, "step": 62475 }, { "epoch": 4.245142002989537, "grad_norm": 1.7077410221099854, "learning_rate": 0.00046955258866693847, "loss": 3.1253, "step": 62480 }, { "epoch": 4.245481723060198, "grad_norm": 1.829679250717163, "learning_rate": 0.00046951012365810575, "loss": 3.2246, "step": 62485 }, { "epoch": 4.24582144313086, "grad_norm": 2.3984410762786865, "learning_rate": 
0.000469467658649273, "loss": 3.2127, "step": 62490 }, { "epoch": 4.246161163201522, "grad_norm": 2.0273544788360596, "learning_rate": 0.0004694251936404403, "loss": 3.2676, "step": 62495 }, { "epoch": 4.246500883272184, "grad_norm": 2.2373478412628174, "learning_rate": 0.0004693827286316076, "loss": 3.4449, "step": 62500 }, { "epoch": 4.246840603342846, "grad_norm": 1.9870799779891968, "learning_rate": 0.0004693402636227748, "loss": 3.3675, "step": 62505 }, { "epoch": 4.247180323413508, "grad_norm": 2.318369150161743, "learning_rate": 0.0004692977986139421, "loss": 3.4001, "step": 62510 }, { "epoch": 4.247520043484169, "grad_norm": 2.549330711364746, "learning_rate": 0.00046925533360510943, "loss": 3.4638, "step": 62515 }, { "epoch": 4.247859763554831, "grad_norm": 2.32373046875, "learning_rate": 0.00046921286859627666, "loss": 3.1443, "step": 62520 }, { "epoch": 4.248199483625493, "grad_norm": 1.7933083772659302, "learning_rate": 0.00046917040358744394, "loss": 3.1602, "step": 62525 }, { "epoch": 4.248539203696154, "grad_norm": 1.6568593978881836, "learning_rate": 0.00046912793857861127, "loss": 3.3255, "step": 62530 }, { "epoch": 4.248878923766816, "grad_norm": 1.728759527206421, "learning_rate": 0.0004690854735697785, "loss": 3.3046, "step": 62535 }, { "epoch": 4.2492186438374775, "grad_norm": 2.04105544090271, "learning_rate": 0.0004690430085609458, "loss": 3.5166, "step": 62540 }, { "epoch": 4.24955836390814, "grad_norm": 1.844459056854248, "learning_rate": 0.00046900054355211306, "loss": 3.2526, "step": 62545 }, { "epoch": 4.249898083978802, "grad_norm": 1.6431306600570679, "learning_rate": 0.00046895807854328034, "loss": 3.2781, "step": 62550 }, { "epoch": 4.250237804049463, "grad_norm": 1.7031055688858032, "learning_rate": 0.0004689156135344476, "loss": 3.2688, "step": 62555 }, { "epoch": 4.250577524120125, "grad_norm": 1.598978042602539, "learning_rate": 0.0004688731485256149, "loss": 3.0218, "step": 62560 }, { "epoch": 4.250917244190787, "grad_norm": 
2.502746343612671, "learning_rate": 0.00046883068351678223, "loss": 3.3631, "step": 62565 }, { "epoch": 4.251256964261448, "grad_norm": 1.812000036239624, "learning_rate": 0.00046878821850794946, "loss": 3.3823, "step": 62570 }, { "epoch": 4.25159668433211, "grad_norm": 1.971459150314331, "learning_rate": 0.00046874575349911674, "loss": 3.3756, "step": 62575 }, { "epoch": 4.251936404402772, "grad_norm": 1.9330651760101318, "learning_rate": 0.000468703288490284, "loss": 3.2327, "step": 62580 }, { "epoch": 4.2522761244734335, "grad_norm": 1.7765098810195923, "learning_rate": 0.0004686608234814513, "loss": 3.4376, "step": 62585 }, { "epoch": 4.252615844544096, "grad_norm": 1.8821481466293335, "learning_rate": 0.0004686183584726186, "loss": 3.4017, "step": 62590 }, { "epoch": 4.252955564614758, "grad_norm": 1.5038304328918457, "learning_rate": 0.00046857589346378586, "loss": 3.3741, "step": 62595 }, { "epoch": 4.253295284685419, "grad_norm": 1.9538131952285767, "learning_rate": 0.00046853342845495314, "loss": 3.342, "step": 62600 }, { "epoch": 4.253635004756081, "grad_norm": 1.9003570079803467, "learning_rate": 0.0004684909634461204, "loss": 3.1472, "step": 62605 }, { "epoch": 4.253974724826743, "grad_norm": 1.8804185390472412, "learning_rate": 0.0004684484984372877, "loss": 3.3605, "step": 62610 }, { "epoch": 4.254314444897404, "grad_norm": 1.5849593877792358, "learning_rate": 0.0004684060334284549, "loss": 3.3312, "step": 62615 }, { "epoch": 4.254654164968066, "grad_norm": 1.9720426797866821, "learning_rate": 0.00046836356841962226, "loss": 3.4478, "step": 62620 }, { "epoch": 4.254993885038728, "grad_norm": 1.8787710666656494, "learning_rate": 0.00046832110341078954, "loss": 3.4371, "step": 62625 }, { "epoch": 4.2553336051093895, "grad_norm": 1.8586432933807373, "learning_rate": 0.00046827863840195676, "loss": 3.5035, "step": 62630 }, { "epoch": 4.255673325180052, "grad_norm": 2.136171817779541, "learning_rate": 0.0004682361733931241, "loss": 3.4149, "step": 62635 }, 
{ "epoch": 4.256013045250714, "grad_norm": 2.342113733291626, "learning_rate": 0.0004681937083842914, "loss": 3.1088, "step": 62640 }, { "epoch": 4.256352765321375, "grad_norm": 1.7295706272125244, "learning_rate": 0.0004681512433754586, "loss": 3.4647, "step": 62645 }, { "epoch": 4.256692485392037, "grad_norm": 1.6803468465805054, "learning_rate": 0.0004681087783666259, "loss": 3.3326, "step": 62650 }, { "epoch": 4.257032205462699, "grad_norm": 1.6369359493255615, "learning_rate": 0.0004680663133577932, "loss": 3.4551, "step": 62655 }, { "epoch": 4.25737192553336, "grad_norm": 1.5643070936203003, "learning_rate": 0.00046802384834896044, "loss": 3.191, "step": 62660 }, { "epoch": 4.257711645604022, "grad_norm": 1.671632170677185, "learning_rate": 0.0004679813833401277, "loss": 3.4015, "step": 62665 }, { "epoch": 4.258051365674684, "grad_norm": 1.6394875049591064, "learning_rate": 0.00046793891833129506, "loss": 3.6168, "step": 62670 }, { "epoch": 4.2583910857453455, "grad_norm": 1.7164324522018433, "learning_rate": 0.0004678964533224623, "loss": 3.3259, "step": 62675 }, { "epoch": 4.258730805816008, "grad_norm": 3.0548505783081055, "learning_rate": 0.00046785398831362957, "loss": 3.2446, "step": 62680 }, { "epoch": 4.25907052588667, "grad_norm": 1.9503487348556519, "learning_rate": 0.00046781152330479685, "loss": 3.3944, "step": 62685 }, { "epoch": 4.259410245957331, "grad_norm": 2.383678436279297, "learning_rate": 0.0004677690582959641, "loss": 3.2984, "step": 62690 }, { "epoch": 4.259749966027993, "grad_norm": 1.7086528539657593, "learning_rate": 0.0004677265932871314, "loss": 3.3303, "step": 62695 }, { "epoch": 4.260089686098655, "grad_norm": 1.5905349254608154, "learning_rate": 0.0004676841282782987, "loss": 3.4368, "step": 62700 }, { "epoch": 4.260429406169316, "grad_norm": 2.4463307857513428, "learning_rate": 0.00046764166326946597, "loss": 3.4844, "step": 62705 }, { "epoch": 4.260769126239978, "grad_norm": 1.5947847366333008, "learning_rate": 
0.00046759919826063325, "loss": 3.5402, "step": 62710 }, { "epoch": 4.26110884631064, "grad_norm": 1.5723360776901245, "learning_rate": 0.0004675567332518005, "loss": 3.4183, "step": 62715 }, { "epoch": 4.2614485663813015, "grad_norm": 1.7774231433868408, "learning_rate": 0.00046751426824296775, "loss": 3.1941, "step": 62720 }, { "epoch": 4.261788286451964, "grad_norm": 2.1799120903015137, "learning_rate": 0.0004674718032341351, "loss": 3.3945, "step": 62725 }, { "epoch": 4.262128006522626, "grad_norm": 2.2066683769226074, "learning_rate": 0.00046742933822530237, "loss": 3.6441, "step": 62730 }, { "epoch": 4.262467726593287, "grad_norm": 1.7948088645935059, "learning_rate": 0.00046738687321646965, "loss": 3.4993, "step": 62735 }, { "epoch": 4.262807446663949, "grad_norm": 1.8009581565856934, "learning_rate": 0.0004673444082076369, "loss": 3.3392, "step": 62740 }, { "epoch": 4.263147166734611, "grad_norm": 1.8133540153503418, "learning_rate": 0.0004673019431988042, "loss": 3.4655, "step": 62745 }, { "epoch": 4.263486886805272, "grad_norm": 2.73132586479187, "learning_rate": 0.0004672594781899715, "loss": 3.4206, "step": 62750 }, { "epoch": 4.263826606875934, "grad_norm": 1.8707057237625122, "learning_rate": 0.0004672170131811387, "loss": 3.2968, "step": 62755 }, { "epoch": 4.264166326946596, "grad_norm": 1.9274380207061768, "learning_rate": 0.00046717454817230605, "loss": 3.4962, "step": 62760 }, { "epoch": 4.2645060470172576, "grad_norm": 2.0124638080596924, "learning_rate": 0.0004671320831634733, "loss": 3.4715, "step": 62765 }, { "epoch": 4.26484576708792, "grad_norm": 1.6944366693496704, "learning_rate": 0.00046708961815464055, "loss": 3.3833, "step": 62770 }, { "epoch": 4.265185487158582, "grad_norm": 2.0458850860595703, "learning_rate": 0.0004670471531458079, "loss": 3.4408, "step": 62775 }, { "epoch": 4.265525207229243, "grad_norm": 2.1698198318481445, "learning_rate": 0.00046700468813697517, "loss": 3.1461, "step": 62780 }, { "epoch": 4.265864927299905, 
"grad_norm": 2.0085246562957764, "learning_rate": 0.0004669622231281424, "loss": 3.2394, "step": 62785 }, { "epoch": 4.266204647370567, "grad_norm": 2.2494308948516846, "learning_rate": 0.0004669197581193097, "loss": 3.3481, "step": 62790 }, { "epoch": 4.266544367441228, "grad_norm": 1.6288602352142334, "learning_rate": 0.000466877293110477, "loss": 3.3196, "step": 62795 }, { "epoch": 4.26688408751189, "grad_norm": 1.8175694942474365, "learning_rate": 0.00046683482810164423, "loss": 3.3845, "step": 62800 }, { "epoch": 4.267223807582552, "grad_norm": 1.9750357866287231, "learning_rate": 0.0004667923630928115, "loss": 3.3911, "step": 62805 }, { "epoch": 4.267563527653214, "grad_norm": 2.2762928009033203, "learning_rate": 0.00046674989808397885, "loss": 3.1897, "step": 62810 }, { "epoch": 4.267903247723876, "grad_norm": 2.270822525024414, "learning_rate": 0.0004667074330751461, "loss": 3.6059, "step": 62815 }, { "epoch": 4.268242967794538, "grad_norm": 2.4981019496917725, "learning_rate": 0.00046666496806631335, "loss": 3.1259, "step": 62820 }, { "epoch": 4.268582687865199, "grad_norm": 1.7996008396148682, "learning_rate": 0.0004666225030574807, "loss": 3.5146, "step": 62825 }, { "epoch": 4.268922407935861, "grad_norm": 1.8803800344467163, "learning_rate": 0.0004665800380486479, "loss": 3.6176, "step": 62830 }, { "epoch": 4.269262128006522, "grad_norm": 1.8076069355010986, "learning_rate": 0.0004665375730398152, "loss": 3.505, "step": 62835 }, { "epoch": 4.269601848077184, "grad_norm": 1.6347074508666992, "learning_rate": 0.0004664951080309825, "loss": 3.4172, "step": 62840 }, { "epoch": 4.269941568147846, "grad_norm": 2.0217819213867188, "learning_rate": 0.00046645264302214975, "loss": 3.6777, "step": 62845 }, { "epoch": 4.2702812882185075, "grad_norm": 1.6065599918365479, "learning_rate": 0.00046641017801331703, "loss": 3.346, "step": 62850 }, { "epoch": 4.27062100828917, "grad_norm": 1.8709897994995117, "learning_rate": 0.0004663677130044843, "loss": 3.48, "step": 
62855 }, { "epoch": 4.270960728359832, "grad_norm": 1.8437389135360718, "learning_rate": 0.0004663252479956516, "loss": 3.3224, "step": 62860 }, { "epoch": 4.271300448430493, "grad_norm": 1.7612122297286987, "learning_rate": 0.0004662827829868189, "loss": 3.552, "step": 62865 }, { "epoch": 4.271640168501155, "grad_norm": 1.615897536277771, "learning_rate": 0.00046624031797798615, "loss": 3.189, "step": 62870 }, { "epoch": 4.271979888571817, "grad_norm": 1.4130935668945312, "learning_rate": 0.0004661978529691534, "loss": 3.4776, "step": 62875 }, { "epoch": 4.272319608642478, "grad_norm": 2.2378978729248047, "learning_rate": 0.0004661553879603207, "loss": 3.1964, "step": 62880 }, { "epoch": 4.27265932871314, "grad_norm": 2.2815980911254883, "learning_rate": 0.000466112922951488, "loss": 3.3881, "step": 62885 }, { "epoch": 4.272999048783802, "grad_norm": 1.8418476581573486, "learning_rate": 0.0004660704579426552, "loss": 3.5019, "step": 62890 }, { "epoch": 4.2733387688544635, "grad_norm": 2.4462943077087402, "learning_rate": 0.00046602799293382255, "loss": 3.4156, "step": 62895 }, { "epoch": 4.273678488925126, "grad_norm": 1.8653730154037476, "learning_rate": 0.00046598552792498983, "loss": 3.2537, "step": 62900 }, { "epoch": 4.274018208995788, "grad_norm": 1.9121015071868896, "learning_rate": 0.0004659430629161571, "loss": 3.4046, "step": 62905 }, { "epoch": 4.274357929066449, "grad_norm": 1.478361964225769, "learning_rate": 0.00046590059790732434, "loss": 3.2341, "step": 62910 }, { "epoch": 4.274697649137111, "grad_norm": 1.8103326559066772, "learning_rate": 0.0004658581328984917, "loss": 3.2947, "step": 62915 }, { "epoch": 4.275037369207773, "grad_norm": 1.4864380359649658, "learning_rate": 0.00046581566788965895, "loss": 3.4418, "step": 62920 }, { "epoch": 4.275377089278434, "grad_norm": 1.7054061889648438, "learning_rate": 0.0004657732028808262, "loss": 3.3302, "step": 62925 }, { "epoch": 4.275716809349096, "grad_norm": 1.77982497215271, "learning_rate": 
0.0004657307378719935, "loss": 3.4606, "step": 62930 }, { "epoch": 4.276056529419758, "grad_norm": 1.9378606081008911, "learning_rate": 0.0004656882728631608, "loss": 3.1257, "step": 62935 }, { "epoch": 4.2763962494904195, "grad_norm": 1.6352455615997314, "learning_rate": 0.000465645807854328, "loss": 3.33, "step": 62940 }, { "epoch": 4.276735969561082, "grad_norm": 2.1910383701324463, "learning_rate": 0.0004656033428454953, "loss": 3.2207, "step": 62945 }, { "epoch": 4.277075689631744, "grad_norm": 1.8796783685684204, "learning_rate": 0.00046556087783666264, "loss": 3.5045, "step": 62950 }, { "epoch": 4.277415409702405, "grad_norm": 1.8041653633117676, "learning_rate": 0.00046551841282782986, "loss": 3.4235, "step": 62955 }, { "epoch": 4.277755129773067, "grad_norm": 2.178936243057251, "learning_rate": 0.00046547594781899714, "loss": 3.341, "step": 62960 }, { "epoch": 4.278094849843729, "grad_norm": 1.7093695402145386, "learning_rate": 0.0004654334828101645, "loss": 3.4391, "step": 62965 }, { "epoch": 4.27843456991439, "grad_norm": 2.015932559967041, "learning_rate": 0.0004653910178013317, "loss": 3.3398, "step": 62970 }, { "epoch": 4.278774289985052, "grad_norm": 1.5940499305725098, "learning_rate": 0.000465348552792499, "loss": 3.4184, "step": 62975 }, { "epoch": 4.279114010055714, "grad_norm": 1.8149186372756958, "learning_rate": 0.00046530608778366626, "loss": 3.5213, "step": 62980 }, { "epoch": 4.2794537301263755, "grad_norm": 1.4262571334838867, "learning_rate": 0.00046526362277483354, "loss": 3.2566, "step": 62985 }, { "epoch": 4.279793450197038, "grad_norm": 1.4280418157577515, "learning_rate": 0.0004652211577660008, "loss": 3.3475, "step": 62990 }, { "epoch": 4.2801331702677, "grad_norm": 1.8276089429855347, "learning_rate": 0.0004651786927571681, "loss": 3.1263, "step": 62995 }, { "epoch": 4.280472890338361, "grad_norm": 2.0866377353668213, "learning_rate": 0.0004651362277483354, "loss": 3.5147, "step": 63000 }, { "epoch": 4.280812610409023, "grad_norm": 
1.6632366180419922, "learning_rate": 0.00046509376273950266, "loss": 3.6363, "step": 63005 }, { "epoch": 4.281152330479685, "grad_norm": 1.991124153137207, "learning_rate": 0.00046505129773066994, "loss": 3.4984, "step": 63010 }, { "epoch": 4.281492050550346, "grad_norm": 1.7557904720306396, "learning_rate": 0.00046500883272183717, "loss": 3.4333, "step": 63015 }, { "epoch": 4.281831770621008, "grad_norm": 2.168301820755005, "learning_rate": 0.0004649663677130045, "loss": 3.6593, "step": 63020 }, { "epoch": 4.28217149069167, "grad_norm": 1.7527183294296265, "learning_rate": 0.0004649239027041718, "loss": 3.5214, "step": 63025 }, { "epoch": 4.2825112107623315, "grad_norm": 2.079028844833374, "learning_rate": 0.000464881437695339, "loss": 3.4059, "step": 63030 }, { "epoch": 4.282850930832994, "grad_norm": 1.5585588216781616, "learning_rate": 0.00046483897268650634, "loss": 3.5373, "step": 63035 }, { "epoch": 4.283190650903656, "grad_norm": 2.0770373344421387, "learning_rate": 0.0004647965076776736, "loss": 3.3894, "step": 63040 }, { "epoch": 4.283530370974317, "grad_norm": 2.0058319568634033, "learning_rate": 0.00046475404266884085, "loss": 3.5406, "step": 63045 }, { "epoch": 4.283870091044979, "grad_norm": 1.7260425090789795, "learning_rate": 0.0004647115776600082, "loss": 3.4615, "step": 63050 }, { "epoch": 4.284209811115641, "grad_norm": 1.608200192451477, "learning_rate": 0.00046466911265117546, "loss": 3.5257, "step": 63055 }, { "epoch": 4.284549531186302, "grad_norm": 1.931378960609436, "learning_rate": 0.0004646266476423427, "loss": 3.4308, "step": 63060 }, { "epoch": 4.284889251256964, "grad_norm": 2.157282829284668, "learning_rate": 0.00046458418263350997, "loss": 3.5357, "step": 63065 }, { "epoch": 4.285228971327626, "grad_norm": 1.7300832271575928, "learning_rate": 0.0004645417176246773, "loss": 3.3521, "step": 63070 }, { "epoch": 4.2855686913982876, "grad_norm": 2.259228467941284, "learning_rate": 0.0004644992526158446, "loss": 3.7002, "step": 63075 }, { 
"epoch": 4.28590841146895, "grad_norm": 2.0366153717041016, "learning_rate": 0.0004644567876070118, "loss": 3.5806, "step": 63080 }, { "epoch": 4.286248131539612, "grad_norm": 1.857787847518921, "learning_rate": 0.00046441432259817914, "loss": 3.3489, "step": 63085 }, { "epoch": 4.286587851610273, "grad_norm": 1.5851643085479736, "learning_rate": 0.0004643718575893464, "loss": 3.256, "step": 63090 }, { "epoch": 4.286927571680935, "grad_norm": 2.016563892364502, "learning_rate": 0.00046432939258051365, "loss": 2.8409, "step": 63095 }, { "epoch": 4.287267291751597, "grad_norm": 1.688968300819397, "learning_rate": 0.00046428692757168093, "loss": 3.4227, "step": 63100 }, { "epoch": 4.287607011822258, "grad_norm": 1.829832911491394, "learning_rate": 0.00046424446256284826, "loss": 3.1909, "step": 63105 }, { "epoch": 4.28794673189292, "grad_norm": 2.431840181350708, "learning_rate": 0.0004642019975540155, "loss": 3.3595, "step": 63110 }, { "epoch": 4.288286451963582, "grad_norm": 1.7922945022583008, "learning_rate": 0.00046415953254518277, "loss": 3.4386, "step": 63115 }, { "epoch": 4.288626172034244, "grad_norm": 2.806675910949707, "learning_rate": 0.0004641170675363501, "loss": 3.4656, "step": 63120 }, { "epoch": 4.288965892104906, "grad_norm": 1.862101674079895, "learning_rate": 0.00046407460252751733, "loss": 3.6861, "step": 63125 }, { "epoch": 4.289305612175568, "grad_norm": 2.1772754192352295, "learning_rate": 0.0004640321375186846, "loss": 3.2858, "step": 63130 }, { "epoch": 4.289645332246229, "grad_norm": 1.5905437469482422, "learning_rate": 0.0004639896725098519, "loss": 3.3617, "step": 63135 }, { "epoch": 4.289985052316891, "grad_norm": 2.197993755340576, "learning_rate": 0.00046394720750101917, "loss": 3.3696, "step": 63140 }, { "epoch": 4.290324772387553, "grad_norm": 1.662125825881958, "learning_rate": 0.00046390474249218645, "loss": 3.4083, "step": 63145 }, { "epoch": 4.290664492458214, "grad_norm": 1.8028936386108398, "learning_rate": 
0.00046386227748335373, "loss": 3.6185, "step": 63150 }, { "epoch": 4.291004212528876, "grad_norm": 1.8609600067138672, "learning_rate": 0.000463819812474521, "loss": 3.4739, "step": 63155 }, { "epoch": 4.291343932599538, "grad_norm": 1.8411765098571777, "learning_rate": 0.0004637773474656883, "loss": 3.4637, "step": 63160 }, { "epoch": 4.2916836526702, "grad_norm": 2.4935145378112793, "learning_rate": 0.00046373488245685557, "loss": 3.2061, "step": 63165 }, { "epoch": 4.292023372740862, "grad_norm": 2.3609094619750977, "learning_rate": 0.0004636924174480228, "loss": 3.3777, "step": 63170 }, { "epoch": 4.292363092811524, "grad_norm": 1.8608344793319702, "learning_rate": 0.00046364995243919013, "loss": 3.3844, "step": 63175 }, { "epoch": 4.292702812882185, "grad_norm": 1.8305574655532837, "learning_rate": 0.0004636074874303574, "loss": 3.3313, "step": 63180 }, { "epoch": 4.293042532952847, "grad_norm": 2.094252109527588, "learning_rate": 0.00046356502242152464, "loss": 3.3826, "step": 63185 }, { "epoch": 4.293382253023509, "grad_norm": 2.0469939708709717, "learning_rate": 0.00046352255741269197, "loss": 3.4403, "step": 63190 }, { "epoch": 4.29372197309417, "grad_norm": 1.324774980545044, "learning_rate": 0.00046348009240385925, "loss": 3.5263, "step": 63195 }, { "epoch": 4.294061693164832, "grad_norm": 2.3357481956481934, "learning_rate": 0.0004634376273950265, "loss": 3.0822, "step": 63200 }, { "epoch": 4.294401413235494, "grad_norm": 2.397202253341675, "learning_rate": 0.00046339516238619376, "loss": 3.4733, "step": 63205 }, { "epoch": 4.294741133306156, "grad_norm": 1.5661768913269043, "learning_rate": 0.0004633526973773611, "loss": 3.3892, "step": 63210 }, { "epoch": 4.295080853376818, "grad_norm": 1.9791299104690552, "learning_rate": 0.0004633102323685283, "loss": 3.4006, "step": 63215 }, { "epoch": 4.29542057344748, "grad_norm": 1.9609355926513672, "learning_rate": 0.0004632677673596956, "loss": 3.5654, "step": 63220 }, { "epoch": 4.295760293518141, 
"grad_norm": 1.8484834432601929, "learning_rate": 0.00046322530235086293, "loss": 3.3321, "step": 63225 }, { "epoch": 4.296100013588803, "grad_norm": 1.9397815465927124, "learning_rate": 0.00046318283734203016, "loss": 3.4128, "step": 63230 }, { "epoch": 4.296439733659464, "grad_norm": 2.069485664367676, "learning_rate": 0.00046314037233319744, "loss": 3.3031, "step": 63235 }, { "epoch": 4.296779453730126, "grad_norm": 2.3314220905303955, "learning_rate": 0.0004630979073243647, "loss": 3.3294, "step": 63240 }, { "epoch": 4.297119173800788, "grad_norm": 1.6204239130020142, "learning_rate": 0.00046305544231553205, "loss": 3.3599, "step": 63245 }, { "epoch": 4.2974588938714495, "grad_norm": 1.4502030611038208, "learning_rate": 0.0004630129773066993, "loss": 3.5475, "step": 63250 }, { "epoch": 4.297798613942112, "grad_norm": 1.819841742515564, "learning_rate": 0.00046297051229786656, "loss": 3.4473, "step": 63255 }, { "epoch": 4.298138334012774, "grad_norm": 1.8680553436279297, "learning_rate": 0.0004629280472890339, "loss": 3.3402, "step": 63260 }, { "epoch": 4.298478054083435, "grad_norm": 1.8544026613235474, "learning_rate": 0.0004628855822802011, "loss": 3.4782, "step": 63265 }, { "epoch": 4.298817774154097, "grad_norm": 1.9973684549331665, "learning_rate": 0.0004628431172713684, "loss": 3.1818, "step": 63270 }, { "epoch": 4.299157494224759, "grad_norm": 1.8161455392837524, "learning_rate": 0.0004628006522625357, "loss": 3.5574, "step": 63275 }, { "epoch": 4.29949721429542, "grad_norm": 1.7216140031814575, "learning_rate": 0.00046275818725370296, "loss": 3.4876, "step": 63280 }, { "epoch": 4.299836934366082, "grad_norm": 1.8857920169830322, "learning_rate": 0.00046271572224487024, "loss": 3.4122, "step": 63285 }, { "epoch": 4.300176654436744, "grad_norm": 1.5191460847854614, "learning_rate": 0.0004626732572360375, "loss": 3.544, "step": 63290 }, { "epoch": 4.3005163745074055, "grad_norm": 2.133535385131836, "learning_rate": 0.0004626307922272048, "loss": 3.5584, 
"step": 63295 }, { "epoch": 4.300856094578068, "grad_norm": 2.1108641624450684, "learning_rate": 0.0004625883272183721, "loss": 3.3656, "step": 63300 }, { "epoch": 4.30119581464873, "grad_norm": 1.849539041519165, "learning_rate": 0.00046254586220953936, "loss": 3.4534, "step": 63305 }, { "epoch": 4.301535534719391, "grad_norm": 2.17384672164917, "learning_rate": 0.0004625033972007066, "loss": 3.1118, "step": 63310 }, { "epoch": 4.301875254790053, "grad_norm": 2.0488650798797607, "learning_rate": 0.0004624609321918739, "loss": 3.361, "step": 63315 }, { "epoch": 4.302214974860715, "grad_norm": 1.7448865175247192, "learning_rate": 0.0004624184671830412, "loss": 3.6147, "step": 63320 }, { "epoch": 4.302554694931376, "grad_norm": 1.5699710845947266, "learning_rate": 0.0004623760021742084, "loss": 3.3313, "step": 63325 }, { "epoch": 4.302894415002038, "grad_norm": 1.7707451581954956, "learning_rate": 0.00046233353716537576, "loss": 3.3481, "step": 63330 }, { "epoch": 4.3032341350727, "grad_norm": 2.0858523845672607, "learning_rate": 0.00046229107215654304, "loss": 3.2679, "step": 63335 }, { "epoch": 4.3035738551433615, "grad_norm": 2.2775464057922363, "learning_rate": 0.00046224860714771026, "loss": 3.2491, "step": 63340 }, { "epoch": 4.303913575214024, "grad_norm": 1.7053855657577515, "learning_rate": 0.0004622061421388776, "loss": 3.1753, "step": 63345 }, { "epoch": 4.304253295284686, "grad_norm": 1.7572572231292725, "learning_rate": 0.0004621636771300449, "loss": 3.3463, "step": 63350 }, { "epoch": 4.304593015355347, "grad_norm": 1.415999174118042, "learning_rate": 0.0004621212121212121, "loss": 3.1947, "step": 63355 }, { "epoch": 4.304932735426009, "grad_norm": 1.391317367553711, "learning_rate": 0.0004620787471123794, "loss": 3.7136, "step": 63360 }, { "epoch": 4.305272455496671, "grad_norm": 2.1067395210266113, "learning_rate": 0.0004620362821035467, "loss": 3.5319, "step": 63365 }, { "epoch": 4.305612175567332, "grad_norm": 1.911131739616394, "learning_rate": 
0.00046199381709471394, "loss": 3.364, "step": 63370 }, { "epoch": 4.305951895637994, "grad_norm": 1.914339303970337, "learning_rate": 0.0004619513520858812, "loss": 3.4244, "step": 63375 }, { "epoch": 4.306291615708656, "grad_norm": 2.231783866882324, "learning_rate": 0.00046190888707704856, "loss": 3.1223, "step": 63380 }, { "epoch": 4.306631335779318, "grad_norm": 2.006817102432251, "learning_rate": 0.0004618664220682158, "loss": 3.2003, "step": 63385 }, { "epoch": 4.30697105584998, "grad_norm": 1.8419129848480225, "learning_rate": 0.00046182395705938307, "loss": 3.4037, "step": 63390 }, { "epoch": 4.307310775920642, "grad_norm": 1.8251399993896484, "learning_rate": 0.00046178149205055035, "loss": 3.3526, "step": 63395 }, { "epoch": 4.307650495991303, "grad_norm": 1.6332355737686157, "learning_rate": 0.0004617390270417176, "loss": 3.4623, "step": 63400 }, { "epoch": 4.307990216061965, "grad_norm": 1.5689504146575928, "learning_rate": 0.0004616965620328849, "loss": 3.1695, "step": 63405 }, { "epoch": 4.308329936132627, "grad_norm": 1.6764143705368042, "learning_rate": 0.0004616540970240522, "loss": 3.3065, "step": 63410 }, { "epoch": 4.308669656203288, "grad_norm": 1.405743956565857, "learning_rate": 0.0004616116320152195, "loss": 3.4194, "step": 63415 }, { "epoch": 4.30900937627395, "grad_norm": 1.9148709774017334, "learning_rate": 0.00046156916700638675, "loss": 3.5303, "step": 63420 }, { "epoch": 4.309349096344612, "grad_norm": 1.4949300289154053, "learning_rate": 0.000461526701997554, "loss": 3.4617, "step": 63425 }, { "epoch": 4.309688816415274, "grad_norm": 1.7274606227874756, "learning_rate": 0.0004614842369887213, "loss": 3.5003, "step": 63430 }, { "epoch": 4.310028536485936, "grad_norm": 1.5417428016662598, "learning_rate": 0.0004614417719798886, "loss": 3.4564, "step": 63435 }, { "epoch": 4.310368256556598, "grad_norm": 1.942589282989502, "learning_rate": 0.00046139930697105587, "loss": 3.3118, "step": 63440 }, { "epoch": 4.310707976627259, "grad_norm": 
1.8810052871704102, "learning_rate": 0.00046135684196222315, "loss": 3.2224, "step": 63445 }, { "epoch": 4.311047696697921, "grad_norm": 1.8325141668319702, "learning_rate": 0.0004613143769533904, "loss": 3.4116, "step": 63450 }, { "epoch": 4.311387416768583, "grad_norm": 2.0289254188537598, "learning_rate": 0.0004612719119445577, "loss": 3.5454, "step": 63455 }, { "epoch": 4.311727136839244, "grad_norm": 2.355633497238159, "learning_rate": 0.000461229446935725, "loss": 3.5344, "step": 63460 }, { "epoch": 4.312066856909906, "grad_norm": 1.7995930910110474, "learning_rate": 0.0004611869819268922, "loss": 3.3396, "step": 63465 }, { "epoch": 4.312406576980568, "grad_norm": 1.8585536479949951, "learning_rate": 0.00046114451691805955, "loss": 3.5272, "step": 63470 }, { "epoch": 4.31274629705123, "grad_norm": 2.5130536556243896, "learning_rate": 0.0004611020519092268, "loss": 3.4751, "step": 63475 }, { "epoch": 4.313086017121892, "grad_norm": 2.1127734184265137, "learning_rate": 0.00046105958690039405, "loss": 3.5315, "step": 63480 }, { "epoch": 4.313425737192554, "grad_norm": 1.8111441135406494, "learning_rate": 0.0004610171218915614, "loss": 3.2656, "step": 63485 }, { "epoch": 4.313765457263215, "grad_norm": 2.164355993270874, "learning_rate": 0.00046097465688272867, "loss": 3.3892, "step": 63490 }, { "epoch": 4.314105177333877, "grad_norm": 2.0044572353363037, "learning_rate": 0.0004609321918738959, "loss": 3.3148, "step": 63495 }, { "epoch": 4.314444897404539, "grad_norm": 1.6447159051895142, "learning_rate": 0.00046088972686506317, "loss": 3.6106, "step": 63500 }, { "epoch": 4.3147846174752, "grad_norm": 2.004503011703491, "learning_rate": 0.0004608472618562305, "loss": 3.5322, "step": 63505 }, { "epoch": 4.315124337545862, "grad_norm": 2.2161195278167725, "learning_rate": 0.00046080479684739773, "loss": 3.5101, "step": 63510 }, { "epoch": 4.3154640576165235, "grad_norm": 2.184995651245117, "learning_rate": 0.000460762331838565, "loss": 3.6453, "step": 63515 }, { 
"epoch": 4.315803777687186, "grad_norm": 2.043548583984375, "learning_rate": 0.00046071986682973235, "loss": 3.3228, "step": 63520 }, { "epoch": 4.316143497757848, "grad_norm": 1.4457744359970093, "learning_rate": 0.0004606774018208996, "loss": 3.3956, "step": 63525 }, { "epoch": 4.316483217828509, "grad_norm": 1.8572815656661987, "learning_rate": 0.00046063493681206685, "loss": 3.1598, "step": 63530 }, { "epoch": 4.316822937899171, "grad_norm": 1.9720613956451416, "learning_rate": 0.00046059247180323413, "loss": 3.4668, "step": 63535 }, { "epoch": 4.317162657969833, "grad_norm": 1.5007282495498657, "learning_rate": 0.0004605500067944014, "loss": 3.3961, "step": 63540 }, { "epoch": 4.317502378040494, "grad_norm": 1.7739254236221313, "learning_rate": 0.0004605075417855687, "loss": 3.4681, "step": 63545 }, { "epoch": 4.317842098111156, "grad_norm": 1.6119741201400757, "learning_rate": 0.000460465076776736, "loss": 3.3801, "step": 63550 }, { "epoch": 4.318181818181818, "grad_norm": 1.8895936012268066, "learning_rate": 0.00046042261176790325, "loss": 3.4962, "step": 63555 }, { "epoch": 4.3185215382524795, "grad_norm": 1.9470595121383667, "learning_rate": 0.00046038014675907053, "loss": 3.5617, "step": 63560 }, { "epoch": 4.318861258323142, "grad_norm": 1.839432954788208, "learning_rate": 0.0004603376817502378, "loss": 3.4488, "step": 63565 }, { "epoch": 4.319200978393804, "grad_norm": 2.0248095989227295, "learning_rate": 0.00046029521674140504, "loss": 3.3916, "step": 63570 }, { "epoch": 4.319540698464465, "grad_norm": 1.8250384330749512, "learning_rate": 0.0004602527517325724, "loss": 3.2766, "step": 63575 }, { "epoch": 4.319880418535127, "grad_norm": 1.6339356899261475, "learning_rate": 0.00046021028672373965, "loss": 3.5551, "step": 63580 }, { "epoch": 4.320220138605789, "grad_norm": 1.5335215330123901, "learning_rate": 0.00046016782171490693, "loss": 3.6253, "step": 63585 }, { "epoch": 4.32055985867645, "grad_norm": 2.1986939907073975, "learning_rate": 
0.0004601253567060742, "loss": 3.2946, "step": 63590 }, { "epoch": 4.320899578747112, "grad_norm": 1.576773762702942, "learning_rate": 0.0004600828916972415, "loss": 3.1473, "step": 63595 }, { "epoch": 4.321239298817774, "grad_norm": 1.7326865196228027, "learning_rate": 0.0004600404266884088, "loss": 3.4768, "step": 63600 }, { "epoch": 4.3215790188884355, "grad_norm": 1.7404619455337524, "learning_rate": 0.000459997961679576, "loss": 3.4978, "step": 63605 }, { "epoch": 4.321918738959098, "grad_norm": 2.043907403945923, "learning_rate": 0.00045995549667074333, "loss": 3.3114, "step": 63610 }, { "epoch": 4.32225845902976, "grad_norm": 1.6835942268371582, "learning_rate": 0.0004599130316619106, "loss": 3.3736, "step": 63615 }, { "epoch": 4.322598179100421, "grad_norm": 1.707620620727539, "learning_rate": 0.00045987056665307784, "loss": 3.3524, "step": 63620 }, { "epoch": 4.322937899171083, "grad_norm": 1.7183351516723633, "learning_rate": 0.0004598281016442452, "loss": 3.4387, "step": 63625 }, { "epoch": 4.323277619241745, "grad_norm": 1.769326090812683, "learning_rate": 0.00045978563663541245, "loss": 3.49, "step": 63630 }, { "epoch": 4.323617339312406, "grad_norm": 2.0081841945648193, "learning_rate": 0.0004597431716265797, "loss": 3.1181, "step": 63635 }, { "epoch": 4.323957059383068, "grad_norm": 1.4497911930084229, "learning_rate": 0.000459700706617747, "loss": 3.3831, "step": 63640 }, { "epoch": 4.32429677945373, "grad_norm": 1.625909447669983, "learning_rate": 0.0004596582416089143, "loss": 3.4468, "step": 63645 }, { "epoch": 4.3246364995243916, "grad_norm": 1.4617983102798462, "learning_rate": 0.0004596157766000815, "loss": 3.3191, "step": 63650 }, { "epoch": 4.324976219595054, "grad_norm": 1.773763656616211, "learning_rate": 0.0004595733115912488, "loss": 3.2851, "step": 63655 }, { "epoch": 4.325315939665716, "grad_norm": 1.985143780708313, "learning_rate": 0.00045953084658241614, "loss": 3.1989, "step": 63660 }, { "epoch": 4.325655659736377, "grad_norm": 
2.0905094146728516, "learning_rate": 0.00045948838157358336, "loss": 3.6845, "step": 63665 }, { "epoch": 4.325995379807039, "grad_norm": 1.4226137399673462, "learning_rate": 0.00045944591656475064, "loss": 3.4836, "step": 63670 }, { "epoch": 4.326335099877701, "grad_norm": 1.5320817232131958, "learning_rate": 0.000459403451555918, "loss": 3.4749, "step": 63675 }, { "epoch": 4.326674819948362, "grad_norm": 1.9978163242340088, "learning_rate": 0.0004593609865470852, "loss": 3.6659, "step": 63680 }, { "epoch": 4.327014540019024, "grad_norm": 1.4595052003860474, "learning_rate": 0.0004593185215382525, "loss": 3.4793, "step": 63685 }, { "epoch": 4.327354260089686, "grad_norm": 1.561436653137207, "learning_rate": 0.00045927605652941976, "loss": 3.4541, "step": 63690 }, { "epoch": 4.327693980160348, "grad_norm": 1.8051023483276367, "learning_rate": 0.00045923359152058704, "loss": 3.4434, "step": 63695 }, { "epoch": 4.32803370023101, "grad_norm": 1.734125018119812, "learning_rate": 0.0004591911265117543, "loss": 3.2219, "step": 63700 }, { "epoch": 4.328373420301672, "grad_norm": 1.7884788513183594, "learning_rate": 0.0004591486615029216, "loss": 3.3665, "step": 63705 }, { "epoch": 4.328713140372333, "grad_norm": 1.592860221862793, "learning_rate": 0.0004591061964940889, "loss": 3.3923, "step": 63710 }, { "epoch": 4.329052860442995, "grad_norm": 1.6702349185943604, "learning_rate": 0.00045906373148525616, "loss": 3.1347, "step": 63715 }, { "epoch": 4.329392580513657, "grad_norm": 1.5733219385147095, "learning_rate": 0.00045902126647642344, "loss": 3.3106, "step": 63720 }, { "epoch": 4.329732300584318, "grad_norm": 1.5043152570724487, "learning_rate": 0.00045897880146759067, "loss": 3.0691, "step": 63725 }, { "epoch": 4.33007202065498, "grad_norm": 1.5542482137680054, "learning_rate": 0.000458936336458758, "loss": 3.2436, "step": 63730 }, { "epoch": 4.330411740725642, "grad_norm": 2.0834038257598877, "learning_rate": 0.0004588938714499253, "loss": 2.9533, "step": 63735 }, { 
"epoch": 4.330751460796304, "grad_norm": 1.8208096027374268, "learning_rate": 0.0004588514064410925, "loss": 3.2753, "step": 63740 }, { "epoch": 4.331091180866966, "grad_norm": 2.6300337314605713, "learning_rate": 0.00045880894143225984, "loss": 3.1104, "step": 63745 }, { "epoch": 4.331430900937628, "grad_norm": 1.7670987844467163, "learning_rate": 0.0004587664764234271, "loss": 3.4953, "step": 63750 }, { "epoch": 4.331770621008289, "grad_norm": 1.9438233375549316, "learning_rate": 0.0004587240114145944, "loss": 3.2854, "step": 63755 }, { "epoch": 4.332110341078951, "grad_norm": 1.8317841291427612, "learning_rate": 0.00045868154640576163, "loss": 3.274, "step": 63760 }, { "epoch": 4.332450061149613, "grad_norm": 1.91573965549469, "learning_rate": 0.00045863908139692896, "loss": 3.1509, "step": 63765 }, { "epoch": 4.332789781220274, "grad_norm": 1.5286518335342407, "learning_rate": 0.00045859661638809624, "loss": 3.3524, "step": 63770 }, { "epoch": 4.333129501290936, "grad_norm": 2.1014580726623535, "learning_rate": 0.00045855415137926347, "loss": 3.3062, "step": 63775 }, { "epoch": 4.333469221361598, "grad_norm": 1.5221264362335205, "learning_rate": 0.0004585116863704308, "loss": 3.1116, "step": 63780 }, { "epoch": 4.33380894143226, "grad_norm": 1.8353346586227417, "learning_rate": 0.0004584692213615981, "loss": 3.7634, "step": 63785 }, { "epoch": 4.334148661502922, "grad_norm": 1.4450551271438599, "learning_rate": 0.0004584267563527653, "loss": 3.2538, "step": 63790 }, { "epoch": 4.334488381573584, "grad_norm": 2.3726296424865723, "learning_rate": 0.0004583842913439326, "loss": 3.3676, "step": 63795 }, { "epoch": 4.334828101644245, "grad_norm": 1.9476062059402466, "learning_rate": 0.0004583418263350999, "loss": 3.1319, "step": 63800 }, { "epoch": 4.335167821714907, "grad_norm": 1.300318717956543, "learning_rate": 0.00045829936132626715, "loss": 3.3945, "step": 63805 }, { "epoch": 4.335507541785569, "grad_norm": 2.5259222984313965, "learning_rate": 
0.00045825689631743443, "loss": 3.161, "step": 63810 }, { "epoch": 4.33584726185623, "grad_norm": 1.8776154518127441, "learning_rate": 0.00045821443130860176, "loss": 3.6265, "step": 63815 }, { "epoch": 4.336186981926892, "grad_norm": 2.2447171211242676, "learning_rate": 0.000458171966299769, "loss": 3.3966, "step": 63820 }, { "epoch": 4.336526701997554, "grad_norm": 2.1402323246002197, "learning_rate": 0.00045812950129093627, "loss": 3.5861, "step": 63825 }, { "epoch": 4.336866422068216, "grad_norm": 2.0086705684661865, "learning_rate": 0.00045808703628210355, "loss": 3.4983, "step": 63830 }, { "epoch": 4.337206142138878, "grad_norm": 1.9754897356033325, "learning_rate": 0.00045804457127327083, "loss": 3.3072, "step": 63835 }, { "epoch": 4.33754586220954, "grad_norm": 1.8296535015106201, "learning_rate": 0.0004580021062644381, "loss": 3.3804, "step": 63840 }, { "epoch": 4.337885582280201, "grad_norm": 1.8898544311523438, "learning_rate": 0.0004579596412556054, "loss": 3.3756, "step": 63845 }, { "epoch": 4.338225302350863, "grad_norm": 2.1085774898529053, "learning_rate": 0.00045791717624677267, "loss": 3.0073, "step": 63850 }, { "epoch": 4.338565022421525, "grad_norm": 1.4739344120025635, "learning_rate": 0.00045787471123793995, "loss": 3.2606, "step": 63855 }, { "epoch": 4.338904742492186, "grad_norm": 1.721376895904541, "learning_rate": 0.00045783224622910723, "loss": 3.3952, "step": 63860 }, { "epoch": 4.339244462562848, "grad_norm": 2.390528440475464, "learning_rate": 0.00045778978122027446, "loss": 3.5919, "step": 63865 }, { "epoch": 4.33958418263351, "grad_norm": 1.881242036819458, "learning_rate": 0.0004577473162114418, "loss": 3.4354, "step": 63870 }, { "epoch": 4.339923902704172, "grad_norm": 1.764406681060791, "learning_rate": 0.00045770485120260907, "loss": 3.4307, "step": 63875 }, { "epoch": 4.340263622774834, "grad_norm": 1.7043739557266235, "learning_rate": 0.0004576623861937763, "loss": 3.6977, "step": 63880 }, { "epoch": 4.340603342845496, 
"grad_norm": 1.9904969930648804, "learning_rate": 0.00045761992118494363, "loss": 3.5847, "step": 63885 }, { "epoch": 4.340943062916157, "grad_norm": 1.5866080522537231, "learning_rate": 0.0004575774561761109, "loss": 3.4478, "step": 63890 }, { "epoch": 4.341282782986819, "grad_norm": 1.4613139629364014, "learning_rate": 0.00045753499116727814, "loss": 3.3124, "step": 63895 }, { "epoch": 4.341622503057481, "grad_norm": 2.074709177017212, "learning_rate": 0.00045749252615844547, "loss": 3.2874, "step": 63900 }, { "epoch": 4.341962223128142, "grad_norm": 1.7069118022918701, "learning_rate": 0.00045745006114961275, "loss": 3.2482, "step": 63905 }, { "epoch": 4.342301943198804, "grad_norm": 1.8325741291046143, "learning_rate": 0.00045740759614078, "loss": 3.2566, "step": 63910 }, { "epoch": 4.3426416632694655, "grad_norm": 1.607973575592041, "learning_rate": 0.00045736513113194726, "loss": 3.4184, "step": 63915 }, { "epoch": 4.342981383340128, "grad_norm": 2.0430660247802734, "learning_rate": 0.0004573226661231146, "loss": 3.4695, "step": 63920 }, { "epoch": 4.34332110341079, "grad_norm": 1.6406227350234985, "learning_rate": 0.00045728020111428187, "loss": 3.6792, "step": 63925 }, { "epoch": 4.343660823481451, "grad_norm": 1.814674973487854, "learning_rate": 0.0004572377361054491, "loss": 3.5001, "step": 63930 }, { "epoch": 4.344000543552113, "grad_norm": 1.698088526725769, "learning_rate": 0.00045719527109661643, "loss": 3.5125, "step": 63935 }, { "epoch": 4.344340263622775, "grad_norm": 1.9454500675201416, "learning_rate": 0.0004571528060877837, "loss": 3.2846, "step": 63940 }, { "epoch": 4.344679983693436, "grad_norm": 2.2318003177642822, "learning_rate": 0.00045711034107895094, "loss": 3.2735, "step": 63945 }, { "epoch": 4.345019703764098, "grad_norm": 1.5973279476165771, "learning_rate": 0.0004570678760701182, "loss": 3.3445, "step": 63950 }, { "epoch": 4.34535942383476, "grad_norm": 1.778324007987976, "learning_rate": 0.00045702541106128555, "loss": 3.2108, 
"step": 63955 }, { "epoch": 4.345699143905422, "grad_norm": 1.802371859550476, "learning_rate": 0.0004569829460524528, "loss": 3.2631, "step": 63960 }, { "epoch": 4.346038863976084, "grad_norm": 2.554748773574829, "learning_rate": 0.00045694048104362006, "loss": 3.2626, "step": 63965 }, { "epoch": 4.346378584046746, "grad_norm": 2.0106847286224365, "learning_rate": 0.0004569065090365539, "loss": 2.9898, "step": 63970 }, { "epoch": 4.346718304117407, "grad_norm": 1.8446149826049805, "learning_rate": 0.0004568640440277212, "loss": 3.5655, "step": 63975 }, { "epoch": 4.347058024188069, "grad_norm": 2.0294060707092285, "learning_rate": 0.00045682157901888844, "loss": 3.2948, "step": 63980 }, { "epoch": 4.347397744258731, "grad_norm": 1.4653724431991577, "learning_rate": 0.0004567791140100557, "loss": 3.5031, "step": 63985 }, { "epoch": 4.347737464329392, "grad_norm": 2.0189616680145264, "learning_rate": 0.000456736649001223, "loss": 3.3823, "step": 63990 }, { "epoch": 4.348077184400054, "grad_norm": 1.9741318225860596, "learning_rate": 0.0004566941839923903, "loss": 3.5257, "step": 63995 }, { "epoch": 4.348416904470716, "grad_norm": 1.4459669589996338, "learning_rate": 0.00045665171898355756, "loss": 3.1846, "step": 64000 }, { "epoch": 4.348756624541378, "grad_norm": 2.4703259468078613, "learning_rate": 0.00045660925397472484, "loss": 3.3531, "step": 64005 }, { "epoch": 4.34909634461204, "grad_norm": 2.0995473861694336, "learning_rate": 0.0004565667889658921, "loss": 3.4713, "step": 64010 }, { "epoch": 4.349436064682702, "grad_norm": 2.7865939140319824, "learning_rate": 0.0004565243239570594, "loss": 3.1177, "step": 64015 }, { "epoch": 4.349775784753363, "grad_norm": 1.80964195728302, "learning_rate": 0.0004564818589482267, "loss": 3.4968, "step": 64020 }, { "epoch": 4.350115504824025, "grad_norm": 2.1616499423980713, "learning_rate": 0.0004564393939393939, "loss": 3.1986, "step": 64025 }, { "epoch": 4.350455224894687, "grad_norm": 1.737204670906067, "learning_rate": 
0.00045639692893056124, "loss": 3.6149, "step": 64030 }, { "epoch": 4.350794944965348, "grad_norm": 1.6594442129135132, "learning_rate": 0.0004563544639217285, "loss": 3.2916, "step": 64035 }, { "epoch": 4.35113466503601, "grad_norm": 1.8589998483657837, "learning_rate": 0.00045631199891289575, "loss": 3.6432, "step": 64040 }, { "epoch": 4.351474385106672, "grad_norm": 1.5866409540176392, "learning_rate": 0.0004562695339040631, "loss": 3.3611, "step": 64045 }, { "epoch": 4.351814105177334, "grad_norm": 1.6654549837112427, "learning_rate": 0.00045622706889523036, "loss": 2.9502, "step": 64050 }, { "epoch": 4.352153825247996, "grad_norm": 1.7728735208511353, "learning_rate": 0.0004561846038863976, "loss": 3.1924, "step": 64055 }, { "epoch": 4.352493545318658, "grad_norm": 2.116562843322754, "learning_rate": 0.00045614213887756487, "loss": 3.3715, "step": 64060 }, { "epoch": 4.352833265389319, "grad_norm": 1.8237465620040894, "learning_rate": 0.0004560996738687322, "loss": 3.5851, "step": 64065 }, { "epoch": 4.353172985459981, "grad_norm": 1.992287039756775, "learning_rate": 0.00045605720885989943, "loss": 3.4068, "step": 64070 }, { "epoch": 4.353512705530643, "grad_norm": 1.9467273950576782, "learning_rate": 0.0004560147438510667, "loss": 3.5408, "step": 64075 }, { "epoch": 4.353852425601304, "grad_norm": 1.6846691370010376, "learning_rate": 0.00045597227884223404, "loss": 3.393, "step": 64080 }, { "epoch": 4.354192145671966, "grad_norm": 2.286442279815674, "learning_rate": 0.00045592981383340127, "loss": 3.2218, "step": 64085 }, { "epoch": 4.354531865742628, "grad_norm": 1.5429545640945435, "learning_rate": 0.00045588734882456855, "loss": 3.2888, "step": 64090 }, { "epoch": 4.35487158581329, "grad_norm": 1.5593624114990234, "learning_rate": 0.00045584488381573583, "loss": 3.5113, "step": 64095 }, { "epoch": 4.355211305883952, "grad_norm": 1.6902177333831787, "learning_rate": 0.0004558024188069031, "loss": 3.2393, "step": 64100 }, { "epoch": 4.355551025954614, 
"grad_norm": 1.7770904302597046, "learning_rate": 0.0004557599537980704, "loss": 3.4266, "step": 64105 }, { "epoch": 4.355890746025275, "grad_norm": 1.6446094512939453, "learning_rate": 0.00045571748878923767, "loss": 3.3986, "step": 64110 }, { "epoch": 4.356230466095937, "grad_norm": 1.688822865486145, "learning_rate": 0.00045567502378040495, "loss": 3.2043, "step": 64115 }, { "epoch": 4.356570186166599, "grad_norm": 1.785085678100586, "learning_rate": 0.00045563255877157223, "loss": 3.6372, "step": 64120 }, { "epoch": 4.35690990623726, "grad_norm": 1.9603291749954224, "learning_rate": 0.0004555900937627395, "loss": 3.4273, "step": 64125 }, { "epoch": 4.357249626307922, "grad_norm": 1.71449875831604, "learning_rate": 0.00045554762875390684, "loss": 3.2816, "step": 64130 }, { "epoch": 4.357589346378584, "grad_norm": 1.5834614038467407, "learning_rate": 0.00045550516374507407, "loss": 3.356, "step": 64135 }, { "epoch": 4.357929066449246, "grad_norm": 1.6736280918121338, "learning_rate": 0.00045546269873624135, "loss": 3.2386, "step": 64140 }, { "epoch": 4.358268786519908, "grad_norm": 2.0104451179504395, "learning_rate": 0.00045542023372740863, "loss": 3.6407, "step": 64145 }, { "epoch": 4.35860850659057, "grad_norm": 1.8240742683410645, "learning_rate": 0.0004553777687185759, "loss": 3.2304, "step": 64150 }, { "epoch": 4.358948226661231, "grad_norm": 1.7488903999328613, "learning_rate": 0.0004553353037097432, "loss": 3.5505, "step": 64155 }, { "epoch": 4.359287946731893, "grad_norm": 2.6225523948669434, "learning_rate": 0.00045529283870091047, "loss": 3.4641, "step": 64160 }, { "epoch": 4.359627666802555, "grad_norm": 2.5709781646728516, "learning_rate": 0.00045525037369207775, "loss": 3.3241, "step": 64165 }, { "epoch": 4.359967386873216, "grad_norm": 2.4773128032684326, "learning_rate": 0.00045520790868324503, "loss": 3.2891, "step": 64170 }, { "epoch": 4.360307106943878, "grad_norm": 1.6698139905929565, "learning_rate": 0.0004551654436744123, "loss": 3.3933, 
"step": 64175 }, { "epoch": 4.36064682701454, "grad_norm": 1.8936424255371094, "learning_rate": 0.00045512297866557954, "loss": 3.4052, "step": 64180 }, { "epoch": 4.360986547085202, "grad_norm": 1.6654490232467651, "learning_rate": 0.00045508051365674687, "loss": 3.5877, "step": 64185 }, { "epoch": 4.361326267155864, "grad_norm": 1.6932880878448486, "learning_rate": 0.00045503804864791415, "loss": 3.4344, "step": 64190 }, { "epoch": 4.361665987226525, "grad_norm": 1.8606945276260376, "learning_rate": 0.0004549955836390814, "loss": 3.4948, "step": 64195 }, { "epoch": 4.362005707297187, "grad_norm": 2.1544668674468994, "learning_rate": 0.0004549531186302487, "loss": 3.2296, "step": 64200 }, { "epoch": 4.362345427367849, "grad_norm": 2.4058456420898438, "learning_rate": 0.000454910653621416, "loss": 3.4564, "step": 64205 }, { "epoch": 4.36268514743851, "grad_norm": 2.3996455669403076, "learning_rate": 0.0004548681886125832, "loss": 3.1898, "step": 64210 }, { "epoch": 4.363024867509172, "grad_norm": 1.8960700035095215, "learning_rate": 0.0004548257236037505, "loss": 3.4725, "step": 64215 }, { "epoch": 4.363364587579834, "grad_norm": 1.4972732067108154, "learning_rate": 0.00045478325859491783, "loss": 3.3803, "step": 64220 }, { "epoch": 4.3637043076504956, "grad_norm": 1.5149701833724976, "learning_rate": 0.00045474079358608506, "loss": 3.2883, "step": 64225 }, { "epoch": 4.364044027721158, "grad_norm": 1.5806714296340942, "learning_rate": 0.00045469832857725234, "loss": 3.4012, "step": 64230 }, { "epoch": 4.36438374779182, "grad_norm": 2.2766225337982178, "learning_rate": 0.00045465586356841967, "loss": 3.3259, "step": 64235 }, { "epoch": 4.364723467862481, "grad_norm": 1.6505588293075562, "learning_rate": 0.0004546133985595869, "loss": 3.3925, "step": 64240 }, { "epoch": 4.365063187933143, "grad_norm": 1.8468067646026611, "learning_rate": 0.0004545709335507542, "loss": 3.5879, "step": 64245 }, { "epoch": 4.365402908003805, "grad_norm": 1.841615080833435, 
"learning_rate": 0.00045452846854192146, "loss": 3.2487, "step": 64250 }, { "epoch": 4.365742628074466, "grad_norm": 1.8467968702316284, "learning_rate": 0.00045448600353308874, "loss": 3.1773, "step": 64255 }, { "epoch": 4.366082348145128, "grad_norm": 2.273847818374634, "learning_rate": 0.000454443538524256, "loss": 3.2353, "step": 64260 }, { "epoch": 4.36642206821579, "grad_norm": 1.746593713760376, "learning_rate": 0.0004544010735154233, "loss": 3.0646, "step": 64265 }, { "epoch": 4.366761788286452, "grad_norm": 1.7181586027145386, "learning_rate": 0.0004543586085065906, "loss": 3.1507, "step": 64270 }, { "epoch": 4.367101508357114, "grad_norm": 1.6826053857803345, "learning_rate": 0.00045431614349775786, "loss": 3.5531, "step": 64275 }, { "epoch": 4.367441228427776, "grad_norm": 1.9826093912124634, "learning_rate": 0.00045427367848892514, "loss": 3.2753, "step": 64280 }, { "epoch": 4.367780948498437, "grad_norm": 1.646175742149353, "learning_rate": 0.00045423121348009236, "loss": 3.3749, "step": 64285 }, { "epoch": 4.368120668569099, "grad_norm": 2.6871488094329834, "learning_rate": 0.0004541887484712597, "loss": 3.331, "step": 64290 }, { "epoch": 4.368460388639761, "grad_norm": 1.7050225734710693, "learning_rate": 0.000454146283462427, "loss": 3.3469, "step": 64295 }, { "epoch": 4.368800108710422, "grad_norm": 2.0548791885375977, "learning_rate": 0.00045410381845359426, "loss": 3.4442, "step": 64300 }, { "epoch": 4.369139828781084, "grad_norm": 1.7941640615463257, "learning_rate": 0.00045406135344476154, "loss": 3.6694, "step": 64305 }, { "epoch": 4.369479548851746, "grad_norm": 1.7950944900512695, "learning_rate": 0.0004540188884359288, "loss": 3.3047, "step": 64310 }, { "epoch": 4.369819268922408, "grad_norm": 1.5421591997146606, "learning_rate": 0.0004539764234270961, "loss": 3.2467, "step": 64315 }, { "epoch": 4.37015898899307, "grad_norm": 1.6230512857437134, "learning_rate": 0.0004539339584182633, "loss": 3.5699, "step": 64320 }, { "epoch": 
4.370498709063732, "grad_norm": 1.612119436264038, "learning_rate": 0.00045389149340943066, "loss": 3.2408, "step": 64325 }, { "epoch": 4.370838429134393, "grad_norm": 1.3675705194473267, "learning_rate": 0.00045384902840059794, "loss": 3.5967, "step": 64330 }, { "epoch": 4.371178149205055, "grad_norm": 2.1987011432647705, "learning_rate": 0.00045380656339176516, "loss": 3.4585, "step": 64335 }, { "epoch": 4.371517869275717, "grad_norm": 2.2053894996643066, "learning_rate": 0.0004537640983829325, "loss": 3.1528, "step": 64340 }, { "epoch": 4.371857589346378, "grad_norm": 1.53899347782135, "learning_rate": 0.0004537216333740998, "loss": 3.7264, "step": 64345 }, { "epoch": 4.37219730941704, "grad_norm": 1.8201311826705933, "learning_rate": 0.000453679168365267, "loss": 3.3557, "step": 64350 }, { "epoch": 4.372537029487702, "grad_norm": 1.8996779918670654, "learning_rate": 0.0004536367033564343, "loss": 3.5108, "step": 64355 }, { "epoch": 4.372876749558364, "grad_norm": 2.0645225048065186, "learning_rate": 0.0004535942383476016, "loss": 3.3894, "step": 64360 }, { "epoch": 4.373216469629026, "grad_norm": 2.075998067855835, "learning_rate": 0.00045355177333876884, "loss": 3.5272, "step": 64365 }, { "epoch": 4.373556189699688, "grad_norm": 1.846683382987976, "learning_rate": 0.0004535093083299361, "loss": 3.0489, "step": 64370 }, { "epoch": 4.373895909770349, "grad_norm": 1.6651302576065063, "learning_rate": 0.00045346684332110346, "loss": 3.525, "step": 64375 }, { "epoch": 4.374235629841011, "grad_norm": 1.7370668649673462, "learning_rate": 0.0004534243783122707, "loss": 3.4446, "step": 64380 }, { "epoch": 4.374575349911673, "grad_norm": 1.7272944450378418, "learning_rate": 0.00045338191330343796, "loss": 3.0963, "step": 64385 }, { "epoch": 4.374915069982334, "grad_norm": 1.9533307552337646, "learning_rate": 0.0004533394482946053, "loss": 3.2752, "step": 64390 }, { "epoch": 4.375254790052996, "grad_norm": 2.2946009635925293, "learning_rate": 0.0004532969832857725, 
"loss": 3.5506, "step": 64395 }, { "epoch": 4.375594510123658, "grad_norm": 2.2640645503997803, "learning_rate": 0.0004532545182769398, "loss": 3.3487, "step": 64400 }, { "epoch": 4.37593423019432, "grad_norm": 2.0235607624053955, "learning_rate": 0.0004532120532681071, "loss": 3.4086, "step": 64405 }, { "epoch": 4.376273950264982, "grad_norm": 1.7527574300765991, "learning_rate": 0.00045316958825927437, "loss": 3.2993, "step": 64410 }, { "epoch": 4.376613670335644, "grad_norm": 1.601886510848999, "learning_rate": 0.00045312712325044165, "loss": 3.3594, "step": 64415 }, { "epoch": 4.376953390406305, "grad_norm": 1.454182744026184, "learning_rate": 0.0004530846582416089, "loss": 3.5448, "step": 64420 }, { "epoch": 4.377293110476967, "grad_norm": 2.1674909591674805, "learning_rate": 0.0004530421932327762, "loss": 3.4376, "step": 64425 }, { "epoch": 4.377632830547629, "grad_norm": 2.0037076473236084, "learning_rate": 0.0004529997282239435, "loss": 3.6637, "step": 64430 }, { "epoch": 4.37797255061829, "grad_norm": 1.4713138341903687, "learning_rate": 0.00045295726321511077, "loss": 3.6407, "step": 64435 }, { "epoch": 4.378312270688952, "grad_norm": 1.5461312532424927, "learning_rate": 0.000452914798206278, "loss": 3.2341, "step": 64440 }, { "epoch": 4.378651990759614, "grad_norm": 1.6058164834976196, "learning_rate": 0.0004528723331974453, "loss": 3.2874, "step": 64445 }, { "epoch": 4.378991710830276, "grad_norm": 2.3105838298797607, "learning_rate": 0.0004528298681886126, "loss": 3.7733, "step": 64450 }, { "epoch": 4.379331430900938, "grad_norm": 1.5270605087280273, "learning_rate": 0.00045278740317977983, "loss": 3.2924, "step": 64455 }, { "epoch": 4.3796711509716, "grad_norm": 1.4486243724822998, "learning_rate": 0.00045274493817094717, "loss": 3.3223, "step": 64460 }, { "epoch": 4.380010871042261, "grad_norm": 1.6212859153747559, "learning_rate": 0.00045270247316211445, "loss": 3.4715, "step": 64465 }, { "epoch": 4.380350591112923, "grad_norm": 1.8134469985961914, 
"learning_rate": 0.0004526600081532817, "loss": 3.2701, "step": 64470 }, { "epoch": 4.380690311183585, "grad_norm": 1.6685960292816162, "learning_rate": 0.00045261754314444895, "loss": 3.3444, "step": 64475 }, { "epoch": 4.381030031254246, "grad_norm": 1.979351282119751, "learning_rate": 0.0004525750781356163, "loss": 3.418, "step": 64480 }, { "epoch": 4.381369751324908, "grad_norm": 1.9043983221054077, "learning_rate": 0.00045253261312678357, "loss": 3.4254, "step": 64485 }, { "epoch": 4.38170947139557, "grad_norm": 1.8400859832763672, "learning_rate": 0.0004524901481179508, "loss": 3.4462, "step": 64490 }, { "epoch": 4.382049191466232, "grad_norm": 2.048153877258301, "learning_rate": 0.0004524476831091181, "loss": 3.4635, "step": 64495 }, { "epoch": 4.382388911536894, "grad_norm": 1.8954297304153442, "learning_rate": 0.0004524052181002854, "loss": 3.0991, "step": 64500 }, { "epoch": 4.382728631607556, "grad_norm": 2.0419111251831055, "learning_rate": 0.00045236275309145263, "loss": 3.6199, "step": 64505 }, { "epoch": 4.383068351678217, "grad_norm": 1.788674235343933, "learning_rate": 0.0004523202880826199, "loss": 3.323, "step": 64510 }, { "epoch": 4.383408071748879, "grad_norm": 1.5389244556427002, "learning_rate": 0.00045227782307378725, "loss": 3.469, "step": 64515 }, { "epoch": 4.383747791819541, "grad_norm": 2.3230693340301514, "learning_rate": 0.00045223535806495447, "loss": 3.5778, "step": 64520 }, { "epoch": 4.384087511890202, "grad_norm": 2.094280481338501, "learning_rate": 0.00045219289305612175, "loss": 3.7766, "step": 64525 }, { "epoch": 4.384427231960864, "grad_norm": 1.6609793901443481, "learning_rate": 0.0004521504280472891, "loss": 3.6213, "step": 64530 }, { "epoch": 4.384766952031526, "grad_norm": 1.8464328050613403, "learning_rate": 0.0004521079630384563, "loss": 3.3632, "step": 64535 }, { "epoch": 4.385106672102188, "grad_norm": 1.7348501682281494, "learning_rate": 0.0004520654980296236, "loss": 3.5729, "step": 64540 }, { "epoch": 
4.38544639217285, "grad_norm": 2.0814707279205322, "learning_rate": 0.0004520230330207909, "loss": 3.2357, "step": 64545 }, { "epoch": 4.385786112243512, "grad_norm": 2.2661426067352295, "learning_rate": 0.00045198056801195815, "loss": 3.5794, "step": 64550 }, { "epoch": 4.386125832314173, "grad_norm": 1.606168508529663, "learning_rate": 0.00045193810300312543, "loss": 3.3849, "step": 64555 }, { "epoch": 4.386465552384835, "grad_norm": 1.648384690284729, "learning_rate": 0.0004518956379942927, "loss": 3.3322, "step": 64560 }, { "epoch": 4.386805272455497, "grad_norm": 1.8659157752990723, "learning_rate": 0.00045185317298546, "loss": 3.5395, "step": 64565 }, { "epoch": 4.387144992526158, "grad_norm": 1.7045056819915771, "learning_rate": 0.0004518107079766273, "loss": 3.3307, "step": 64570 }, { "epoch": 4.38748471259682, "grad_norm": 2.075593948364258, "learning_rate": 0.00045176824296779455, "loss": 3.3466, "step": 64575 }, { "epoch": 4.3878244326674825, "grad_norm": 1.8247047662734985, "learning_rate": 0.0004517257779589618, "loss": 3.6375, "step": 64580 }, { "epoch": 4.388164152738144, "grad_norm": 1.454236626625061, "learning_rate": 0.0004516833129501291, "loss": 3.4586, "step": 64585 }, { "epoch": 4.388503872808806, "grad_norm": 1.8392833471298218, "learning_rate": 0.0004516408479412964, "loss": 3.4297, "step": 64590 }, { "epoch": 4.388843592879467, "grad_norm": 1.5762248039245605, "learning_rate": 0.0004515983829324636, "loss": 3.5533, "step": 64595 }, { "epoch": 4.389183312950129, "grad_norm": 1.9042444229125977, "learning_rate": 0.00045155591792363095, "loss": 3.3321, "step": 64600 }, { "epoch": 4.389523033020791, "grad_norm": 1.986138105392456, "learning_rate": 0.00045151345291479823, "loss": 3.6838, "step": 64605 }, { "epoch": 4.389862753091452, "grad_norm": 2.173466920852661, "learning_rate": 0.00045147098790596546, "loss": 3.5572, "step": 64610 }, { "epoch": 4.390202473162114, "grad_norm": 1.8128821849822998, "learning_rate": 0.00045142852289713274, 
"loss": 3.6611, "step": 64615 }, { "epoch": 4.390542193232776, "grad_norm": 1.9256377220153809, "learning_rate": 0.0004513860578883001, "loss": 3.3404, "step": 64620 }, { "epoch": 4.390881913303438, "grad_norm": 1.8450651168823242, "learning_rate": 0.0004513435928794673, "loss": 3.4579, "step": 64625 }, { "epoch": 4.3912216333741, "grad_norm": 1.7326622009277344, "learning_rate": 0.0004513011278706346, "loss": 3.2082, "step": 64630 }, { "epoch": 4.391561353444762, "grad_norm": 1.7889187335968018, "learning_rate": 0.0004512671558635684, "loss": 3.2563, "step": 64635 }, { "epoch": 4.391901073515423, "grad_norm": 1.7609758377075195, "learning_rate": 0.00045122469085473574, "loss": 3.5058, "step": 64640 }, { "epoch": 4.392240793586085, "grad_norm": 1.7049601078033447, "learning_rate": 0.00045118222584590296, "loss": 3.2509, "step": 64645 }, { "epoch": 4.392580513656747, "grad_norm": 1.8833926916122437, "learning_rate": 0.00045113976083707024, "loss": 3.3014, "step": 64650 }, { "epoch": 4.392920233727408, "grad_norm": 1.9281548261642456, "learning_rate": 0.0004510972958282376, "loss": 3.3007, "step": 64655 }, { "epoch": 4.39325995379807, "grad_norm": 1.629738450050354, "learning_rate": 0.0004510548308194048, "loss": 3.5369, "step": 64660 }, { "epoch": 4.393599673868732, "grad_norm": 2.40571665763855, "learning_rate": 0.0004510123658105721, "loss": 3.2529, "step": 64665 }, { "epoch": 4.393939393939394, "grad_norm": 1.6764466762542725, "learning_rate": 0.00045096990080173936, "loss": 3.2859, "step": 64670 }, { "epoch": 4.394279114010056, "grad_norm": 1.722762942314148, "learning_rate": 0.0004509274357929067, "loss": 3.2383, "step": 64675 }, { "epoch": 4.394618834080718, "grad_norm": 1.5340008735656738, "learning_rate": 0.0004508849707840739, "loss": 3.5843, "step": 64680 }, { "epoch": 4.394958554151379, "grad_norm": 1.7807093858718872, "learning_rate": 0.0004508425057752412, "loss": 3.4813, "step": 64685 }, { "epoch": 4.395298274222041, "grad_norm": 2.0356853008270264, 
"learning_rate": 0.00045080004076640854, "loss": 3.4113, "step": 64690 }, { "epoch": 4.395637994292703, "grad_norm": 2.134979486465454, "learning_rate": 0.00045075757575757577, "loss": 3.2806, "step": 64695 }, { "epoch": 4.395977714363364, "grad_norm": 1.9636642932891846, "learning_rate": 0.00045071511074874305, "loss": 3.164, "step": 64700 }, { "epoch": 4.396317434434026, "grad_norm": 3.056079864501953, "learning_rate": 0.0004506726457399103, "loss": 3.1134, "step": 64705 }, { "epoch": 4.396657154504688, "grad_norm": 1.801955223083496, "learning_rate": 0.0004506301807310776, "loss": 3.2207, "step": 64710 }, { "epoch": 4.39699687457535, "grad_norm": 1.5038107633590698, "learning_rate": 0.0004505877157222449, "loss": 3.2543, "step": 64715 }, { "epoch": 4.397336594646012, "grad_norm": 2.5807037353515625, "learning_rate": 0.00045054525071341217, "loss": 3.2404, "step": 64720 }, { "epoch": 4.397676314716674, "grad_norm": 1.799155354499817, "learning_rate": 0.00045050278570457945, "loss": 3.2823, "step": 64725 }, { "epoch": 4.398016034787335, "grad_norm": 1.6308729648590088, "learning_rate": 0.0004504603206957467, "loss": 3.2383, "step": 64730 }, { "epoch": 4.398355754857997, "grad_norm": 1.3332715034484863, "learning_rate": 0.000450417855686914, "loss": 3.5145, "step": 64735 }, { "epoch": 4.398695474928659, "grad_norm": 2.0403995513916016, "learning_rate": 0.00045037539067808123, "loss": 3.21, "step": 64740 }, { "epoch": 4.39903519499932, "grad_norm": 1.782570481300354, "learning_rate": 0.00045033292566924857, "loss": 3.3942, "step": 64745 }, { "epoch": 4.399374915069982, "grad_norm": 1.5541508197784424, "learning_rate": 0.00045029046066041585, "loss": 3.2104, "step": 64750 }, { "epoch": 4.399714635140644, "grad_norm": 1.6216773986816406, "learning_rate": 0.00045024799565158307, "loss": 3.4758, "step": 64755 }, { "epoch": 4.400054355211306, "grad_norm": 1.9447611570358276, "learning_rate": 0.0004502055306427504, "loss": 3.3593, "step": 64760 }, { "epoch": 
4.400394075281968, "grad_norm": 1.5364813804626465, "learning_rate": 0.0004501630656339177, "loss": 3.1565, "step": 64765 }, { "epoch": 4.40073379535263, "grad_norm": 1.702623963356018, "learning_rate": 0.0004501206006250849, "loss": 3.2824, "step": 64770 }, { "epoch": 4.401073515423291, "grad_norm": 1.483439564704895, "learning_rate": 0.0004500781356162522, "loss": 3.4181, "step": 64775 }, { "epoch": 4.401413235493953, "grad_norm": 1.6713221073150635, "learning_rate": 0.0004500356706074195, "loss": 3.3394, "step": 64780 }, { "epoch": 4.401752955564615, "grad_norm": 1.8603520393371582, "learning_rate": 0.00044999320559858675, "loss": 3.2023, "step": 64785 }, { "epoch": 4.402092675635276, "grad_norm": 1.8662257194519043, "learning_rate": 0.00044995074058975403, "loss": 3.407, "step": 64790 }, { "epoch": 4.402432395705938, "grad_norm": 1.9278852939605713, "learning_rate": 0.00044990827558092137, "loss": 3.374, "step": 64795 }, { "epoch": 4.4027721157766, "grad_norm": 1.8346525430679321, "learning_rate": 0.0004498658105720886, "loss": 3.0492, "step": 64800 }, { "epoch": 4.403111835847262, "grad_norm": 2.567089080810547, "learning_rate": 0.00044982334556325587, "loss": 3.3846, "step": 64805 }, { "epoch": 4.403451555917924, "grad_norm": 2.2353615760803223, "learning_rate": 0.00044978088055442315, "loss": 3.3687, "step": 64810 }, { "epoch": 4.403791275988586, "grad_norm": 2.0683071613311768, "learning_rate": 0.00044973841554559043, "loss": 3.3913, "step": 64815 }, { "epoch": 4.404130996059247, "grad_norm": 1.9080140590667725, "learning_rate": 0.0004496959505367577, "loss": 3.298, "step": 64820 }, { "epoch": 4.404470716129909, "grad_norm": 1.5740243196487427, "learning_rate": 0.000449653485527925, "loss": 3.1763, "step": 64825 }, { "epoch": 4.404810436200571, "grad_norm": 1.6174594163894653, "learning_rate": 0.0004496110205190923, "loss": 3.7008, "step": 64830 }, { "epoch": 4.405150156271232, "grad_norm": 1.779199242591858, "learning_rate": 0.00044956855551025955, "loss": 
3.5314, "step": 64835 }, { "epoch": 4.405489876341894, "grad_norm": 1.6921343803405762, "learning_rate": 0.00044952609050142683, "loss": 3.3473, "step": 64840 }, { "epoch": 4.4058295964125564, "grad_norm": 1.865946888923645, "learning_rate": 0.0004494836254925941, "loss": 3.1383, "step": 64845 }, { "epoch": 4.406169316483218, "grad_norm": 1.5150701999664307, "learning_rate": 0.0004494411604837614, "loss": 3.4088, "step": 64850 }, { "epoch": 4.40650903655388, "grad_norm": 1.7562050819396973, "learning_rate": 0.0004493986954749287, "loss": 3.3604, "step": 64855 }, { "epoch": 4.406848756624542, "grad_norm": 2.014815330505371, "learning_rate": 0.00044935623046609595, "loss": 3.2424, "step": 64860 }, { "epoch": 4.407188476695203, "grad_norm": 1.744317650794983, "learning_rate": 0.00044931376545726323, "loss": 3.3709, "step": 64865 }, { "epoch": 4.407528196765865, "grad_norm": 2.0018856525421143, "learning_rate": 0.0004492713004484305, "loss": 3.267, "step": 64870 }, { "epoch": 4.407867916836526, "grad_norm": 2.177947759628296, "learning_rate": 0.0004492288354395978, "loss": 3.4388, "step": 64875 }, { "epoch": 4.408207636907188, "grad_norm": 1.9847081899642944, "learning_rate": 0.000449186370430765, "loss": 3.6166, "step": 64880 }, { "epoch": 4.40854735697785, "grad_norm": 1.404948115348816, "learning_rate": 0.00044914390542193235, "loss": 3.2537, "step": 64885 }, { "epoch": 4.408887077048512, "grad_norm": 1.9210681915283203, "learning_rate": 0.00044910144041309963, "loss": 3.4452, "step": 64890 }, { "epoch": 4.409226797119174, "grad_norm": 1.536621332168579, "learning_rate": 0.00044905897540426686, "loss": 3.3057, "step": 64895 }, { "epoch": 4.409566517189836, "grad_norm": 1.7078410387039185, "learning_rate": 0.0004490165103954342, "loss": 3.3278, "step": 64900 }, { "epoch": 4.409906237260497, "grad_norm": 1.925953984260559, "learning_rate": 0.0004489740453866015, "loss": 3.3027, "step": 64905 }, { "epoch": 4.410245957331159, "grad_norm": 2.0465927124023438, 
"learning_rate": 0.0004489315803777687, "loss": 3.4378, "step": 64910 }, { "epoch": 4.410585677401821, "grad_norm": 2.109347105026245, "learning_rate": 0.00044888911536893603, "loss": 3.5626, "step": 64915 }, { "epoch": 4.410925397472482, "grad_norm": 2.1377925872802734, "learning_rate": 0.0004488466503601033, "loss": 3.0943, "step": 64920 }, { "epoch": 4.411265117543144, "grad_norm": 1.7624990940093994, "learning_rate": 0.00044880418535127054, "loss": 3.2555, "step": 64925 }, { "epoch": 4.411604837613806, "grad_norm": 1.4711674451828003, "learning_rate": 0.0004487617203424378, "loss": 3.2258, "step": 64930 }, { "epoch": 4.411944557684468, "grad_norm": 1.8844050168991089, "learning_rate": 0.00044871925533360515, "loss": 3.5012, "step": 64935 }, { "epoch": 4.41228427775513, "grad_norm": 1.632546305656433, "learning_rate": 0.0004486767903247724, "loss": 3.32, "step": 64940 }, { "epoch": 4.412623997825792, "grad_norm": 1.6960290670394897, "learning_rate": 0.00044863432531593966, "loss": 3.319, "step": 64945 }, { "epoch": 4.412963717896453, "grad_norm": 2.0828843116760254, "learning_rate": 0.000448591860307107, "loss": 3.3002, "step": 64950 }, { "epoch": 4.413303437967115, "grad_norm": 1.950202226638794, "learning_rate": 0.0004485493952982742, "loss": 3.4316, "step": 64955 }, { "epoch": 4.413643158037777, "grad_norm": 1.8168435096740723, "learning_rate": 0.0004485069302894415, "loss": 3.689, "step": 64960 }, { "epoch": 4.413982878108438, "grad_norm": 1.2683322429656982, "learning_rate": 0.0004484644652806088, "loss": 3.3124, "step": 64965 }, { "epoch": 4.4143225981791, "grad_norm": 2.1289381980895996, "learning_rate": 0.00044842200027177606, "loss": 3.5465, "step": 64970 }, { "epoch": 4.414662318249762, "grad_norm": 1.5594290494918823, "learning_rate": 0.00044837953526294334, "loss": 3.3054, "step": 64975 }, { "epoch": 4.415002038320424, "grad_norm": 1.6987260580062866, "learning_rate": 0.0004483370702541106, "loss": 3.4931, "step": 64980 }, { "epoch": 
4.415341758391086, "grad_norm": 1.96963369846344, "learning_rate": 0.0004482946052452779, "loss": 3.5721, "step": 64985 }, { "epoch": 4.415681478461748, "grad_norm": 1.7813607454299927, "learning_rate": 0.0004482521402364452, "loss": 3.5767, "step": 64990 }, { "epoch": 4.416021198532409, "grad_norm": 1.648568868637085, "learning_rate": 0.00044820967522761246, "loss": 3.4013, "step": 64995 }, { "epoch": 4.416360918603071, "grad_norm": 1.9732160568237305, "learning_rate": 0.0004481672102187797, "loss": 3.0994, "step": 65000 }, { "epoch": 4.416700638673733, "grad_norm": 1.5794447660446167, "learning_rate": 0.000448124745209947, "loss": 3.0108, "step": 65005 }, { "epoch": 4.417040358744394, "grad_norm": 1.4873663187026978, "learning_rate": 0.0004480822802011143, "loss": 3.2437, "step": 65010 }, { "epoch": 4.417380078815056, "grad_norm": 1.9358571767807007, "learning_rate": 0.0004480398151922816, "loss": 3.3808, "step": 65015 }, { "epoch": 4.417719798885718, "grad_norm": 1.9746365547180176, "learning_rate": 0.00044799735018344886, "loss": 3.4511, "step": 65020 }, { "epoch": 4.41805951895638, "grad_norm": 1.738538146018982, "learning_rate": 0.00044795488517461614, "loss": 3.2299, "step": 65025 }, { "epoch": 4.418399239027042, "grad_norm": 1.9980428218841553, "learning_rate": 0.0004479124201657834, "loss": 3.5999, "step": 65030 }, { "epoch": 4.418738959097704, "grad_norm": 1.8276960849761963, "learning_rate": 0.00044786995515695065, "loss": 3.3686, "step": 65035 }, { "epoch": 4.419078679168365, "grad_norm": 1.5994455814361572, "learning_rate": 0.000447827490148118, "loss": 3.2797, "step": 65040 }, { "epoch": 4.419418399239027, "grad_norm": 2.0638463497161865, "learning_rate": 0.00044778502513928526, "loss": 3.1056, "step": 65045 }, { "epoch": 4.419758119309689, "grad_norm": 2.2326619625091553, "learning_rate": 0.0004477425601304525, "loss": 3.3817, "step": 65050 }, { "epoch": 4.42009783938035, "grad_norm": 1.5707324743270874, "learning_rate": 0.0004477000951216198, 
"loss": 3.6377, "step": 65055 }, { "epoch": 4.420437559451012, "grad_norm": 1.7092106342315674, "learning_rate": 0.0004476576301127871, "loss": 3.5898, "step": 65060 }, { "epoch": 4.420777279521674, "grad_norm": 1.5232332944869995, "learning_rate": 0.00044761516510395433, "loss": 3.4145, "step": 65065 }, { "epoch": 4.421116999592336, "grad_norm": 1.711226463317871, "learning_rate": 0.0004475727000951216, "loss": 3.2249, "step": 65070 }, { "epoch": 4.421456719662998, "grad_norm": 1.964913010597229, "learning_rate": 0.00044753023508628894, "loss": 3.2272, "step": 65075 }, { "epoch": 4.42179643973366, "grad_norm": 1.9417657852172852, "learning_rate": 0.00044748777007745617, "loss": 3.2897, "step": 65080 }, { "epoch": 4.422136159804321, "grad_norm": 1.819454312324524, "learning_rate": 0.00044744530506862345, "loss": 3.5271, "step": 65085 }, { "epoch": 4.422475879874983, "grad_norm": 1.642629623413086, "learning_rate": 0.0004474028400597908, "loss": 3.0397, "step": 65090 }, { "epoch": 4.422815599945645, "grad_norm": 1.9443799257278442, "learning_rate": 0.000447360375050958, "loss": 3.3802, "step": 65095 }, { "epoch": 4.423155320016306, "grad_norm": 1.7475947141647339, "learning_rate": 0.0004473179100421253, "loss": 3.4887, "step": 65100 }, { "epoch": 4.423495040086968, "grad_norm": 1.9598162174224854, "learning_rate": 0.00044727544503329257, "loss": 3.4378, "step": 65105 }, { "epoch": 4.42383476015763, "grad_norm": 1.5461233854293823, "learning_rate": 0.00044723298002445985, "loss": 3.3477, "step": 65110 }, { "epoch": 4.424174480228292, "grad_norm": 2.0311450958251953, "learning_rate": 0.00044719051501562713, "loss": 3.4295, "step": 65115 }, { "epoch": 4.424514200298954, "grad_norm": 2.193528652191162, "learning_rate": 0.0004471480500067944, "loss": 3.2837, "step": 65120 }, { "epoch": 4.424853920369616, "grad_norm": 2.4546658992767334, "learning_rate": 0.0004471055849979617, "loss": 3.2837, "step": 65125 }, { "epoch": 4.425193640440277, "grad_norm": 1.773696780204773, 
"learning_rate": 0.00044706311998912897, "loss": 3.1182, "step": 65130 }, { "epoch": 4.425533360510939, "grad_norm": 1.513872742652893, "learning_rate": 0.00044702065498029625, "loss": 3.2051, "step": 65135 }, { "epoch": 4.425873080581601, "grad_norm": 1.946563482284546, "learning_rate": 0.0004469781899714635, "loss": 3.339, "step": 65140 }, { "epoch": 4.426212800652262, "grad_norm": 2.425863027572632, "learning_rate": 0.0004469357249626308, "loss": 3.5522, "step": 65145 }, { "epoch": 4.426552520722924, "grad_norm": 1.861241102218628, "learning_rate": 0.0004468932599537981, "loss": 3.3704, "step": 65150 }, { "epoch": 4.4268922407935865, "grad_norm": 1.8166714906692505, "learning_rate": 0.0004468507949449653, "loss": 3.4016, "step": 65155 }, { "epoch": 4.427231960864248, "grad_norm": 1.8250221014022827, "learning_rate": 0.00044680832993613265, "loss": 3.7891, "step": 65160 }, { "epoch": 4.42757168093491, "grad_norm": 2.1231915950775146, "learning_rate": 0.00044676586492729993, "loss": 3.6904, "step": 65165 }, { "epoch": 4.427911401005572, "grad_norm": 1.7919342517852783, "learning_rate": 0.00044672339991846716, "loss": 3.4601, "step": 65170 }, { "epoch": 4.428251121076233, "grad_norm": 2.0839035511016846, "learning_rate": 0.00044668093490963444, "loss": 3.2164, "step": 65175 }, { "epoch": 4.428590841146895, "grad_norm": 1.5737636089324951, "learning_rate": 0.00044663846990080177, "loss": 3.4783, "step": 65180 }, { "epoch": 4.428930561217557, "grad_norm": 1.7011358737945557, "learning_rate": 0.00044659600489196905, "loss": 3.5095, "step": 65185 }, { "epoch": 4.429270281288218, "grad_norm": 1.821164846420288, "learning_rate": 0.0004465535398831363, "loss": 3.562, "step": 65190 }, { "epoch": 4.42961000135888, "grad_norm": 1.7457956075668335, "learning_rate": 0.0004465110748743036, "loss": 3.2867, "step": 65195 }, { "epoch": 4.4299497214295425, "grad_norm": 1.677951455116272, "learning_rate": 0.0004464686098654709, "loss": 3.6328, "step": 65200 }, { "epoch": 
4.430289441500204, "grad_norm": 1.8033673763275146, "learning_rate": 0.0004464261448566381, "loss": 3.4209, "step": 65205 }, { "epoch": 4.430629161570866, "grad_norm": 1.4290587902069092, "learning_rate": 0.00044638367984780545, "loss": 3.1739, "step": 65210 }, { "epoch": 4.430968881641528, "grad_norm": 2.310960054397583, "learning_rate": 0.00044634121483897273, "loss": 3.5771, "step": 65215 }, { "epoch": 4.431308601712189, "grad_norm": 1.7498706579208374, "learning_rate": 0.00044629874983013996, "loss": 3.5911, "step": 65220 }, { "epoch": 4.431648321782851, "grad_norm": 1.754834771156311, "learning_rate": 0.00044625628482130724, "loss": 3.3836, "step": 65225 }, { "epoch": 4.431988041853513, "grad_norm": 2.1502857208251953, "learning_rate": 0.00044621381981247457, "loss": 3.4415, "step": 65230 }, { "epoch": 4.432327761924174, "grad_norm": 2.2431640625, "learning_rate": 0.0004461713548036418, "loss": 3.5485, "step": 65235 }, { "epoch": 4.432667481994836, "grad_norm": 2.224797487258911, "learning_rate": 0.0004461288897948091, "loss": 3.433, "step": 65240 }, { "epoch": 4.4330072020654985, "grad_norm": 1.952824354171753, "learning_rate": 0.0004460864247859764, "loss": 3.3396, "step": 65245 }, { "epoch": 4.43334692213616, "grad_norm": 1.4497042894363403, "learning_rate": 0.00044604395977714364, "loss": 3.7283, "step": 65250 }, { "epoch": 4.433686642206822, "grad_norm": 2.0349044799804688, "learning_rate": 0.0004460014947683109, "loss": 3.3057, "step": 65255 }, { "epoch": 4.434026362277484, "grad_norm": 2.182559013366699, "learning_rate": 0.0004459590297594782, "loss": 3.3652, "step": 65260 }, { "epoch": 4.434366082348145, "grad_norm": 2.3754656314849854, "learning_rate": 0.0004459165647506455, "loss": 3.5893, "step": 65265 }, { "epoch": 4.434705802418807, "grad_norm": 1.8900299072265625, "learning_rate": 0.00044587409974181276, "loss": 3.5961, "step": 65270 }, { "epoch": 4.435045522489468, "grad_norm": 1.7895383834838867, "learning_rate": 0.00044583163473298004, "loss": 
3.3411, "step": 65275 }, { "epoch": 4.43538524256013, "grad_norm": 1.968557357788086, "learning_rate": 0.0004457891697241473, "loss": 3.3451, "step": 65280 }, { "epoch": 4.435724962630792, "grad_norm": 1.9632465839385986, "learning_rate": 0.0004457467047153146, "loss": 3.7532, "step": 65285 }, { "epoch": 4.436064682701454, "grad_norm": 2.0310747623443604, "learning_rate": 0.0004457042397064819, "loss": 3.1216, "step": 65290 }, { "epoch": 4.436404402772116, "grad_norm": 2.6385912895202637, "learning_rate": 0.0004456617746976491, "loss": 3.3463, "step": 65295 }, { "epoch": 4.436744122842778, "grad_norm": 1.894304633140564, "learning_rate": 0.00044561930968881644, "loss": 3.3944, "step": 65300 }, { "epoch": 4.437083842913439, "grad_norm": 1.95399808883667, "learning_rate": 0.0004455768446799837, "loss": 3.5105, "step": 65305 }, { "epoch": 4.437423562984101, "grad_norm": 1.9404572248458862, "learning_rate": 0.00044553437967115094, "loss": 3.5897, "step": 65310 }, { "epoch": 4.437763283054763, "grad_norm": 1.857170581817627, "learning_rate": 0.0004454919146623183, "loss": 3.2444, "step": 65315 }, { "epoch": 4.438103003125424, "grad_norm": 1.624802589416504, "learning_rate": 0.00044544944965348556, "loss": 3.6266, "step": 65320 }, { "epoch": 4.438442723196086, "grad_norm": 1.7468866109848022, "learning_rate": 0.0004454069846446528, "loss": 3.1891, "step": 65325 }, { "epoch": 4.438782443266748, "grad_norm": 1.49713134765625, "learning_rate": 0.00044536451963582006, "loss": 3.2462, "step": 65330 }, { "epoch": 4.43912216333741, "grad_norm": 1.8653451204299927, "learning_rate": 0.0004453220546269874, "loss": 3.5771, "step": 65335 }, { "epoch": 4.439461883408072, "grad_norm": 1.8390488624572754, "learning_rate": 0.0004452795896181546, "loss": 3.3032, "step": 65340 }, { "epoch": 4.439801603478734, "grad_norm": 1.7464821338653564, "learning_rate": 0.0004452371246093219, "loss": 3.2648, "step": 65345 }, { "epoch": 4.440141323549395, "grad_norm": 1.6820625066757202, 
"learning_rate": 0.00044519465960048924, "loss": 3.4831, "step": 65350 }, { "epoch": 4.440481043620057, "grad_norm": 1.596714735031128, "learning_rate": 0.0004451521945916565, "loss": 3.276, "step": 65355 }, { "epoch": 4.440820763690719, "grad_norm": 2.1196186542510986, "learning_rate": 0.00044510972958282374, "loss": 3.2686, "step": 65360 }, { "epoch": 4.44116048376138, "grad_norm": 1.7743985652923584, "learning_rate": 0.000445067264573991, "loss": 3.567, "step": 65365 }, { "epoch": 4.441500203832042, "grad_norm": 1.973243236541748, "learning_rate": 0.00044502479956515836, "loss": 3.2448, "step": 65370 }, { "epoch": 4.441839923902704, "grad_norm": 2.1186437606811523, "learning_rate": 0.0004449823345563256, "loss": 3.1596, "step": 65375 }, { "epoch": 4.442179643973366, "grad_norm": 1.970597267150879, "learning_rate": 0.00044493986954749286, "loss": 3.4512, "step": 65380 }, { "epoch": 4.442519364044028, "grad_norm": 1.7276427745819092, "learning_rate": 0.0004448974045386602, "loss": 3.0832, "step": 65385 }, { "epoch": 4.44285908411469, "grad_norm": 1.9779632091522217, "learning_rate": 0.0004448549395298274, "loss": 3.4809, "step": 65390 }, { "epoch": 4.443198804185351, "grad_norm": 2.160933017730713, "learning_rate": 0.0004448124745209947, "loss": 3.4061, "step": 65395 }, { "epoch": 4.443538524256013, "grad_norm": 2.0705580711364746, "learning_rate": 0.000444770009512162, "loss": 3.4364, "step": 65400 }, { "epoch": 4.443878244326675, "grad_norm": 1.7182941436767578, "learning_rate": 0.00044472754450332927, "loss": 3.3127, "step": 65405 }, { "epoch": 4.444217964397336, "grad_norm": 1.6270700693130493, "learning_rate": 0.00044468507949449655, "loss": 3.2633, "step": 65410 }, { "epoch": 4.444557684467998, "grad_norm": 1.7597410678863525, "learning_rate": 0.0004446426144856638, "loss": 3.3555, "step": 65415 }, { "epoch": 4.4448974045386604, "grad_norm": 1.7400448322296143, "learning_rate": 0.0004446001494768311, "loss": 3.3907, "step": 65420 }, { "epoch": 
4.445237124609322, "grad_norm": 1.938644289970398, "learning_rate": 0.0004445576844679984, "loss": 3.5597, "step": 65425 }, { "epoch": 4.445576844679984, "grad_norm": 1.910553216934204, "learning_rate": 0.00044451521945916567, "loss": 3.5079, "step": 65430 }, { "epoch": 4.445916564750646, "grad_norm": 2.192323923110962, "learning_rate": 0.0004444727544503329, "loss": 3.3264, "step": 65435 }, { "epoch": 4.446256284821307, "grad_norm": 1.4627636671066284, "learning_rate": 0.0004444302894415002, "loss": 3.4062, "step": 65440 }, { "epoch": 4.446596004891969, "grad_norm": 1.8522697687149048, "learning_rate": 0.0004443878244326675, "loss": 3.4966, "step": 65445 }, { "epoch": 4.446935724962631, "grad_norm": 1.6728732585906982, "learning_rate": 0.00044434535942383473, "loss": 3.3885, "step": 65450 }, { "epoch": 4.447275445033292, "grad_norm": 1.836928129196167, "learning_rate": 0.00044430289441500207, "loss": 3.4632, "step": 65455 }, { "epoch": 4.447615165103954, "grad_norm": 1.7162940502166748, "learning_rate": 0.00044426042940616935, "loss": 3.2159, "step": 65460 }, { "epoch": 4.4479548851746165, "grad_norm": 2.5226011276245117, "learning_rate": 0.00044421796439733657, "loss": 3.4814, "step": 65465 }, { "epoch": 4.448294605245278, "grad_norm": 1.9000893831253052, "learning_rate": 0.00044417549938850385, "loss": 3.3292, "step": 65470 }, { "epoch": 4.44863432531594, "grad_norm": 2.000744342803955, "learning_rate": 0.0004441330343796712, "loss": 3.2635, "step": 65475 }, { "epoch": 4.448974045386602, "grad_norm": 2.294005870819092, "learning_rate": 0.0004440905693708384, "loss": 3.3136, "step": 65480 }, { "epoch": 4.449313765457263, "grad_norm": 1.8924751281738281, "learning_rate": 0.0004440481043620057, "loss": 3.2552, "step": 65485 }, { "epoch": 4.449653485527925, "grad_norm": 1.6626960039138794, "learning_rate": 0.000444005639353173, "loss": 3.3406, "step": 65490 }, { "epoch": 4.449993205598587, "grad_norm": 1.6727702617645264, "learning_rate": 0.00044396317434434025, 
"loss": 3.4876, "step": 65495 }, { "epoch": 4.450332925669248, "grad_norm": 1.3902729749679565, "learning_rate": 0.00044392070933550753, "loss": 3.3533, "step": 65500 }, { "epoch": 4.45067264573991, "grad_norm": 1.7572954893112183, "learning_rate": 0.00044387824432667487, "loss": 3.3249, "step": 65505 }, { "epoch": 4.4510123658105725, "grad_norm": 1.7171672582626343, "learning_rate": 0.0004438357793178421, "loss": 3.3022, "step": 65510 }, { "epoch": 4.451352085881234, "grad_norm": 1.7984827756881714, "learning_rate": 0.00044379331430900937, "loss": 3.7589, "step": 65515 }, { "epoch": 4.451691805951896, "grad_norm": 1.7652335166931152, "learning_rate": 0.00044375084930017665, "loss": 3.4772, "step": 65520 }, { "epoch": 4.452031526022558, "grad_norm": 1.9586899280548096, "learning_rate": 0.000443708384291344, "loss": 3.4703, "step": 65525 }, { "epoch": 4.452371246093219, "grad_norm": 1.690705418586731, "learning_rate": 0.0004436659192825112, "loss": 3.0048, "step": 65530 }, { "epoch": 4.452710966163881, "grad_norm": 1.7471469640731812, "learning_rate": 0.0004436234542736785, "loss": 3.25, "step": 65535 }, { "epoch": 4.453050686234543, "grad_norm": 1.6825593709945679, "learning_rate": 0.00044358098926484583, "loss": 3.3741, "step": 65540 }, { "epoch": 4.453390406305204, "grad_norm": 1.7623215913772583, "learning_rate": 0.00044353852425601305, "loss": 3.5056, "step": 65545 }, { "epoch": 4.453730126375866, "grad_norm": 1.4240422248840332, "learning_rate": 0.00044349605924718033, "loss": 3.2613, "step": 65550 }, { "epoch": 4.454069846446528, "grad_norm": 1.5750585794448853, "learning_rate": 0.0004434535942383476, "loss": 3.3394, "step": 65555 }, { "epoch": 4.45440956651719, "grad_norm": 1.4523541927337646, "learning_rate": 0.0004434111292295149, "loss": 3.2906, "step": 65560 }, { "epoch": 4.454749286587852, "grad_norm": 1.8525421619415283, "learning_rate": 0.0004433686642206822, "loss": 3.3017, "step": 65565 }, { "epoch": 4.455089006658513, "grad_norm": 
2.3355305194854736, "learning_rate": 0.00044332619921184945, "loss": 3.6166, "step": 65570 }, { "epoch": 4.455428726729175, "grad_norm": 1.426655888557434, "learning_rate": 0.00044328373420301673, "loss": 3.4755, "step": 65575 }, { "epoch": 4.455768446799837, "grad_norm": 1.880454182624817, "learning_rate": 0.000443241269194184, "loss": 3.3004, "step": 65580 }, { "epoch": 4.456108166870498, "grad_norm": 1.8299401998519897, "learning_rate": 0.0004431988041853513, "loss": 3.1572, "step": 65585 }, { "epoch": 4.45644788694116, "grad_norm": 1.918237328529358, "learning_rate": 0.0004431563391765185, "loss": 3.2959, "step": 65590 }, { "epoch": 4.456787607011822, "grad_norm": 1.7077908515930176, "learning_rate": 0.00044311387416768585, "loss": 3.387, "step": 65595 }, { "epoch": 4.457127327082484, "grad_norm": 1.998282551765442, "learning_rate": 0.00044307140915885313, "loss": 3.3958, "step": 65600 }, { "epoch": 4.457467047153146, "grad_norm": 1.9657167196273804, "learning_rate": 0.00044302894415002036, "loss": 3.3871, "step": 65605 }, { "epoch": 4.457806767223808, "grad_norm": 2.424586296081543, "learning_rate": 0.0004429864791411877, "loss": 3.4953, "step": 65610 }, { "epoch": 4.458146487294469, "grad_norm": 2.025543689727783, "learning_rate": 0.000442944014132355, "loss": 3.4746, "step": 65615 }, { "epoch": 4.458486207365131, "grad_norm": 1.6123621463775635, "learning_rate": 0.0004429015491235222, "loss": 3.5251, "step": 65620 }, { "epoch": 4.458825927435793, "grad_norm": 1.8000309467315674, "learning_rate": 0.0004428590841146895, "loss": 3.0426, "step": 65625 }, { "epoch": 4.459165647506454, "grad_norm": 2.4994142055511475, "learning_rate": 0.0004428166191058568, "loss": 3.3619, "step": 65630 }, { "epoch": 4.459505367577116, "grad_norm": 2.0503392219543457, "learning_rate": 0.00044277415409702404, "loss": 3.4201, "step": 65635 }, { "epoch": 4.459845087647778, "grad_norm": 1.5578696727752686, "learning_rate": 0.0004427316890881913, "loss": 3.6561, "step": 65640 }, { 
"epoch": 4.46018480771844, "grad_norm": 2.1458852291107178, "learning_rate": 0.00044268922407935865, "loss": 3.3335, "step": 65645 }, { "epoch": 4.460524527789102, "grad_norm": 1.6295005083084106, "learning_rate": 0.0004426467590705259, "loss": 3.2675, "step": 65650 }, { "epoch": 4.460864247859764, "grad_norm": 2.093937873840332, "learning_rate": 0.00044260429406169316, "loss": 3.3556, "step": 65655 }, { "epoch": 4.461203967930425, "grad_norm": 2.4839425086975098, "learning_rate": 0.00044256182905286044, "loss": 3.2423, "step": 65660 }, { "epoch": 4.461543688001087, "grad_norm": 1.4422036409378052, "learning_rate": 0.0004425193640440277, "loss": 3.3483, "step": 65665 }, { "epoch": 4.461883408071749, "grad_norm": 1.8628220558166504, "learning_rate": 0.000442476899035195, "loss": 3.4268, "step": 65670 }, { "epoch": 4.46222312814241, "grad_norm": 1.6301852464675903, "learning_rate": 0.0004424344340263623, "loss": 3.5344, "step": 65675 }, { "epoch": 4.462562848213072, "grad_norm": 1.2908437252044678, "learning_rate": 0.00044239196901752956, "loss": 3.3992, "step": 65680 }, { "epoch": 4.462902568283734, "grad_norm": 2.0448083877563477, "learning_rate": 0.00044234950400869684, "loss": 3.4874, "step": 65685 }, { "epoch": 4.463242288354396, "grad_norm": 1.9016869068145752, "learning_rate": 0.0004423070389998641, "loss": 3.5111, "step": 65690 }, { "epoch": 4.463582008425058, "grad_norm": 2.2417638301849365, "learning_rate": 0.0004422645739910314, "loss": 3.0797, "step": 65695 }, { "epoch": 4.46392172849572, "grad_norm": 2.0589773654937744, "learning_rate": 0.0004422221089821987, "loss": 3.3444, "step": 65700 }, { "epoch": 4.464261448566381, "grad_norm": 1.6224751472473145, "learning_rate": 0.00044217964397336596, "loss": 3.416, "step": 65705 }, { "epoch": 4.464601168637043, "grad_norm": 1.592044472694397, "learning_rate": 0.00044213717896453324, "loss": 3.1848, "step": 65710 }, { "epoch": 4.464940888707705, "grad_norm": 2.417717218399048, "learning_rate": 
0.0004420947139557005, "loss": 3.387, "step": 65715 }, { "epoch": 4.465280608778366, "grad_norm": 2.0593667030334473, "learning_rate": 0.0004420522489468678, "loss": 3.4282, "step": 65720 }, { "epoch": 4.465620328849028, "grad_norm": 2.042123556137085, "learning_rate": 0.0004420097839380351, "loss": 3.3258, "step": 65725 }, { "epoch": 4.4659600489196905, "grad_norm": 1.654567837715149, "learning_rate": 0.0004419673189292023, "loss": 3.3292, "step": 65730 }, { "epoch": 4.466299768990352, "grad_norm": 1.9619523286819458, "learning_rate": 0.00044192485392036964, "loss": 3.3706, "step": 65735 }, { "epoch": 4.466639489061014, "grad_norm": 1.899104356765747, "learning_rate": 0.0004418823889115369, "loss": 3.0613, "step": 65740 }, { "epoch": 4.466979209131676, "grad_norm": 1.7399274110794067, "learning_rate": 0.00044183992390270415, "loss": 3.3354, "step": 65745 }, { "epoch": 4.467318929202337, "grad_norm": 1.4735249280929565, "learning_rate": 0.0004417974588938715, "loss": 3.2682, "step": 65750 }, { "epoch": 4.467658649272999, "grad_norm": 1.747891902923584, "learning_rate": 0.00044175499388503876, "loss": 3.3675, "step": 65755 }, { "epoch": 4.467998369343661, "grad_norm": 1.566597580909729, "learning_rate": 0.000441712528876206, "loss": 3.6112, "step": 65760 }, { "epoch": 4.468338089414322, "grad_norm": 1.675849199295044, "learning_rate": 0.00044167006386737327, "loss": 3.3982, "step": 65765 }, { "epoch": 4.468677809484984, "grad_norm": 2.1130595207214355, "learning_rate": 0.0004416275988585406, "loss": 3.4485, "step": 65770 }, { "epoch": 4.4690175295556465, "grad_norm": 1.723252296447754, "learning_rate": 0.00044158513384970783, "loss": 3.4962, "step": 65775 }, { "epoch": 4.469357249626308, "grad_norm": 1.6721926927566528, "learning_rate": 0.0004415426688408751, "loss": 3.1405, "step": 65780 }, { "epoch": 4.46969696969697, "grad_norm": 1.741782784461975, "learning_rate": 0.00044150020383204244, "loss": 3.2274, "step": 65785 }, { "epoch": 4.470036689767632, "grad_norm": 
1.994969129562378, "learning_rate": 0.00044145773882320967, "loss": 3.3842, "step": 65790 }, { "epoch": 4.470376409838293, "grad_norm": 1.867851972579956, "learning_rate": 0.00044141527381437695, "loss": 3.6213, "step": 65795 }, { "epoch": 4.470716129908955, "grad_norm": 1.9412811994552612, "learning_rate": 0.0004413728088055443, "loss": 3.2612, "step": 65800 }, { "epoch": 4.471055849979617, "grad_norm": 2.213585376739502, "learning_rate": 0.0004413303437967115, "loss": 3.4002, "step": 65805 }, { "epoch": 4.471395570050278, "grad_norm": 1.8187979459762573, "learning_rate": 0.0004412878787878788, "loss": 3.501, "step": 65810 }, { "epoch": 4.47173529012094, "grad_norm": 1.6318765878677368, "learning_rate": 0.00044124541377904607, "loss": 3.2809, "step": 65815 }, { "epoch": 4.4720750101916025, "grad_norm": 2.2251675128936768, "learning_rate": 0.00044120294877021335, "loss": 3.3086, "step": 65820 }, { "epoch": 4.472414730262264, "grad_norm": 2.0306472778320312, "learning_rate": 0.00044116048376138063, "loss": 3.2656, "step": 65825 }, { "epoch": 4.472754450332926, "grad_norm": 2.102119207382202, "learning_rate": 0.0004411180187525479, "loss": 3.4874, "step": 65830 }, { "epoch": 4.473094170403588, "grad_norm": 1.8732515573501587, "learning_rate": 0.0004410755537437152, "loss": 3.1725, "step": 65835 }, { "epoch": 4.473433890474249, "grad_norm": 1.7666521072387695, "learning_rate": 0.00044103308873488247, "loss": 3.4146, "step": 65840 }, { "epoch": 4.473773610544911, "grad_norm": 2.206961154937744, "learning_rate": 0.00044099062372604975, "loss": 3.2818, "step": 65845 }, { "epoch": 4.474113330615573, "grad_norm": 1.770323634147644, "learning_rate": 0.000440948158717217, "loss": 3.1972, "step": 65850 }, { "epoch": 4.474453050686234, "grad_norm": 2.9968745708465576, "learning_rate": 0.0004409056937083843, "loss": 3.0269, "step": 65855 }, { "epoch": 4.474792770756896, "grad_norm": 2.002129316329956, "learning_rate": 0.0004408632286995516, "loss": 3.1129, "step": 65860 }, { 
"epoch": 4.4751324908275585, "grad_norm": 1.7975114583969116, "learning_rate": 0.00044082076369071887, "loss": 3.4571, "step": 65865 }, { "epoch": 4.47547221089822, "grad_norm": 1.5938814878463745, "learning_rate": 0.00044077829868188615, "loss": 3.353, "step": 65870 }, { "epoch": 4.475811930968882, "grad_norm": 2.4391839504241943, "learning_rate": 0.00044073583367305343, "loss": 3.45, "step": 65875 }, { "epoch": 4.476151651039544, "grad_norm": 1.5789066553115845, "learning_rate": 0.0004406933686642207, "loss": 3.4241, "step": 65880 }, { "epoch": 4.476491371110205, "grad_norm": 2.2707650661468506, "learning_rate": 0.00044065090365538794, "loss": 3.253, "step": 65885 }, { "epoch": 4.476831091180867, "grad_norm": 1.929306983947754, "learning_rate": 0.00044060843864655527, "loss": 3.5233, "step": 65890 }, { "epoch": 4.477170811251529, "grad_norm": 1.79081392288208, "learning_rate": 0.00044056597363772255, "loss": 3.4032, "step": 65895 }, { "epoch": 4.47751053132219, "grad_norm": 2.4993860721588135, "learning_rate": 0.0004405235086288898, "loss": 3.3043, "step": 65900 }, { "epoch": 4.477850251392852, "grad_norm": 2.0298221111297607, "learning_rate": 0.0004404810436200571, "loss": 3.3016, "step": 65905 }, { "epoch": 4.4781899714635145, "grad_norm": 2.3010544776916504, "learning_rate": 0.0004404385786112244, "loss": 3.2654, "step": 65910 }, { "epoch": 4.478529691534176, "grad_norm": 1.5265483856201172, "learning_rate": 0.0004403961136023916, "loss": 3.6026, "step": 65915 }, { "epoch": 4.478869411604838, "grad_norm": 1.9693360328674316, "learning_rate": 0.0004403536485935589, "loss": 3.1873, "step": 65920 }, { "epoch": 4.4792091316755, "grad_norm": 2.1097676753997803, "learning_rate": 0.00044031118358472623, "loss": 3.4042, "step": 65925 }, { "epoch": 4.479548851746161, "grad_norm": 1.8683044910430908, "learning_rate": 0.00044026871857589346, "loss": 3.2946, "step": 65930 }, { "epoch": 4.479888571816823, "grad_norm": 2.796907901763916, "learning_rate": 
0.00044022625356706074, "loss": 3.3096, "step": 65935 }, { "epoch": 4.480228291887485, "grad_norm": 1.5871952772140503, "learning_rate": 0.00044018378855822807, "loss": 3.2425, "step": 65940 }, { "epoch": 4.480568011958146, "grad_norm": 2.535250186920166, "learning_rate": 0.0004401413235493953, "loss": 3.3232, "step": 65945 }, { "epoch": 4.480907732028808, "grad_norm": 1.7465558052062988, "learning_rate": 0.0004400988585405626, "loss": 3.4062, "step": 65950 }, { "epoch": 4.4812474520994705, "grad_norm": 2.1524925231933594, "learning_rate": 0.00044005639353172986, "loss": 3.4706, "step": 65955 }, { "epoch": 4.481587172170132, "grad_norm": 2.165268659591675, "learning_rate": 0.00044001392852289714, "loss": 3.4138, "step": 65960 }, { "epoch": 4.481926892240794, "grad_norm": 1.7551475763320923, "learning_rate": 0.0004399714635140644, "loss": 3.2701, "step": 65965 }, { "epoch": 4.482266612311455, "grad_norm": 1.7764599323272705, "learning_rate": 0.0004399289985052317, "loss": 3.1412, "step": 65970 }, { "epoch": 4.482606332382117, "grad_norm": 1.7298439741134644, "learning_rate": 0.000439886533496399, "loss": 3.2966, "step": 65975 }, { "epoch": 4.482946052452779, "grad_norm": 2.0046091079711914, "learning_rate": 0.00043984406848756626, "loss": 3.3518, "step": 65980 }, { "epoch": 4.48328577252344, "grad_norm": 2.8634755611419678, "learning_rate": 0.00043980160347873354, "loss": 3.5243, "step": 65985 }, { "epoch": 4.483625492594102, "grad_norm": 1.4434186220169067, "learning_rate": 0.00043975913846990076, "loss": 3.2497, "step": 65990 }, { "epoch": 4.483965212664764, "grad_norm": 1.768206000328064, "learning_rate": 0.0004397166734610681, "loss": 3.2653, "step": 65995 }, { "epoch": 4.484304932735426, "grad_norm": 1.8765552043914795, "learning_rate": 0.0004396742084522354, "loss": 3.3357, "step": 66000 }, { "epoch": 4.484644652806088, "grad_norm": 2.995039701461792, "learning_rate": 0.0004396317434434026, "loss": 3.5866, "step": 66005 }, { "epoch": 4.48498437287675, 
"grad_norm": 1.9642540216445923, "learning_rate": 0.00043958927843456994, "loss": 3.2469, "step": 66010 }, { "epoch": 4.485324092947411, "grad_norm": 1.8521660566329956, "learning_rate": 0.0004395468134257372, "loss": 3.4619, "step": 66015 }, { "epoch": 4.485663813018073, "grad_norm": 1.7277706861495972, "learning_rate": 0.00043950434841690444, "loss": 3.3869, "step": 66020 }, { "epoch": 4.486003533088735, "grad_norm": 1.8931961059570312, "learning_rate": 0.0004394618834080717, "loss": 3.3385, "step": 66025 }, { "epoch": 4.486343253159396, "grad_norm": 1.7847148180007935, "learning_rate": 0.00043941941839923906, "loss": 3.2541, "step": 66030 }, { "epoch": 4.486682973230058, "grad_norm": 2.0831353664398193, "learning_rate": 0.00043937695339040634, "loss": 3.3036, "step": 66035 }, { "epoch": 4.4870226933007205, "grad_norm": 1.8267208337783813, "learning_rate": 0.00043933448838157356, "loss": 3.5216, "step": 66040 }, { "epoch": 4.487362413371382, "grad_norm": 2.342899799346924, "learning_rate": 0.0004392920233727409, "loss": 3.5752, "step": 66045 }, { "epoch": 4.487702133442044, "grad_norm": 1.730571985244751, "learning_rate": 0.0004392495583639082, "loss": 3.5263, "step": 66050 }, { "epoch": 4.488041853512706, "grad_norm": 1.6355727910995483, "learning_rate": 0.0004392070933550754, "loss": 3.1759, "step": 66055 }, { "epoch": 4.488381573583367, "grad_norm": 1.8992438316345215, "learning_rate": 0.00043916462834624274, "loss": 3.3765, "step": 66060 }, { "epoch": 4.488721293654029, "grad_norm": 1.639428973197937, "learning_rate": 0.00043912216333741, "loss": 3.4382, "step": 66065 }, { "epoch": 4.489061013724691, "grad_norm": 2.3698134422302246, "learning_rate": 0.00043907969832857724, "loss": 3.5634, "step": 66070 }, { "epoch": 4.489400733795352, "grad_norm": 2.186591863632202, "learning_rate": 0.0004390372333197445, "loss": 3.476, "step": 66075 }, { "epoch": 4.489740453866014, "grad_norm": 1.811455488204956, "learning_rate": 0.00043899476831091186, "loss": 3.1207, 
"step": 66080 }, { "epoch": 4.4900801739366765, "grad_norm": 2.2622499465942383, "learning_rate": 0.0004389523033020791, "loss": 3.3928, "step": 66085 }, { "epoch": 4.490419894007338, "grad_norm": 2.060516595840454, "learning_rate": 0.00043890983829324636, "loss": 3.4185, "step": 66090 }, { "epoch": 4.490759614078, "grad_norm": 1.895536184310913, "learning_rate": 0.0004388673732844137, "loss": 3.1874, "step": 66095 }, { "epoch": 4.491099334148662, "grad_norm": 2.4014062881469727, "learning_rate": 0.0004388249082755809, "loss": 3.3282, "step": 66100 }, { "epoch": 4.491439054219323, "grad_norm": 1.4043645858764648, "learning_rate": 0.0004387824432667482, "loss": 3.4034, "step": 66105 }, { "epoch": 4.491778774289985, "grad_norm": 1.9428068399429321, "learning_rate": 0.0004387399782579155, "loss": 3.3345, "step": 66110 }, { "epoch": 4.492118494360647, "grad_norm": 1.899408221244812, "learning_rate": 0.00043869751324908277, "loss": 3.3503, "step": 66115 }, { "epoch": 4.492458214431308, "grad_norm": 1.5780493021011353, "learning_rate": 0.00043865504824025005, "loss": 3.6191, "step": 66120 }, { "epoch": 4.49279793450197, "grad_norm": 1.342339038848877, "learning_rate": 0.0004386125832314173, "loss": 3.2752, "step": 66125 }, { "epoch": 4.4931376545726325, "grad_norm": 2.4284939765930176, "learning_rate": 0.0004385701182225846, "loss": 3.3551, "step": 66130 }, { "epoch": 4.493477374643294, "grad_norm": 2.2766146659851074, "learning_rate": 0.0004385276532137519, "loss": 3.2388, "step": 66135 }, { "epoch": 4.493817094713956, "grad_norm": 1.7027076482772827, "learning_rate": 0.00043848518820491917, "loss": 3.4868, "step": 66140 }, { "epoch": 4.494156814784618, "grad_norm": 1.7725087404251099, "learning_rate": 0.0004384427231960864, "loss": 3.3784, "step": 66145 }, { "epoch": 4.494496534855279, "grad_norm": 1.761215090751648, "learning_rate": 0.0004384002581872537, "loss": 3.3757, "step": 66150 }, { "epoch": 4.494836254925941, "grad_norm": 1.606955885887146, "learning_rate": 
0.000438357793178421, "loss": 3.3909, "step": 66155 }, { "epoch": 4.495175974996603, "grad_norm": 1.8365281820297241, "learning_rate": 0.00043831532816958823, "loss": 3.6695, "step": 66160 }, { "epoch": 4.495515695067264, "grad_norm": 2.2833704948425293, "learning_rate": 0.00043827286316075557, "loss": 3.3432, "step": 66165 }, { "epoch": 4.495855415137926, "grad_norm": 2.3338255882263184, "learning_rate": 0.00043823039815192285, "loss": 3.3436, "step": 66170 }, { "epoch": 4.4961951352085885, "grad_norm": 2.433746099472046, "learning_rate": 0.00043818793314309007, "loss": 3.3956, "step": 66175 }, { "epoch": 4.49653485527925, "grad_norm": 1.7325202226638794, "learning_rate": 0.00043814546813425735, "loss": 3.3693, "step": 66180 }, { "epoch": 4.496874575349912, "grad_norm": 1.9357433319091797, "learning_rate": 0.0004381030031254247, "loss": 3.3353, "step": 66185 }, { "epoch": 4.497214295420574, "grad_norm": 1.804970145225525, "learning_rate": 0.0004380605381165919, "loss": 3.6177, "step": 66190 }, { "epoch": 4.497554015491235, "grad_norm": 1.7315000295639038, "learning_rate": 0.0004380180731077592, "loss": 3.4575, "step": 66195 }, { "epoch": 4.497893735561897, "grad_norm": 2.3668055534362793, "learning_rate": 0.0004379756080989265, "loss": 3.4532, "step": 66200 }, { "epoch": 4.498233455632559, "grad_norm": 2.607213258743286, "learning_rate": 0.0004379331430900938, "loss": 3.2609, "step": 66205 }, { "epoch": 4.49857317570322, "grad_norm": 1.45490562915802, "learning_rate": 0.00043789067808126103, "loss": 3.5314, "step": 66210 }, { "epoch": 4.498912895773882, "grad_norm": 1.5875437259674072, "learning_rate": 0.0004378482130724283, "loss": 3.2765, "step": 66215 }, { "epoch": 4.4992526158445445, "grad_norm": 1.6264110803604126, "learning_rate": 0.00043780574806359565, "loss": 3.3263, "step": 66220 }, { "epoch": 4.499592335915206, "grad_norm": 2.4027628898620605, "learning_rate": 0.00043776328305476287, "loss": 3.5464, "step": 66225 }, { "epoch": 4.499932055985868, 
"grad_norm": 1.6514055728912354, "learning_rate": 0.00043772081804593015, "loss": 3.4586, "step": 66230 }, { "epoch": 4.500271776056529, "grad_norm": 1.8435102701187134, "learning_rate": 0.0004376783530370975, "loss": 3.25, "step": 66235 }, { "epoch": 4.500611496127191, "grad_norm": 1.7827179431915283, "learning_rate": 0.0004376358880282647, "loss": 3.1045, "step": 66240 }, { "epoch": 4.500951216197853, "grad_norm": 2.3047664165496826, "learning_rate": 0.000437593423019432, "loss": 3.3023, "step": 66245 }, { "epoch": 4.501290936268514, "grad_norm": 1.8233745098114014, "learning_rate": 0.0004375509580105993, "loss": 3.3682, "step": 66250 }, { "epoch": 4.501630656339176, "grad_norm": 1.6901644468307495, "learning_rate": 0.00043750849300176655, "loss": 3.1607, "step": 66255 }, { "epoch": 4.501970376409838, "grad_norm": 1.5260488986968994, "learning_rate": 0.00043746602799293383, "loss": 3.3179, "step": 66260 }, { "epoch": 4.5023100964805, "grad_norm": 1.6470589637756348, "learning_rate": 0.0004374235629841011, "loss": 3.2454, "step": 66265 }, { "epoch": 4.502649816551162, "grad_norm": 2.0067906379699707, "learning_rate": 0.0004373810979752684, "loss": 3.2778, "step": 66270 }, { "epoch": 4.502989536621824, "grad_norm": 1.7899465560913086, "learning_rate": 0.0004373386329664357, "loss": 3.391, "step": 66275 }, { "epoch": 4.503329256692485, "grad_norm": 2.3174936771392822, "learning_rate": 0.00043729616795760295, "loss": 3.3948, "step": 66280 }, { "epoch": 4.503668976763147, "grad_norm": 1.7108805179595947, "learning_rate": 0.0004372537029487702, "loss": 3.5665, "step": 66285 }, { "epoch": 4.504008696833809, "grad_norm": 1.9661855697631836, "learning_rate": 0.0004372112379399375, "loss": 3.5041, "step": 66290 }, { "epoch": 4.50434841690447, "grad_norm": 1.9725825786590576, "learning_rate": 0.0004371687729311048, "loss": 3.387, "step": 66295 }, { "epoch": 4.504688136975132, "grad_norm": 1.6478663682937622, "learning_rate": 0.000437126307922272, "loss": 3.4044, "step": 
66300 }, { "epoch": 4.5050278570457944, "grad_norm": 1.4704338312149048, "learning_rate": 0.00043708384291343935, "loss": 3.4335, "step": 66305 }, { "epoch": 4.505367577116456, "grad_norm": 1.771200180053711, "learning_rate": 0.00043704137790460663, "loss": 3.0884, "step": 66310 }, { "epoch": 4.505707297187118, "grad_norm": 2.221484661102295, "learning_rate": 0.00043699891289577386, "loss": 3.3261, "step": 66315 }, { "epoch": 4.50604701725778, "grad_norm": 1.9600389003753662, "learning_rate": 0.00043695644788694114, "loss": 3.2689, "step": 66320 }, { "epoch": 4.506386737328441, "grad_norm": 1.7934809923171997, "learning_rate": 0.0004369139828781085, "loss": 3.5954, "step": 66325 }, { "epoch": 4.506726457399103, "grad_norm": 1.861041784286499, "learning_rate": 0.0004368715178692757, "loss": 3.441, "step": 66330 }, { "epoch": 4.507066177469765, "grad_norm": 1.5816835165023804, "learning_rate": 0.000436829052860443, "loss": 3.458, "step": 66335 }, { "epoch": 4.507405897540426, "grad_norm": 2.2375600337982178, "learning_rate": 0.0004367865878516103, "loss": 3.0464, "step": 66340 }, { "epoch": 4.507745617611088, "grad_norm": 1.7748152017593384, "learning_rate": 0.00043674412284277754, "loss": 3.7114, "step": 66345 }, { "epoch": 4.5080853376817505, "grad_norm": 1.8645145893096924, "learning_rate": 0.0004367016578339448, "loss": 3.3836, "step": 66350 }, { "epoch": 4.508425057752412, "grad_norm": 1.9210644960403442, "learning_rate": 0.00043665919282511215, "loss": 3.4029, "step": 66355 }, { "epoch": 4.508764777823074, "grad_norm": 1.4137461185455322, "learning_rate": 0.0004366167278162794, "loss": 3.4485, "step": 66360 }, { "epoch": 4.509104497893736, "grad_norm": 1.7377530336380005, "learning_rate": 0.00043657426280744666, "loss": 3.6541, "step": 66365 }, { "epoch": 4.509444217964397, "grad_norm": 2.2893240451812744, "learning_rate": 0.00043653179779861394, "loss": 3.3908, "step": 66370 }, { "epoch": 4.509783938035059, "grad_norm": 1.6783525943756104, "learning_rate": 
0.0004364893327897812, "loss": 3.1992, "step": 66375 }, { "epoch": 4.510123658105721, "grad_norm": 1.790104627609253, "learning_rate": 0.0004364468677809485, "loss": 3.4096, "step": 66380 }, { "epoch": 4.510463378176382, "grad_norm": 1.854498267173767, "learning_rate": 0.0004364044027721158, "loss": 3.6183, "step": 66385 }, { "epoch": 4.510803098247044, "grad_norm": 2.0826761722564697, "learning_rate": 0.0004363619377632831, "loss": 3.339, "step": 66390 }, { "epoch": 4.5111428183177065, "grad_norm": 1.774226427078247, "learning_rate": 0.00043631947275445034, "loss": 3.3101, "step": 66395 }, { "epoch": 4.511482538388368, "grad_norm": 2.3128092288970947, "learning_rate": 0.0004362770077456176, "loss": 3.2147, "step": 66400 }, { "epoch": 4.51182225845903, "grad_norm": 2.4440953731536865, "learning_rate": 0.0004362345427367849, "loss": 3.4353, "step": 66405 }, { "epoch": 4.512161978529692, "grad_norm": 2.6962952613830566, "learning_rate": 0.0004361920777279522, "loss": 3.3618, "step": 66410 }, { "epoch": 4.512501698600353, "grad_norm": 1.7457102537155151, "learning_rate": 0.00043614961271911946, "loss": 3.366, "step": 66415 }, { "epoch": 4.512841418671015, "grad_norm": 1.9861441850662231, "learning_rate": 0.00043610714771028674, "loss": 3.4215, "step": 66420 }, { "epoch": 4.513181138741677, "grad_norm": 1.7412195205688477, "learning_rate": 0.000436064682701454, "loss": 3.2948, "step": 66425 }, { "epoch": 4.513520858812338, "grad_norm": 2.0231854915618896, "learning_rate": 0.0004360222176926213, "loss": 3.2271, "step": 66430 }, { "epoch": 4.513860578883, "grad_norm": 1.7818249464035034, "learning_rate": 0.0004359797526837886, "loss": 3.4862, "step": 66435 }, { "epoch": 4.5142002989536625, "grad_norm": 2.1143088340759277, "learning_rate": 0.0004359372876749558, "loss": 3.4212, "step": 66440 }, { "epoch": 4.514540019024324, "grad_norm": 1.8087472915649414, "learning_rate": 0.00043589482266612314, "loss": 3.2736, "step": 66445 }, { "epoch": 4.514879739094986, "grad_norm": 
1.8759294748306274, "learning_rate": 0.0004358523576572904, "loss": 3.4605, "step": 66450 }, { "epoch": 4.515219459165648, "grad_norm": 1.7936594486236572, "learning_rate": 0.00043580989264845765, "loss": 3.2418, "step": 66455 }, { "epoch": 4.515559179236309, "grad_norm": 1.3593430519104004, "learning_rate": 0.000435767427639625, "loss": 3.3345, "step": 66460 }, { "epoch": 4.515898899306971, "grad_norm": 1.9191536903381348, "learning_rate": 0.00043572496263079226, "loss": 3.4644, "step": 66465 }, { "epoch": 4.516238619377633, "grad_norm": 1.897255778312683, "learning_rate": 0.0004356824976219595, "loss": 3.3662, "step": 66470 }, { "epoch": 4.516578339448294, "grad_norm": 2.120915412902832, "learning_rate": 0.00043564003261312677, "loss": 3.143, "step": 66475 }, { "epoch": 4.516918059518956, "grad_norm": 1.902070164680481, "learning_rate": 0.0004355975676042941, "loss": 3.1159, "step": 66480 }, { "epoch": 4.5172577795896185, "grad_norm": 1.7499583959579468, "learning_rate": 0.00043555510259546133, "loss": 3.4058, "step": 66485 }, { "epoch": 4.51759749966028, "grad_norm": 1.6216905117034912, "learning_rate": 0.0004355126375866286, "loss": 3.3598, "step": 66490 }, { "epoch": 4.517937219730942, "grad_norm": 1.9205505847930908, "learning_rate": 0.00043547017257779594, "loss": 3.3773, "step": 66495 }, { "epoch": 4.518276939801604, "grad_norm": 1.9745121002197266, "learning_rate": 0.00043542770756896317, "loss": 3.4877, "step": 66500 }, { "epoch": 4.518616659872265, "grad_norm": 1.5064677000045776, "learning_rate": 0.00043538524256013045, "loss": 3.125, "step": 66505 }, { "epoch": 4.518956379942927, "grad_norm": 2.270439863204956, "learning_rate": 0.00043534277755129773, "loss": 3.4716, "step": 66510 }, { "epoch": 4.519296100013589, "grad_norm": 2.129828929901123, "learning_rate": 0.000435300312542465, "loss": 3.4653, "step": 66515 }, { "epoch": 4.51963582008425, "grad_norm": 1.6323367357254028, "learning_rate": 0.0004352578475336323, "loss": 3.4434, "step": 66520 }, { 
"epoch": 4.519975540154912, "grad_norm": 1.6102834939956665, "learning_rate": 0.00043521538252479957, "loss": 3.539, "step": 66525 }, { "epoch": 4.5203152602255745, "grad_norm": 1.3990037441253662, "learning_rate": 0.00043517291751596685, "loss": 3.4283, "step": 66530 }, { "epoch": 4.520654980296236, "grad_norm": 1.65175199508667, "learning_rate": 0.00043513045250713413, "loss": 3.1393, "step": 66535 }, { "epoch": 4.520994700366898, "grad_norm": 2.1670851707458496, "learning_rate": 0.0004350879874983014, "loss": 3.5608, "step": 66540 }, { "epoch": 4.52133442043756, "grad_norm": 1.5563857555389404, "learning_rate": 0.00043504552248946863, "loss": 3.3457, "step": 66545 }, { "epoch": 4.521674140508221, "grad_norm": 1.8395968675613403, "learning_rate": 0.00043500305748063597, "loss": 3.3519, "step": 66550 }, { "epoch": 4.522013860578883, "grad_norm": 1.7634690999984741, "learning_rate": 0.00043496059247180325, "loss": 3.3522, "step": 66555 }, { "epoch": 4.522353580649545, "grad_norm": 1.7093243598937988, "learning_rate": 0.00043491812746297053, "loss": 3.2994, "step": 66560 }, { "epoch": 4.522693300720206, "grad_norm": 1.7723520994186401, "learning_rate": 0.0004348756624541378, "loss": 3.3119, "step": 66565 }, { "epoch": 4.523033020790868, "grad_norm": 1.5509120225906372, "learning_rate": 0.0004348331974453051, "loss": 3.5622, "step": 66570 }, { "epoch": 4.5233727408615305, "grad_norm": 2.381335735321045, "learning_rate": 0.00043479073243647237, "loss": 3.1449, "step": 66575 }, { "epoch": 4.523712460932192, "grad_norm": 1.5980829000473022, "learning_rate": 0.0004347482674276396, "loss": 3.5011, "step": 66580 }, { "epoch": 4.524052181002854, "grad_norm": 1.9057074785232544, "learning_rate": 0.00043470580241880693, "loss": 3.3469, "step": 66585 }, { "epoch": 4.524391901073516, "grad_norm": 1.539584755897522, "learning_rate": 0.0004346633374099742, "loss": 3.6307, "step": 66590 }, { "epoch": 4.524731621144177, "grad_norm": 1.4256397485733032, "learning_rate": 
0.00043462087240114144, "loss": 3.3594, "step": 66595 }, { "epoch": 4.525071341214839, "grad_norm": 1.9121577739715576, "learning_rate": 0.00043457840739230877, "loss": 3.5771, "step": 66600 }, { "epoch": 4.525411061285501, "grad_norm": 2.2946112155914307, "learning_rate": 0.00043453594238347605, "loss": 3.5634, "step": 66605 }, { "epoch": 4.525750781356162, "grad_norm": 1.9758363962173462, "learning_rate": 0.0004344934773746433, "loss": 3.3259, "step": 66610 }, { "epoch": 4.5260905014268245, "grad_norm": 1.7980871200561523, "learning_rate": 0.00043445101236581056, "loss": 3.304, "step": 66615 }, { "epoch": 4.5264302214974865, "grad_norm": 1.8827985525131226, "learning_rate": 0.0004344085473569779, "loss": 3.1915, "step": 66620 }, { "epoch": 4.526769941568148, "grad_norm": 1.3138105869293213, "learning_rate": 0.0004343660823481451, "loss": 3.118, "step": 66625 }, { "epoch": 4.52710966163881, "grad_norm": 2.235393762588501, "learning_rate": 0.0004343236173393124, "loss": 3.1579, "step": 66630 }, { "epoch": 4.527449381709472, "grad_norm": 1.8008300065994263, "learning_rate": 0.00043428115233047973, "loss": 3.3601, "step": 66635 }, { "epoch": 4.527789101780133, "grad_norm": 1.5270881652832031, "learning_rate": 0.00043423868732164696, "loss": 3.1699, "step": 66640 }, { "epoch": 4.528128821850795, "grad_norm": 1.7829537391662598, "learning_rate": 0.00043419622231281424, "loss": 3.4615, "step": 66645 }, { "epoch": 4.528468541921457, "grad_norm": 1.6621828079223633, "learning_rate": 0.00043415375730398157, "loss": 3.4382, "step": 66650 }, { "epoch": 4.528808261992118, "grad_norm": 1.4216609001159668, "learning_rate": 0.0004341112922951488, "loss": 3.4941, "step": 66655 }, { "epoch": 4.5291479820627805, "grad_norm": 1.6351913213729858, "learning_rate": 0.0004340688272863161, "loss": 3.3253, "step": 66660 }, { "epoch": 4.5294877021334425, "grad_norm": 1.8192185163497925, "learning_rate": 0.00043402636227748336, "loss": 3.3731, "step": 66665 }, { "epoch": 4.529827422204104, 
"grad_norm": 1.6121565103530884, "learning_rate": 0.00043398389726865064, "loss": 3.5116, "step": 66670 }, { "epoch": 4.530167142274766, "grad_norm": 2.1979012489318848, "learning_rate": 0.0004339414322598179, "loss": 3.607, "step": 66675 }, { "epoch": 4.530506862345427, "grad_norm": 1.8392138481140137, "learning_rate": 0.0004338989672509852, "loss": 3.6575, "step": 66680 }, { "epoch": 4.530846582416089, "grad_norm": 1.9494836330413818, "learning_rate": 0.0004338565022421525, "loss": 3.5088, "step": 66685 }, { "epoch": 4.531186302486751, "grad_norm": 2.24112868309021, "learning_rate": 0.00043381403723331976, "loss": 3.3486, "step": 66690 }, { "epoch": 4.531526022557412, "grad_norm": 1.8326538801193237, "learning_rate": 0.00043377157222448704, "loss": 3.4366, "step": 66695 }, { "epoch": 4.531865742628074, "grad_norm": 1.4672895669937134, "learning_rate": 0.00043372910721565426, "loss": 3.2844, "step": 66700 }, { "epoch": 4.5322054626987365, "grad_norm": 2.0155975818634033, "learning_rate": 0.0004336866422068216, "loss": 3.4779, "step": 66705 }, { "epoch": 4.532545182769398, "grad_norm": 1.730432152748108, "learning_rate": 0.0004336441771979889, "loss": 3.4787, "step": 66710 }, { "epoch": 4.53288490284006, "grad_norm": 1.8587027788162231, "learning_rate": 0.0004336017121891561, "loss": 3.1898, "step": 66715 }, { "epoch": 4.533224622910722, "grad_norm": 2.1689178943634033, "learning_rate": 0.00043355924718032344, "loss": 3.5562, "step": 66720 }, { "epoch": 4.533564342981383, "grad_norm": 1.752883791923523, "learning_rate": 0.0004335167821714907, "loss": 3.2371, "step": 66725 }, { "epoch": 4.533904063052045, "grad_norm": 1.8541343212127686, "learning_rate": 0.000433474317162658, "loss": 3.285, "step": 66730 }, { "epoch": 4.534243783122707, "grad_norm": 1.731774091720581, "learning_rate": 0.0004334318521538252, "loss": 3.3897, "step": 66735 }, { "epoch": 4.534583503193368, "grad_norm": 1.617419958114624, "learning_rate": 0.00043338938714499256, "loss": 3.3844, "step": 
66740 }, { "epoch": 4.53492322326403, "grad_norm": 1.6096327304840088, "learning_rate": 0.00043334692213615984, "loss": 3.4981, "step": 66745 }, { "epoch": 4.5352629433346925, "grad_norm": 2.383535861968994, "learning_rate": 0.00043330445712732706, "loss": 3.3914, "step": 66750 }, { "epoch": 4.535602663405354, "grad_norm": 1.831680178642273, "learning_rate": 0.0004332619921184944, "loss": 3.2405, "step": 66755 }, { "epoch": 4.535942383476016, "grad_norm": 1.4783592224121094, "learning_rate": 0.0004332195271096617, "loss": 3.5177, "step": 66760 }, { "epoch": 4.536282103546678, "grad_norm": 1.4988435506820679, "learning_rate": 0.0004331770621008289, "loss": 3.3293, "step": 66765 }, { "epoch": 4.536621823617339, "grad_norm": 1.7090846300125122, "learning_rate": 0.0004331345970919962, "loss": 3.2385, "step": 66770 }, { "epoch": 4.536961543688001, "grad_norm": 1.797607183456421, "learning_rate": 0.0004330921320831635, "loss": 3.4677, "step": 66775 }, { "epoch": 4.537301263758663, "grad_norm": 1.9489952325820923, "learning_rate": 0.00043304966707433074, "loss": 3.1553, "step": 66780 }, { "epoch": 4.537640983829324, "grad_norm": 2.2373263835906982, "learning_rate": 0.000433007202065498, "loss": 3.4743, "step": 66785 }, { "epoch": 4.537980703899986, "grad_norm": 2.135601043701172, "learning_rate": 0.00043296473705666536, "loss": 3.4679, "step": 66790 }, { "epoch": 4.5383204239706485, "grad_norm": 1.9648410081863403, "learning_rate": 0.0004329222720478326, "loss": 3.4333, "step": 66795 }, { "epoch": 4.53866014404131, "grad_norm": 1.689530611038208, "learning_rate": 0.00043287980703899986, "loss": 3.4397, "step": 66800 }, { "epoch": 4.538999864111972, "grad_norm": 2.0093443393707275, "learning_rate": 0.00043283734203016714, "loss": 3.5071, "step": 66805 }, { "epoch": 4.539339584182634, "grad_norm": 1.5679724216461182, "learning_rate": 0.0004327948770213344, "loss": 3.7828, "step": 66810 }, { "epoch": 4.539679304253295, "grad_norm": 2.0356392860412598, "learning_rate": 
0.0004327524120125017, "loss": 3.6099, "step": 66815 }, { "epoch": 4.540019024323957, "grad_norm": 1.708413004875183, "learning_rate": 0.000432709947003669, "loss": 3.445, "step": 66820 }, { "epoch": 4.540358744394619, "grad_norm": 1.8736852407455444, "learning_rate": 0.00043266748199483627, "loss": 3.5538, "step": 66825 }, { "epoch": 4.54069846446528, "grad_norm": 1.4639050960540771, "learning_rate": 0.00043262501698600355, "loss": 3.3849, "step": 66830 }, { "epoch": 4.541038184535942, "grad_norm": 2.42014479637146, "learning_rate": 0.0004325825519771708, "loss": 3.4761, "step": 66835 }, { "epoch": 4.5413779046066045, "grad_norm": 1.7576082944869995, "learning_rate": 0.00043254008696833805, "loss": 3.3518, "step": 66840 }, { "epoch": 4.541717624677266, "grad_norm": 2.2035014629364014, "learning_rate": 0.0004324976219595054, "loss": 3.23, "step": 66845 }, { "epoch": 4.542057344747928, "grad_norm": 1.8015203475952148, "learning_rate": 0.00043245515695067267, "loss": 3.4762, "step": 66850 }, { "epoch": 4.54239706481859, "grad_norm": 1.8503823280334473, "learning_rate": 0.0004324126919418399, "loss": 3.3744, "step": 66855 }, { "epoch": 4.542736784889251, "grad_norm": 1.4121768474578857, "learning_rate": 0.0004323702269330072, "loss": 3.3868, "step": 66860 }, { "epoch": 4.543076504959913, "grad_norm": 2.0140016078948975, "learning_rate": 0.0004323277619241745, "loss": 3.4697, "step": 66865 }, { "epoch": 4.543416225030575, "grad_norm": 2.450481653213501, "learning_rate": 0.00043228529691534173, "loss": 3.4859, "step": 66870 }, { "epoch": 4.543755945101236, "grad_norm": 1.6481842994689941, "learning_rate": 0.000432242831906509, "loss": 3.4182, "step": 66875 }, { "epoch": 4.5440956651718984, "grad_norm": 1.740173101425171, "learning_rate": 0.00043220036689767635, "loss": 3.6055, "step": 66880 }, { "epoch": 4.54443538524256, "grad_norm": 2.0969321727752686, "learning_rate": 0.00043215790188884357, "loss": 3.2839, "step": 66885 }, { "epoch": 4.544775105313222, "grad_norm": 
1.8244985342025757, "learning_rate": 0.00043211543688001085, "loss": 3.2108, "step": 66890 }, { "epoch": 4.545114825383884, "grad_norm": 1.7272615432739258, "learning_rate": 0.0004320729718711782, "loss": 3.2639, "step": 66895 }, { "epoch": 4.545454545454545, "grad_norm": 1.986655831336975, "learning_rate": 0.00043203050686234547, "loss": 3.2465, "step": 66900 }, { "epoch": 4.545794265525207, "grad_norm": 2.085327625274658, "learning_rate": 0.0004319880418535127, "loss": 3.3056, "step": 66905 }, { "epoch": 4.546133985595869, "grad_norm": 2.0742430686950684, "learning_rate": 0.00043194557684468, "loss": 3.5538, "step": 66910 }, { "epoch": 4.54647370566653, "grad_norm": 1.4417517185211182, "learning_rate": 0.0004319031118358473, "loss": 3.3685, "step": 66915 }, { "epoch": 4.546813425737192, "grad_norm": 1.571747899055481, "learning_rate": 0.00043186064682701453, "loss": 3.2805, "step": 66920 }, { "epoch": 4.5471531458078545, "grad_norm": 1.5022752285003662, "learning_rate": 0.0004318181818181818, "loss": 3.5319, "step": 66925 }, { "epoch": 4.547492865878516, "grad_norm": 1.8446669578552246, "learning_rate": 0.00043177571680934915, "loss": 3.4411, "step": 66930 }, { "epoch": 4.547832585949178, "grad_norm": 1.6780309677124023, "learning_rate": 0.00043173325180051637, "loss": 3.3041, "step": 66935 }, { "epoch": 4.54817230601984, "grad_norm": 2.5472636222839355, "learning_rate": 0.00043169078679168365, "loss": 3.2747, "step": 66940 }, { "epoch": 4.548512026090501, "grad_norm": 2.037580728530884, "learning_rate": 0.000431648321782851, "loss": 3.4405, "step": 66945 }, { "epoch": 4.548851746161163, "grad_norm": 1.978440523147583, "learning_rate": 0.0004316058567740182, "loss": 3.5486, "step": 66950 }, { "epoch": 4.549191466231825, "grad_norm": 2.1215147972106934, "learning_rate": 0.0004315633917651855, "loss": 3.4376, "step": 66955 }, { "epoch": 4.549531186302486, "grad_norm": 1.7899303436279297, "learning_rate": 0.0004315209267563528, "loss": 3.4589, "step": 66960 }, { 
"epoch": 4.549870906373148, "grad_norm": 1.924323558807373, "learning_rate": 0.00043147846174752005, "loss": 3.1833, "step": 66965 }, { "epoch": 4.5502106264438105, "grad_norm": 1.5502084493637085, "learning_rate": 0.00043143599673868733, "loss": 3.3144, "step": 66970 }, { "epoch": 4.550550346514472, "grad_norm": 2.04856276512146, "learning_rate": 0.0004313935317298546, "loss": 3.2729, "step": 66975 }, { "epoch": 4.550890066585134, "grad_norm": 1.639575481414795, "learning_rate": 0.0004313510667210219, "loss": 3.2354, "step": 66980 }, { "epoch": 4.551229786655796, "grad_norm": 1.7561997175216675, "learning_rate": 0.0004313086017121892, "loss": 3.4352, "step": 66985 }, { "epoch": 4.551569506726457, "grad_norm": 2.195744752883911, "learning_rate": 0.00043126613670335645, "loss": 3.3124, "step": 66990 }, { "epoch": 4.551909226797119, "grad_norm": 1.8256051540374756, "learning_rate": 0.0004312236716945237, "loss": 3.451, "step": 66995 }, { "epoch": 4.552248946867781, "grad_norm": 2.064863681793213, "learning_rate": 0.000431181206685691, "loss": 3.1958, "step": 67000 }, { "epoch": 4.552588666938442, "grad_norm": 1.6508724689483643, "learning_rate": 0.0004311387416768583, "loss": 3.2726, "step": 67005 }, { "epoch": 4.552928387009104, "grad_norm": 1.4576503038406372, "learning_rate": 0.0004310962766680255, "loss": 3.3798, "step": 67010 }, { "epoch": 4.5532681070797665, "grad_norm": 1.6635974645614624, "learning_rate": 0.00043105381165919285, "loss": 3.2937, "step": 67015 }, { "epoch": 4.553607827150428, "grad_norm": 2.0387635231018066, "learning_rate": 0.00043101134665036013, "loss": 3.2567, "step": 67020 }, { "epoch": 4.55394754722109, "grad_norm": 2.0748488903045654, "learning_rate": 0.00043096888164152736, "loss": 3.5021, "step": 67025 }, { "epoch": 4.554287267291752, "grad_norm": 2.2253973484039307, "learning_rate": 0.00043092641663269464, "loss": 3.4162, "step": 67030 }, { "epoch": 4.554626987362413, "grad_norm": 1.9757850170135498, "learning_rate": 
0.000430883951623862, "loss": 3.2714, "step": 67035 }, { "epoch": 4.554966707433075, "grad_norm": 1.61243736743927, "learning_rate": 0.0004308414866150292, "loss": 3.4894, "step": 67040 }, { "epoch": 4.555306427503737, "grad_norm": 1.82559072971344, "learning_rate": 0.0004307990216061965, "loss": 3.3513, "step": 67045 }, { "epoch": 4.555646147574398, "grad_norm": 1.7840608358383179, "learning_rate": 0.0004307565565973638, "loss": 3.3023, "step": 67050 }, { "epoch": 4.55598586764506, "grad_norm": 1.694890022277832, "learning_rate": 0.00043071409158853104, "loss": 3.4169, "step": 67055 }, { "epoch": 4.5563255877157225, "grad_norm": 1.950738549232483, "learning_rate": 0.0004306716265796983, "loss": 3.5857, "step": 67060 }, { "epoch": 4.556665307786384, "grad_norm": 2.054185390472412, "learning_rate": 0.0004306291615708656, "loss": 3.4364, "step": 67065 }, { "epoch": 4.557005027857046, "grad_norm": 1.915806531906128, "learning_rate": 0.00043058669656203293, "loss": 3.4234, "step": 67070 }, { "epoch": 4.557344747927708, "grad_norm": 1.8233083486557007, "learning_rate": 0.00043054423155320016, "loss": 3.7477, "step": 67075 }, { "epoch": 4.557684467998369, "grad_norm": 1.6804357767105103, "learning_rate": 0.00043050176654436744, "loss": 3.4028, "step": 67080 }, { "epoch": 4.558024188069031, "grad_norm": 1.7550469636917114, "learning_rate": 0.0004304593015355348, "loss": 3.516, "step": 67085 }, { "epoch": 4.558363908139693, "grad_norm": 1.4693597555160522, "learning_rate": 0.000430416836526702, "loss": 3.4047, "step": 67090 }, { "epoch": 4.558703628210354, "grad_norm": 1.4215093851089478, "learning_rate": 0.0004303743715178693, "loss": 3.3539, "step": 67095 }, { "epoch": 4.559043348281016, "grad_norm": 2.131558895111084, "learning_rate": 0.00043033190650903656, "loss": 3.2029, "step": 67100 }, { "epoch": 4.5593830683516785, "grad_norm": 2.028865098953247, "learning_rate": 0.00043028944150020384, "loss": 3.4815, "step": 67105 }, { "epoch": 4.55972278842234, "grad_norm": 
1.862390398979187, "learning_rate": 0.0004302469764913711, "loss": 3.2326, "step": 67110 }, { "epoch": 4.560062508493002, "grad_norm": 1.6644644737243652, "learning_rate": 0.0004302045114825384, "loss": 3.2886, "step": 67115 }, { "epoch": 4.560402228563664, "grad_norm": 1.9257628917694092, "learning_rate": 0.0004301620464737057, "loss": 3.3808, "step": 67120 }, { "epoch": 4.560741948634325, "grad_norm": 1.9627577066421509, "learning_rate": 0.00043011958146487296, "loss": 3.2058, "step": 67125 }, { "epoch": 4.561081668704987, "grad_norm": 1.5127419233322144, "learning_rate": 0.00043007711645604024, "loss": 3.3312, "step": 67130 }, { "epoch": 4.561421388775649, "grad_norm": 1.963087558746338, "learning_rate": 0.00043003465144720747, "loss": 3.1734, "step": 67135 }, { "epoch": 4.56176110884631, "grad_norm": 1.6945364475250244, "learning_rate": 0.0004299921864383748, "loss": 3.4236, "step": 67140 }, { "epoch": 4.562100828916972, "grad_norm": 1.797080397605896, "learning_rate": 0.0004299497214295421, "loss": 3.3635, "step": 67145 }, { "epoch": 4.5624405489876345, "grad_norm": 2.0344996452331543, "learning_rate": 0.0004299072564207093, "loss": 3.4893, "step": 67150 }, { "epoch": 4.562780269058296, "grad_norm": 1.8043644428253174, "learning_rate": 0.00042986479141187664, "loss": 3.4034, "step": 67155 }, { "epoch": 4.563119989128958, "grad_norm": 2.0217769145965576, "learning_rate": 0.0004298223264030439, "loss": 3.469, "step": 67160 }, { "epoch": 4.56345970919962, "grad_norm": 1.823150873184204, "learning_rate": 0.00042977986139421115, "loss": 3.4861, "step": 67165 }, { "epoch": 4.563799429270281, "grad_norm": 1.5966601371765137, "learning_rate": 0.00042973739638537843, "loss": 3.4512, "step": 67170 }, { "epoch": 4.564139149340943, "grad_norm": 1.8186806440353394, "learning_rate": 0.00042969493137654576, "loss": 3.3897, "step": 67175 }, { "epoch": 4.564478869411605, "grad_norm": 2.2038042545318604, "learning_rate": 0.000429652466367713, "loss": 3.4487, "step": 67180 }, { 
"epoch": 4.564818589482266, "grad_norm": 2.8503663539886475, "learning_rate": 0.00042961000135888027, "loss": 3.2709, "step": 67185 }, { "epoch": 4.5651583095529285, "grad_norm": 1.9888285398483276, "learning_rate": 0.0004295675363500476, "loss": 3.3956, "step": 67190 }, { "epoch": 4.5654980296235905, "grad_norm": 1.808779239654541, "learning_rate": 0.00042952507134121483, "loss": 3.2291, "step": 67195 }, { "epoch": 4.565837749694252, "grad_norm": 2.38224196434021, "learning_rate": 0.0004294826063323821, "loss": 3.323, "step": 67200 }, { "epoch": 4.566177469764914, "grad_norm": 2.1672046184539795, "learning_rate": 0.00042944014132354944, "loss": 3.5041, "step": 67205 }, { "epoch": 4.566517189835576, "grad_norm": 1.8724769353866577, "learning_rate": 0.00042939767631471667, "loss": 3.4605, "step": 67210 }, { "epoch": 4.566856909906237, "grad_norm": 1.7848092317581177, "learning_rate": 0.00042935521130588395, "loss": 3.1654, "step": 67215 }, { "epoch": 4.567196629976899, "grad_norm": 1.6013206243515015, "learning_rate": 0.00042931274629705123, "loss": 3.1616, "step": 67220 }, { "epoch": 4.567536350047561, "grad_norm": 1.9243757724761963, "learning_rate": 0.0004292702812882185, "loss": 3.1619, "step": 67225 }, { "epoch": 4.567876070118222, "grad_norm": 1.8681126832962036, "learning_rate": 0.0004292278162793858, "loss": 3.2564, "step": 67230 }, { "epoch": 4.5682157901888845, "grad_norm": 1.5195406675338745, "learning_rate": 0.00042918535127055307, "loss": 3.5287, "step": 67235 }, { "epoch": 4.5685555102595465, "grad_norm": 1.7726330757141113, "learning_rate": 0.0004291428862617204, "loss": 3.4003, "step": 67240 }, { "epoch": 4.568895230330208, "grad_norm": 1.3483145236968994, "learning_rate": 0.00042910042125288763, "loss": 3.3893, "step": 67245 }, { "epoch": 4.56923495040087, "grad_norm": 2.0203280448913574, "learning_rate": 0.0004290579562440549, "loss": 3.589, "step": 67250 }, { "epoch": 4.569574670471532, "grad_norm": 1.7830876111984253, "learning_rate": 
0.0004290154912352222, "loss": 3.2952, "step": 67255 }, { "epoch": 4.569914390542193, "grad_norm": 2.149995803833008, "learning_rate": 0.00042897302622638947, "loss": 3.3211, "step": 67260 }, { "epoch": 4.570254110612855, "grad_norm": 2.16103196144104, "learning_rate": 0.00042893056121755675, "loss": 3.3173, "step": 67265 }, { "epoch": 4.570593830683517, "grad_norm": 1.6303067207336426, "learning_rate": 0.00042888809620872403, "loss": 3.3377, "step": 67270 }, { "epoch": 4.570933550754178, "grad_norm": 1.857170581817627, "learning_rate": 0.0004288456311998913, "loss": 3.3643, "step": 67275 }, { "epoch": 4.5712732708248405, "grad_norm": 1.5485351085662842, "learning_rate": 0.0004288031661910586, "loss": 3.0708, "step": 67280 }, { "epoch": 4.5716129908955025, "grad_norm": 1.8306450843811035, "learning_rate": 0.00042876070118222587, "loss": 3.3559, "step": 67285 }, { "epoch": 4.571952710966164, "grad_norm": 1.7947636842727661, "learning_rate": 0.0004287182361733931, "loss": 3.5374, "step": 67290 }, { "epoch": 4.572292431036826, "grad_norm": 2.0653350353240967, "learning_rate": 0.00042867577116456043, "loss": 3.1987, "step": 67295 }, { "epoch": 4.572632151107488, "grad_norm": 2.2741401195526123, "learning_rate": 0.0004286333061557277, "loss": 3.4816, "step": 67300 }, { "epoch": 4.572971871178149, "grad_norm": 1.9929755926132202, "learning_rate": 0.00042859084114689494, "loss": 3.2803, "step": 67305 }, { "epoch": 4.573311591248811, "grad_norm": 1.5589410066604614, "learning_rate": 0.00042854837613806227, "loss": 3.5736, "step": 67310 }, { "epoch": 4.573651311319473, "grad_norm": 1.801190972328186, "learning_rate": 0.00042850591112922955, "loss": 3.4303, "step": 67315 }, { "epoch": 4.573991031390134, "grad_norm": 1.7654132843017578, "learning_rate": 0.0004284634461203968, "loss": 3.2543, "step": 67320 }, { "epoch": 4.5743307514607965, "grad_norm": 2.2711143493652344, "learning_rate": 0.00042842098111156406, "loss": 3.3586, "step": 67325 }, { "epoch": 4.574670471531459, 
"grad_norm": 1.434133529663086, "learning_rate": 0.0004283785161027314, "loss": 3.3341, "step": 67330 }, { "epoch": 4.57501019160212, "grad_norm": 1.9828801155090332, "learning_rate": 0.0004283360510938986, "loss": 3.5034, "step": 67335 }, { "epoch": 4.575349911672782, "grad_norm": 1.3847630023956299, "learning_rate": 0.0004282935860850659, "loss": 2.8718, "step": 67340 }, { "epoch": 4.575689631743444, "grad_norm": 1.718241810798645, "learning_rate": 0.00042825112107623323, "loss": 3.4556, "step": 67345 }, { "epoch": 4.576029351814105, "grad_norm": 1.6741831302642822, "learning_rate": 0.00042820865606740046, "loss": 3.3313, "step": 67350 }, { "epoch": 4.576369071884767, "grad_norm": 1.77134108543396, "learning_rate": 0.00042816619105856774, "loss": 3.4261, "step": 67355 }, { "epoch": 4.576708791955428, "grad_norm": 2.045252561569214, "learning_rate": 0.000428123726049735, "loss": 3.4134, "step": 67360 }, { "epoch": 4.57704851202609, "grad_norm": 1.653509259223938, "learning_rate": 0.0004280812610409023, "loss": 3.3163, "step": 67365 }, { "epoch": 4.5773882320967525, "grad_norm": 2.3721776008605957, "learning_rate": 0.0004280387960320696, "loss": 3.2183, "step": 67370 }, { "epoch": 4.577727952167414, "grad_norm": 1.7571743726730347, "learning_rate": 0.00042799633102323686, "loss": 3.2663, "step": 67375 }, { "epoch": 4.578067672238076, "grad_norm": 1.8102995157241821, "learning_rate": 0.00042795386601440414, "loss": 3.5124, "step": 67380 }, { "epoch": 4.578407392308738, "grad_norm": 1.7643026113510132, "learning_rate": 0.0004279114010055714, "loss": 3.5567, "step": 67385 }, { "epoch": 4.578747112379399, "grad_norm": 2.0262644290924072, "learning_rate": 0.0004278689359967387, "loss": 3.3987, "step": 67390 }, { "epoch": 4.579086832450061, "grad_norm": 1.3771779537200928, "learning_rate": 0.0004278264709879059, "loss": 3.2557, "step": 67395 }, { "epoch": 4.579426552520723, "grad_norm": 3.777310609817505, "learning_rate": 0.00042778400597907326, "loss": 3.4148, "step": 
67400 }, { "epoch": 4.579766272591384, "grad_norm": 1.8553988933563232, "learning_rate": 0.00042774154097024054, "loss": 3.5182, "step": 67405 }, { "epoch": 4.580105992662046, "grad_norm": 1.7922282218933105, "learning_rate": 0.0004276990759614078, "loss": 3.5061, "step": 67410 }, { "epoch": 4.5804457127327085, "grad_norm": 1.688665747642517, "learning_rate": 0.0004276566109525751, "loss": 3.3597, "step": 67415 }, { "epoch": 4.58078543280337, "grad_norm": 2.0447137355804443, "learning_rate": 0.0004276141459437424, "loss": 3.1967, "step": 67420 }, { "epoch": 4.581125152874032, "grad_norm": 1.5364643335342407, "learning_rate": 0.00042757168093490966, "loss": 3.3387, "step": 67425 }, { "epoch": 4.581464872944694, "grad_norm": 1.8297851085662842, "learning_rate": 0.0004275292159260769, "loss": 3.2572, "step": 67430 }, { "epoch": 4.581804593015355, "grad_norm": 1.9539128541946411, "learning_rate": 0.0004274867509172442, "loss": 3.5254, "step": 67435 }, { "epoch": 4.582144313086017, "grad_norm": 2.0807793140411377, "learning_rate": 0.0004274442859084115, "loss": 3.1654, "step": 67440 }, { "epoch": 4.582484033156679, "grad_norm": 1.9181749820709229, "learning_rate": 0.0004274018208995787, "loss": 3.3396, "step": 67445 }, { "epoch": 4.58282375322734, "grad_norm": 2.0147006511688232, "learning_rate": 0.00042735935589074606, "loss": 3.288, "step": 67450 }, { "epoch": 4.583163473298002, "grad_norm": 1.6404765844345093, "learning_rate": 0.00042731689088191334, "loss": 3.2239, "step": 67455 }, { "epoch": 4.5835031933686645, "grad_norm": 1.9554630517959595, "learning_rate": 0.00042727442587308056, "loss": 3.6063, "step": 67460 }, { "epoch": 4.583842913439326, "grad_norm": 1.6606401205062866, "learning_rate": 0.00042723196086424784, "loss": 3.2613, "step": 67465 }, { "epoch": 4.584182633509988, "grad_norm": 1.4943033456802368, "learning_rate": 0.0004271894958554152, "loss": 3.2704, "step": 67470 }, { "epoch": 4.58452235358065, "grad_norm": 2.322953462600708, "learning_rate": 
0.0004271470308465824, "loss": 3.5326, "step": 67475 }, { "epoch": 4.584862073651311, "grad_norm": 2.281665325164795, "learning_rate": 0.0004271045658377497, "loss": 3.2111, "step": 67480 }, { "epoch": 4.585201793721973, "grad_norm": 1.5561715364456177, "learning_rate": 0.000427062100828917, "loss": 3.2124, "step": 67485 }, { "epoch": 4.585541513792635, "grad_norm": 3.2257556915283203, "learning_rate": 0.00042701963582008424, "loss": 3.5521, "step": 67490 }, { "epoch": 4.585881233863296, "grad_norm": 1.987822413444519, "learning_rate": 0.0004269771708112515, "loss": 3.2941, "step": 67495 }, { "epoch": 4.5862209539339585, "grad_norm": 1.8232966661453247, "learning_rate": 0.00042693470580241886, "loss": 3.5501, "step": 67500 }, { "epoch": 4.5865606740046205, "grad_norm": 1.5074574947357178, "learning_rate": 0.0004268922407935861, "loss": 3.3949, "step": 67505 }, { "epoch": 4.586900394075282, "grad_norm": 2.050370454788208, "learning_rate": 0.00042684977578475336, "loss": 3.375, "step": 67510 }, { "epoch": 4.587240114145944, "grad_norm": 1.3909425735473633, "learning_rate": 0.00042680731077592064, "loss": 3.3866, "step": 67515 }, { "epoch": 4.587579834216606, "grad_norm": 1.881459355354309, "learning_rate": 0.0004267648457670879, "loss": 3.4893, "step": 67520 }, { "epoch": 4.587919554287267, "grad_norm": 1.719783067703247, "learning_rate": 0.0004267223807582552, "loss": 3.4921, "step": 67525 }, { "epoch": 4.588259274357929, "grad_norm": 2.0116591453552246, "learning_rate": 0.0004266799157494225, "loss": 3.433, "step": 67530 }, { "epoch": 4.588598994428591, "grad_norm": 1.8696012496948242, "learning_rate": 0.00042663745074058976, "loss": 3.3063, "step": 67535 }, { "epoch": 4.588938714499252, "grad_norm": 2.179152727127075, "learning_rate": 0.00042659498573175705, "loss": 3.362, "step": 67540 }, { "epoch": 4.5892784345699145, "grad_norm": 1.8665004968643188, "learning_rate": 0.0004265525207229243, "loss": 3.5264, "step": 67545 }, { "epoch": 4.5896181546405765, 
"grad_norm": 2.240241050720215, "learning_rate": 0.00042651005571409155, "loss": 3.3797, "step": 67550 }, { "epoch": 4.589957874711238, "grad_norm": 1.9199655055999756, "learning_rate": 0.0004264675907052589, "loss": 3.3541, "step": 67555 }, { "epoch": 4.5902975947819, "grad_norm": 1.577232003211975, "learning_rate": 0.00042642512569642617, "loss": 3.4886, "step": 67560 }, { "epoch": 4.590637314852561, "grad_norm": 2.2772750854492188, "learning_rate": 0.0004263826606875934, "loss": 3.2109, "step": 67565 }, { "epoch": 4.590977034923223, "grad_norm": 1.9245176315307617, "learning_rate": 0.0004263401956787607, "loss": 3.1959, "step": 67570 }, { "epoch": 4.591316754993885, "grad_norm": 1.567610502243042, "learning_rate": 0.000426297730669928, "loss": 3.3587, "step": 67575 }, { "epoch": 4.591656475064546, "grad_norm": 1.9351003170013428, "learning_rate": 0.0004262552656610953, "loss": 3.3355, "step": 67580 }, { "epoch": 4.591996195135208, "grad_norm": 1.6084662675857544, "learning_rate": 0.0004262128006522625, "loss": 3.4343, "step": 67585 }, { "epoch": 4.5923359152058705, "grad_norm": 1.6786174774169922, "learning_rate": 0.00042617033564342985, "loss": 3.3119, "step": 67590 }, { "epoch": 4.592675635276532, "grad_norm": 1.9121578931808472, "learning_rate": 0.0004261278706345971, "loss": 3.0282, "step": 67595 }, { "epoch": 4.593015355347194, "grad_norm": 1.798752784729004, "learning_rate": 0.00042608540562576435, "loss": 3.2682, "step": 67600 }, { "epoch": 4.593355075417856, "grad_norm": 1.9025605916976929, "learning_rate": 0.0004260429406169317, "loss": 3.3211, "step": 67605 }, { "epoch": 4.593694795488517, "grad_norm": 1.8241956233978271, "learning_rate": 0.00042600047560809897, "loss": 3.3123, "step": 67610 }, { "epoch": 4.594034515559179, "grad_norm": 1.9506531953811646, "learning_rate": 0.0004259580105992662, "loss": 3.3021, "step": 67615 }, { "epoch": 4.594374235629841, "grad_norm": 1.9912854433059692, "learning_rate": 0.00042591554559043347, "loss": 3.2744, 
"step": 67620 }, { "epoch": 4.594713955700502, "grad_norm": 2.4354822635650635, "learning_rate": 0.0004258730805816008, "loss": 3.3186, "step": 67625 }, { "epoch": 4.595053675771164, "grad_norm": 1.7509571313858032, "learning_rate": 0.00042583061557276803, "loss": 3.4635, "step": 67630 }, { "epoch": 4.5953933958418265, "grad_norm": 1.4708936214447021, "learning_rate": 0.0004257881505639353, "loss": 3.2465, "step": 67635 }, { "epoch": 4.595733115912488, "grad_norm": 2.078488826751709, "learning_rate": 0.00042574568555510265, "loss": 3.1953, "step": 67640 }, { "epoch": 4.59607283598315, "grad_norm": 2.0320394039154053, "learning_rate": 0.00042570322054626987, "loss": 3.6648, "step": 67645 }, { "epoch": 4.596412556053812, "grad_norm": 2.0213723182678223, "learning_rate": 0.00042566075553743715, "loss": 3.4912, "step": 67650 }, { "epoch": 4.596752276124473, "grad_norm": 1.9447689056396484, "learning_rate": 0.00042561829052860443, "loss": 3.6235, "step": 67655 }, { "epoch": 4.597091996195135, "grad_norm": 2.193000078201294, "learning_rate": 0.0004255758255197717, "loss": 3.2642, "step": 67660 }, { "epoch": 4.597431716265797, "grad_norm": 1.7971723079681396, "learning_rate": 0.000425533360510939, "loss": 3.3923, "step": 67665 }, { "epoch": 4.597771436336458, "grad_norm": 1.410441279411316, "learning_rate": 0.00042549089550210627, "loss": 3.4538, "step": 67670 }, { "epoch": 4.59811115640712, "grad_norm": 1.8119022846221924, "learning_rate": 0.00042544843049327355, "loss": 3.4198, "step": 67675 }, { "epoch": 4.5984508764777825, "grad_norm": 1.6429924964904785, "learning_rate": 0.00042540596548444083, "loss": 3.3134, "step": 67680 }, { "epoch": 4.598790596548444, "grad_norm": 2.2494277954101562, "learning_rate": 0.0004253635004756081, "loss": 3.186, "step": 67685 }, { "epoch": 4.599130316619106, "grad_norm": 1.9796490669250488, "learning_rate": 0.00042532103546677534, "loss": 3.4415, "step": 67690 }, { "epoch": 4.599470036689768, "grad_norm": 1.671807885169983, 
"learning_rate": 0.0004252785704579427, "loss": 3.2357, "step": 67695 }, { "epoch": 4.599809756760429, "grad_norm": 1.8129563331604004, "learning_rate": 0.00042523610544910995, "loss": 3.4353, "step": 67700 }, { "epoch": 4.600149476831091, "grad_norm": 2.042140007019043, "learning_rate": 0.0004251936404402772, "loss": 3.5767, "step": 67705 }, { "epoch": 4.600489196901753, "grad_norm": 1.8474841117858887, "learning_rate": 0.0004251511754314445, "loss": 3.1028, "step": 67710 }, { "epoch": 4.600828916972414, "grad_norm": 2.0884206295013428, "learning_rate": 0.0004251087104226118, "loss": 3.2367, "step": 67715 }, { "epoch": 4.601168637043076, "grad_norm": 2.138296127319336, "learning_rate": 0.000425066245413779, "loss": 3.294, "step": 67720 }, { "epoch": 4.6015083571137385, "grad_norm": 1.6802669763565063, "learning_rate": 0.0004250237804049463, "loss": 3.271, "step": 67725 }, { "epoch": 4.6018480771844, "grad_norm": 1.9784202575683594, "learning_rate": 0.00042498131539611363, "loss": 3.3171, "step": 67730 }, { "epoch": 4.602187797255062, "grad_norm": 1.7765451669692993, "learning_rate": 0.00042493885038728086, "loss": 3.3592, "step": 67735 }, { "epoch": 4.602527517325724, "grad_norm": 2.0208113193511963, "learning_rate": 0.00042489638537844814, "loss": 3.4942, "step": 67740 }, { "epoch": 4.602867237396385, "grad_norm": 1.8621978759765625, "learning_rate": 0.0004248539203696155, "loss": 3.3621, "step": 67745 }, { "epoch": 4.603206957467047, "grad_norm": 2.0348474979400635, "learning_rate": 0.00042481145536078275, "loss": 3.4137, "step": 67750 }, { "epoch": 4.603546677537709, "grad_norm": 2.144712209701538, "learning_rate": 0.00042476899035195, "loss": 3.268, "step": 67755 }, { "epoch": 4.60388639760837, "grad_norm": 2.052469491958618, "learning_rate": 0.00042472652534311726, "loss": 3.4627, "step": 67760 }, { "epoch": 4.6042261176790324, "grad_norm": 2.161104679107666, "learning_rate": 0.0004246840603342846, "loss": 3.4563, "step": 67765 }, { "epoch": 
4.6045658377496945, "grad_norm": 2.0813815593719482, "learning_rate": 0.0004246415953254518, "loss": 3.6284, "step": 67770 }, { "epoch": 4.604905557820356, "grad_norm": 2.0796332359313965, "learning_rate": 0.0004245991303166191, "loss": 3.5112, "step": 67775 }, { "epoch": 4.605245277891018, "grad_norm": 1.8554766178131104, "learning_rate": 0.00042455666530778643, "loss": 3.414, "step": 67780 }, { "epoch": 4.60558499796168, "grad_norm": 2.03842830657959, "learning_rate": 0.00042451420029895366, "loss": 3.467, "step": 67785 }, { "epoch": 4.605924718032341, "grad_norm": 1.848076581954956, "learning_rate": 0.00042447173529012094, "loss": 3.4175, "step": 67790 }, { "epoch": 4.606264438103003, "grad_norm": 1.2834770679473877, "learning_rate": 0.0004244292702812883, "loss": 3.4064, "step": 67795 }, { "epoch": 4.606604158173665, "grad_norm": 2.1441757678985596, "learning_rate": 0.0004243868052724555, "loss": 3.2857, "step": 67800 }, { "epoch": 4.606943878244326, "grad_norm": 1.61810302734375, "learning_rate": 0.0004243443402636228, "loss": 3.4616, "step": 67805 }, { "epoch": 4.6072835983149885, "grad_norm": 1.9624124765396118, "learning_rate": 0.00042430187525479006, "loss": 3.2385, "step": 67810 }, { "epoch": 4.6076233183856505, "grad_norm": 1.531187891960144, "learning_rate": 0.00042425941024595734, "loss": 3.4715, "step": 67815 }, { "epoch": 4.607963038456312, "grad_norm": 1.6057672500610352, "learning_rate": 0.0004242169452371246, "loss": 3.0715, "step": 67820 }, { "epoch": 4.608302758526974, "grad_norm": 1.9416656494140625, "learning_rate": 0.0004241744802282919, "loss": 3.4397, "step": 67825 }, { "epoch": 4.608642478597636, "grad_norm": 1.8146480321884155, "learning_rate": 0.0004241320152194592, "loss": 3.686, "step": 67830 }, { "epoch": 4.608982198668297, "grad_norm": 1.8221739530563354, "learning_rate": 0.00042408955021062646, "loss": 3.2886, "step": 67835 }, { "epoch": 4.609321918738959, "grad_norm": 1.5540648698806763, "learning_rate": 0.00042404708520179374, 
"loss": 3.1071, "step": 67840 }, { "epoch": 4.609661638809621, "grad_norm": 1.9647746086120605, "learning_rate": 0.00042400462019296097, "loss": 3.3545, "step": 67845 }, { "epoch": 4.610001358880282, "grad_norm": 2.1902101039886475, "learning_rate": 0.0004239621551841283, "loss": 3.3989, "step": 67850 }, { "epoch": 4.6103410789509445, "grad_norm": 1.9326322078704834, "learning_rate": 0.0004239196901752956, "loss": 3.3763, "step": 67855 }, { "epoch": 4.6106807990216065, "grad_norm": 2.170781135559082, "learning_rate": 0.0004238772251664628, "loss": 3.3501, "step": 67860 }, { "epoch": 4.611020519092268, "grad_norm": 2.0613198280334473, "learning_rate": 0.00042383476015763014, "loss": 3.3615, "step": 67865 }, { "epoch": 4.61136023916293, "grad_norm": 2.0830695629119873, "learning_rate": 0.0004237922951487974, "loss": 3.0508, "step": 67870 }, { "epoch": 4.611699959233592, "grad_norm": 2.175957202911377, "learning_rate": 0.00042374983013996465, "loss": 3.3733, "step": 67875 }, { "epoch": 4.612039679304253, "grad_norm": 1.516674518585205, "learning_rate": 0.00042370736513113193, "loss": 3.4491, "step": 67880 }, { "epoch": 4.612379399374915, "grad_norm": 1.3300042152404785, "learning_rate": 0.00042366490012229926, "loss": 3.272, "step": 67885 }, { "epoch": 4.612719119445577, "grad_norm": 1.8661457300186157, "learning_rate": 0.0004236224351134665, "loss": 3.5597, "step": 67890 }, { "epoch": 4.613058839516238, "grad_norm": 2.4688878059387207, "learning_rate": 0.00042357997010463377, "loss": 3.2873, "step": 67895 }, { "epoch": 4.6133985595869005, "grad_norm": 1.9556292295455933, "learning_rate": 0.0004235375050958011, "loss": 3.478, "step": 67900 }, { "epoch": 4.613738279657563, "grad_norm": 1.3792569637298584, "learning_rate": 0.00042349504008696833, "loss": 3.2995, "step": 67905 }, { "epoch": 4.614077999728224, "grad_norm": 2.0575942993164062, "learning_rate": 0.0004234525750781356, "loss": 3.2566, "step": 67910 }, { "epoch": 4.614417719798886, "grad_norm": 
2.098261833190918, "learning_rate": 0.0004234101100693029, "loss": 3.3423, "step": 67915 }, { "epoch": 4.614757439869548, "grad_norm": 1.7361377477645874, "learning_rate": 0.0004233676450604702, "loss": 3.2928, "step": 67920 }, { "epoch": 4.615097159940209, "grad_norm": 1.8651716709136963, "learning_rate": 0.00042332518005163745, "loss": 3.583, "step": 67925 }, { "epoch": 4.615436880010871, "grad_norm": 1.610450267791748, "learning_rate": 0.00042328271504280473, "loss": 3.1881, "step": 67930 }, { "epoch": 4.615776600081533, "grad_norm": 1.4828107357025146, "learning_rate": 0.00042324025003397206, "loss": 3.2176, "step": 67935 }, { "epoch": 4.616116320152194, "grad_norm": 1.7215007543563843, "learning_rate": 0.0004231977850251393, "loss": 3.3372, "step": 67940 }, { "epoch": 4.6164560402228565, "grad_norm": 1.6703894138336182, "learning_rate": 0.00042315532001630657, "loss": 3.5334, "step": 67945 }, { "epoch": 4.616795760293519, "grad_norm": 1.4732599258422852, "learning_rate": 0.00042311285500747385, "loss": 3.4055, "step": 67950 }, { "epoch": 4.61713548036418, "grad_norm": 1.7812269926071167, "learning_rate": 0.00042307038999864113, "loss": 3.5715, "step": 67955 }, { "epoch": 4.617475200434842, "grad_norm": 2.231348752975464, "learning_rate": 0.0004230279249898084, "loss": 3.314, "step": 67960 }, { "epoch": 4.617814920505504, "grad_norm": 1.584802508354187, "learning_rate": 0.0004229854599809757, "loss": 3.2556, "step": 67965 }, { "epoch": 4.618154640576165, "grad_norm": 2.463658571243286, "learning_rate": 0.00042294299497214297, "loss": 3.2767, "step": 67970 }, { "epoch": 4.618494360646827, "grad_norm": 2.461693286895752, "learning_rate": 0.00042290052996331025, "loss": 3.4172, "step": 67975 }, { "epoch": 4.618834080717489, "grad_norm": 1.87998366355896, "learning_rate": 0.00042285806495447753, "loss": 3.1832, "step": 67980 }, { "epoch": 4.61917380078815, "grad_norm": 1.765078067779541, "learning_rate": 0.00042281559994564476, "loss": 3.3506, "step": 67985 }, { 
"epoch": 4.6195135208588125, "grad_norm": 1.916540503501892, "learning_rate": 0.0004227731349368121, "loss": 3.4387, "step": 67990 }, { "epoch": 4.619853240929475, "grad_norm": 2.167253017425537, "learning_rate": 0.00042273066992797937, "loss": 3.5325, "step": 67995 }, { "epoch": 4.620192961000136, "grad_norm": 2.081573724746704, "learning_rate": 0.0004226882049191466, "loss": 3.1601, "step": 68000 }, { "epoch": 4.620532681070798, "grad_norm": 1.7110402584075928, "learning_rate": 0.00042264573991031393, "loss": 3.4062, "step": 68005 }, { "epoch": 4.62087240114146, "grad_norm": 1.7075430154800415, "learning_rate": 0.0004226032749014812, "loss": 3.4403, "step": 68010 }, { "epoch": 4.621212121212121, "grad_norm": 1.618744969367981, "learning_rate": 0.00042256080989264844, "loss": 3.1523, "step": 68015 }, { "epoch": 4.621551841282783, "grad_norm": 1.752685546875, "learning_rate": 0.0004225183448838157, "loss": 3.2993, "step": 68020 }, { "epoch": 4.621891561353445, "grad_norm": 1.867063045501709, "learning_rate": 0.00042247587987498305, "loss": 3.1516, "step": 68025 }, { "epoch": 4.622231281424106, "grad_norm": 1.8414533138275146, "learning_rate": 0.0004224334148661503, "loss": 3.2335, "step": 68030 }, { "epoch": 4.6225710014947685, "grad_norm": 2.092395782470703, "learning_rate": 0.00042239094985731756, "loss": 3.3432, "step": 68035 }, { "epoch": 4.62291072156543, "grad_norm": 2.077040433883667, "learning_rate": 0.0004223484848484849, "loss": 3.317, "step": 68040 }, { "epoch": 4.623250441636092, "grad_norm": 2.1075992584228516, "learning_rate": 0.0004223060198396521, "loss": 3.1149, "step": 68045 }, { "epoch": 4.623590161706754, "grad_norm": 1.9806627035140991, "learning_rate": 0.0004222635548308194, "loss": 3.3641, "step": 68050 }, { "epoch": 4.623929881777415, "grad_norm": 1.775447964668274, "learning_rate": 0.00042222108982198673, "loss": 3.2669, "step": 68055 }, { "epoch": 4.624269601848077, "grad_norm": 1.9689422845840454, "learning_rate": 0.00042217862481315396, 
"loss": 3.4495, "step": 68060 }, { "epoch": 4.624609321918739, "grad_norm": 1.8025994300842285, "learning_rate": 0.00042213615980432124, "loss": 3.6128, "step": 68065 }, { "epoch": 4.6249490419894, "grad_norm": 2.1252660751342773, "learning_rate": 0.0004220936947954885, "loss": 3.1732, "step": 68070 }, { "epoch": 4.6252887620600625, "grad_norm": 1.6395560503005981, "learning_rate": 0.0004220512297866558, "loss": 3.3644, "step": 68075 }, { "epoch": 4.6256284821307245, "grad_norm": 2.1845104694366455, "learning_rate": 0.0004220087647778231, "loss": 3.3915, "step": 68080 }, { "epoch": 4.625968202201386, "grad_norm": 1.7207471132278442, "learning_rate": 0.00042196629976899036, "loss": 3.3566, "step": 68085 }, { "epoch": 4.626307922272048, "grad_norm": 2.164003610610962, "learning_rate": 0.0004219238347601577, "loss": 3.3619, "step": 68090 }, { "epoch": 4.62664764234271, "grad_norm": 1.5987143516540527, "learning_rate": 0.0004218813697513249, "loss": 3.4944, "step": 68095 }, { "epoch": 4.626987362413371, "grad_norm": 2.2404415607452393, "learning_rate": 0.0004218389047424922, "loss": 3.5605, "step": 68100 }, { "epoch": 4.627327082484033, "grad_norm": 1.8753674030303955, "learning_rate": 0.0004217964397336595, "loss": 3.359, "step": 68105 }, { "epoch": 4.627666802554695, "grad_norm": 1.6972709894180298, "learning_rate": 0.00042175397472482676, "loss": 3.152, "step": 68110 }, { "epoch": 4.628006522625356, "grad_norm": 1.6612260341644287, "learning_rate": 0.00042171150971599404, "loss": 3.4357, "step": 68115 }, { "epoch": 4.6283462426960185, "grad_norm": 1.8443503379821777, "learning_rate": 0.0004216690447071613, "loss": 3.3015, "step": 68120 }, { "epoch": 4.6286859627666805, "grad_norm": 2.3015806674957275, "learning_rate": 0.0004216265796983286, "loss": 3.4937, "step": 68125 }, { "epoch": 4.629025682837342, "grad_norm": 1.5250144004821777, "learning_rate": 0.0004215841146894959, "loss": 3.4549, "step": 68130 }, { "epoch": 4.629365402908004, "grad_norm": 
1.4996612071990967, "learning_rate": 0.00042154164968066316, "loss": 3.3352, "step": 68135 }, { "epoch": 4.629705122978666, "grad_norm": 1.7666679620742798, "learning_rate": 0.0004214991846718304, "loss": 3.324, "step": 68140 }, { "epoch": 4.630044843049327, "grad_norm": 1.5230745077133179, "learning_rate": 0.0004214567196629977, "loss": 3.3285, "step": 68145 }, { "epoch": 4.630384563119989, "grad_norm": 2.265835762023926, "learning_rate": 0.000421414254654165, "loss": 3.3921, "step": 68150 }, { "epoch": 4.630724283190651, "grad_norm": 1.7914577722549438, "learning_rate": 0.0004213717896453322, "loss": 3.2267, "step": 68155 }, { "epoch": 4.631064003261312, "grad_norm": 1.7062561511993408, "learning_rate": 0.00042132932463649956, "loss": 3.4527, "step": 68160 }, { "epoch": 4.6314037233319745, "grad_norm": 2.036616563796997, "learning_rate": 0.00042128685962766684, "loss": 3.4211, "step": 68165 }, { "epoch": 4.6317434434026366, "grad_norm": 1.6552683115005493, "learning_rate": 0.00042124439461883406, "loss": 3.3404, "step": 68170 }, { "epoch": 4.632083163473298, "grad_norm": 1.96787691116333, "learning_rate": 0.00042120192961000134, "loss": 3.4309, "step": 68175 }, { "epoch": 4.63242288354396, "grad_norm": 1.5590683221817017, "learning_rate": 0.0004211594646011687, "loss": 3.3466, "step": 68180 }, { "epoch": 4.632762603614622, "grad_norm": 1.9934649467468262, "learning_rate": 0.0004211169995923359, "loss": 3.3877, "step": 68185 }, { "epoch": 4.633102323685283, "grad_norm": 1.786136269569397, "learning_rate": 0.0004210745345835032, "loss": 3.346, "step": 68190 }, { "epoch": 4.633442043755945, "grad_norm": 1.645216464996338, "learning_rate": 0.0004210320695746705, "loss": 3.6526, "step": 68195 }, { "epoch": 4.633781763826607, "grad_norm": 1.7490614652633667, "learning_rate": 0.00042098960456583774, "loss": 3.2563, "step": 68200 }, { "epoch": 4.634121483897268, "grad_norm": 2.018580675125122, "learning_rate": 0.000420947139557005, "loss": 3.5175, "step": 68205 }, { 
"epoch": 4.6344612039679305, "grad_norm": 1.9995760917663574, "learning_rate": 0.0004209046745481723, "loss": 3.0729, "step": 68210 }, { "epoch": 4.634800924038593, "grad_norm": 1.6768364906311035, "learning_rate": 0.0004208622095393396, "loss": 3.7103, "step": 68215 }, { "epoch": 4.635140644109254, "grad_norm": 1.8049652576446533, "learning_rate": 0.00042081974453050686, "loss": 3.2726, "step": 68220 }, { "epoch": 4.635480364179916, "grad_norm": 1.6392453908920288, "learning_rate": 0.00042077727952167414, "loss": 3.5219, "step": 68225 }, { "epoch": 4.635820084250578, "grad_norm": 1.804840326309204, "learning_rate": 0.0004207348145128414, "loss": 3.1987, "step": 68230 }, { "epoch": 4.636159804321239, "grad_norm": 2.028620481491089, "learning_rate": 0.0004206923495040087, "loss": 3.3112, "step": 68235 }, { "epoch": 4.636499524391901, "grad_norm": 2.0241568088531494, "learning_rate": 0.000420649884495176, "loss": 3.4957, "step": 68240 }, { "epoch": 4.636839244462563, "grad_norm": 1.675426721572876, "learning_rate": 0.0004206074194863432, "loss": 3.3585, "step": 68245 }, { "epoch": 4.637178964533224, "grad_norm": 2.2857632637023926, "learning_rate": 0.00042056495447751054, "loss": 3.4168, "step": 68250 }, { "epoch": 4.6375186846038865, "grad_norm": 1.5780495405197144, "learning_rate": 0.0004205224894686778, "loss": 3.2019, "step": 68255 }, { "epoch": 4.637858404674548, "grad_norm": 1.710516095161438, "learning_rate": 0.0004204800244598451, "loss": 3.523, "step": 68260 }, { "epoch": 4.63819812474521, "grad_norm": 1.6648162603378296, "learning_rate": 0.0004204375594510124, "loss": 3.352, "step": 68265 }, { "epoch": 4.638537844815872, "grad_norm": 1.6573022603988647, "learning_rate": 0.00042039509444217967, "loss": 3.1712, "step": 68270 }, { "epoch": 4.638877564886533, "grad_norm": 1.5741218328475952, "learning_rate": 0.00042035262943334695, "loss": 3.5679, "step": 68275 }, { "epoch": 4.639217284957195, "grad_norm": 1.9605861902236938, "learning_rate": 
0.00042031016442451417, "loss": 3.5195, "step": 68280 }, { "epoch": 4.639557005027857, "grad_norm": 1.9725092649459839, "learning_rate": 0.0004202676994156815, "loss": 3.0899, "step": 68285 }, { "epoch": 4.639896725098518, "grad_norm": 1.5573654174804688, "learning_rate": 0.0004202252344068488, "loss": 3.2167, "step": 68290 }, { "epoch": 4.64023644516918, "grad_norm": 1.8968743085861206, "learning_rate": 0.000420182769398016, "loss": 3.4574, "step": 68295 }, { "epoch": 4.6405761652398425, "grad_norm": 1.967404842376709, "learning_rate": 0.00042014030438918335, "loss": 3.1098, "step": 68300 }, { "epoch": 4.640915885310504, "grad_norm": 1.8101806640625, "learning_rate": 0.0004200978393803506, "loss": 3.1312, "step": 68305 }, { "epoch": 4.641255605381166, "grad_norm": 1.7784500122070312, "learning_rate": 0.00042005537437151785, "loss": 3.2728, "step": 68310 }, { "epoch": 4.641595325451828, "grad_norm": 2.151669502258301, "learning_rate": 0.00042001290936268513, "loss": 3.476, "step": 68315 }, { "epoch": 4.641935045522489, "grad_norm": 1.6398355960845947, "learning_rate": 0.00041997044435385247, "loss": 3.3739, "step": 68320 }, { "epoch": 4.642274765593151, "grad_norm": 1.8002636432647705, "learning_rate": 0.0004199279793450197, "loss": 3.2496, "step": 68325 }, { "epoch": 4.642614485663813, "grad_norm": 2.548447608947754, "learning_rate": 0.00041988551433618697, "loss": 3.4793, "step": 68330 }, { "epoch": 4.642954205734474, "grad_norm": 1.5330373048782349, "learning_rate": 0.0004198430493273543, "loss": 3.3168, "step": 68335 }, { "epoch": 4.6432939258051364, "grad_norm": 1.730836272239685, "learning_rate": 0.00041980058431852153, "loss": 3.4541, "step": 68340 }, { "epoch": 4.6436336458757985, "grad_norm": 1.9535136222839355, "learning_rate": 0.0004197581193096888, "loss": 3.0601, "step": 68345 }, { "epoch": 4.64397336594646, "grad_norm": 2.032755136489868, "learning_rate": 0.00041971565430085615, "loss": 3.4285, "step": 68350 }, { "epoch": 4.644313086017122, 
"grad_norm": 1.780427098274231, "learning_rate": 0.00041967318929202337, "loss": 3.3328, "step": 68355 }, { "epoch": 4.644652806087784, "grad_norm": 1.8698481321334839, "learning_rate": 0.00041963072428319065, "loss": 3.349, "step": 68360 }, { "epoch": 4.644992526158445, "grad_norm": 2.0240025520324707, "learning_rate": 0.00041958825927435793, "loss": 3.135, "step": 68365 }, { "epoch": 4.645332246229107, "grad_norm": 2.002873659133911, "learning_rate": 0.0004195457942655252, "loss": 3.6171, "step": 68370 }, { "epoch": 4.645671966299769, "grad_norm": 1.967679738998413, "learning_rate": 0.0004195033292566925, "loss": 3.5993, "step": 68375 }, { "epoch": 4.64601168637043, "grad_norm": 1.8560625314712524, "learning_rate": 0.00041946086424785977, "loss": 3.2081, "step": 68380 }, { "epoch": 4.6463514064410925, "grad_norm": 1.6346806287765503, "learning_rate": 0.00041941839923902705, "loss": 3.3367, "step": 68385 }, { "epoch": 4.6466911265117545, "grad_norm": 1.773461937904358, "learning_rate": 0.00041937593423019433, "loss": 3.3079, "step": 68390 }, { "epoch": 4.647030846582416, "grad_norm": 2.353480577468872, "learning_rate": 0.0004193334692213616, "loss": 3.4819, "step": 68395 }, { "epoch": 4.647370566653078, "grad_norm": 1.8492529392242432, "learning_rate": 0.00041929100421252884, "loss": 3.6024, "step": 68400 }, { "epoch": 4.64771028672374, "grad_norm": 1.9163175821304321, "learning_rate": 0.0004192485392036962, "loss": 3.3736, "step": 68405 }, { "epoch": 4.648050006794401, "grad_norm": 1.752747654914856, "learning_rate": 0.00041920607419486345, "loss": 3.2329, "step": 68410 }, { "epoch": 4.648389726865063, "grad_norm": 2.4251413345336914, "learning_rate": 0.0004191636091860307, "loss": 3.4065, "step": 68415 }, { "epoch": 4.648729446935725, "grad_norm": 1.564185380935669, "learning_rate": 0.000419121144177198, "loss": 3.2787, "step": 68420 }, { "epoch": 4.649069167006386, "grad_norm": 1.6799242496490479, "learning_rate": 0.0004190786791683653, "loss": 3.2567, "step": 
68425 }, { "epoch": 4.6494088870770485, "grad_norm": 1.7982861995697021, "learning_rate": 0.0004190362141595326, "loss": 3.3753, "step": 68430 }, { "epoch": 4.6497486071477105, "grad_norm": 1.891758680343628, "learning_rate": 0.0004189937491506998, "loss": 3.3807, "step": 68435 }, { "epoch": 4.650088327218372, "grad_norm": 1.5265600681304932, "learning_rate": 0.00041895128414186713, "loss": 3.5879, "step": 68440 }, { "epoch": 4.650428047289034, "grad_norm": 1.614911675453186, "learning_rate": 0.0004189088191330344, "loss": 3.4829, "step": 68445 }, { "epoch": 4.650767767359696, "grad_norm": 2.11139178276062, "learning_rate": 0.00041886635412420164, "loss": 3.4263, "step": 68450 }, { "epoch": 4.651107487430357, "grad_norm": 1.6442548036575317, "learning_rate": 0.000418823889115369, "loss": 3.4294, "step": 68455 }, { "epoch": 4.651447207501019, "grad_norm": 1.5368049144744873, "learning_rate": 0.00041878142410653625, "loss": 3.7165, "step": 68460 }, { "epoch": 4.651786927571681, "grad_norm": 1.4503897428512573, "learning_rate": 0.0004187389590977035, "loss": 3.2882, "step": 68465 }, { "epoch": 4.652126647642342, "grad_norm": 1.7280585765838623, "learning_rate": 0.00041869649408887076, "loss": 3.4091, "step": 68470 }, { "epoch": 4.6524663677130045, "grad_norm": 2.217010021209717, "learning_rate": 0.0004186540290800381, "loss": 3.2008, "step": 68475 }, { "epoch": 4.6528060877836666, "grad_norm": 1.5354924201965332, "learning_rate": 0.0004186115640712053, "loss": 3.0804, "step": 68480 }, { "epoch": 4.653145807854328, "grad_norm": 2.342289924621582, "learning_rate": 0.0004185690990623726, "loss": 3.469, "step": 68485 }, { "epoch": 4.65348552792499, "grad_norm": 1.9310129880905151, "learning_rate": 0.00041852663405353993, "loss": 3.4771, "step": 68490 }, { "epoch": 4.653825247995652, "grad_norm": 1.7865679264068604, "learning_rate": 0.00041848416904470716, "loss": 3.1948, "step": 68495 }, { "epoch": 4.654164968066313, "grad_norm": 1.6722131967544556, "learning_rate": 
0.00041844170403587444, "loss": 3.4617, "step": 68500 }, { "epoch": 4.654504688136975, "grad_norm": 1.7873880863189697, "learning_rate": 0.0004183992390270417, "loss": 3.3813, "step": 68505 }, { "epoch": 4.654844408207637, "grad_norm": 1.989492416381836, "learning_rate": 0.000418356774018209, "loss": 3.468, "step": 68510 }, { "epoch": 4.655184128278298, "grad_norm": 1.80553138256073, "learning_rate": 0.0004183143090093763, "loss": 3.2989, "step": 68515 }, { "epoch": 4.6555238483489605, "grad_norm": 1.8695837259292603, "learning_rate": 0.00041827184400054356, "loss": 3.4794, "step": 68520 }, { "epoch": 4.655863568419623, "grad_norm": 1.595505952835083, "learning_rate": 0.00041822937899171084, "loss": 3.342, "step": 68525 }, { "epoch": 4.656203288490284, "grad_norm": 2.083784818649292, "learning_rate": 0.0004181869139828781, "loss": 3.4252, "step": 68530 }, { "epoch": 4.656543008560946, "grad_norm": 1.9454162120819092, "learning_rate": 0.0004181444489740454, "loss": 3.5024, "step": 68535 }, { "epoch": 4.656882728631608, "grad_norm": 2.069272518157959, "learning_rate": 0.0004181019839652126, "loss": 3.488, "step": 68540 }, { "epoch": 4.657222448702269, "grad_norm": 1.941497802734375, "learning_rate": 0.00041805951895637996, "loss": 3.4704, "step": 68545 }, { "epoch": 4.657562168772931, "grad_norm": 1.9245812892913818, "learning_rate": 0.00041801705394754724, "loss": 3.5678, "step": 68550 }, { "epoch": 4.657901888843593, "grad_norm": 1.7060432434082031, "learning_rate": 0.00041797458893871447, "loss": 3.1693, "step": 68555 }, { "epoch": 4.658241608914254, "grad_norm": 1.9924858808517456, "learning_rate": 0.0004179321239298818, "loss": 3.3226, "step": 68560 }, { "epoch": 4.6585813289849165, "grad_norm": 1.7136892080307007, "learning_rate": 0.0004178896589210491, "loss": 3.5166, "step": 68565 }, { "epoch": 4.658921049055579, "grad_norm": 2.334174871444702, "learning_rate": 0.0004178471939122163, "loss": 3.6021, "step": 68570 }, { "epoch": 4.65926076912624, "grad_norm": 
1.8535794019699097, "learning_rate": 0.0004178047289033836, "loss": 3.3025, "step": 68575 }, { "epoch": 4.659600489196902, "grad_norm": 1.6828486919403076, "learning_rate": 0.0004177622638945509, "loss": 3.4897, "step": 68580 }, { "epoch": 4.659940209267564, "grad_norm": 1.8210846185684204, "learning_rate": 0.00041771979888571815, "loss": 3.5279, "step": 68585 }, { "epoch": 4.660279929338225, "grad_norm": 1.747755527496338, "learning_rate": 0.00041767733387688543, "loss": 3.3946, "step": 68590 }, { "epoch": 4.660619649408887, "grad_norm": 1.8492964506149292, "learning_rate": 0.00041763486886805276, "loss": 3.4327, "step": 68595 }, { "epoch": 4.660959369479549, "grad_norm": 1.5099107027053833, "learning_rate": 0.00041759240385922004, "loss": 3.0366, "step": 68600 }, { "epoch": 4.66129908955021, "grad_norm": 1.6670016050338745, "learning_rate": 0.00041754993885038727, "loss": 3.403, "step": 68605 }, { "epoch": 4.6616388096208725, "grad_norm": 1.845618486404419, "learning_rate": 0.00041750747384155455, "loss": 3.4393, "step": 68610 }, { "epoch": 4.661978529691535, "grad_norm": 1.929569125175476, "learning_rate": 0.0004174650088327219, "loss": 3.3813, "step": 68615 }, { "epoch": 4.662318249762196, "grad_norm": 2.1990835666656494, "learning_rate": 0.0004174225438238891, "loss": 3.3099, "step": 68620 }, { "epoch": 4.662657969832858, "grad_norm": 1.6650725603103638, "learning_rate": 0.0004173800788150564, "loss": 3.3113, "step": 68625 }, { "epoch": 4.66299768990352, "grad_norm": 1.808411717414856, "learning_rate": 0.0004173376138062237, "loss": 3.5768, "step": 68630 }, { "epoch": 4.663337409974181, "grad_norm": 2.3163869380950928, "learning_rate": 0.00041729514879739095, "loss": 3.4284, "step": 68635 }, { "epoch": 4.663677130044843, "grad_norm": 2.0155344009399414, "learning_rate": 0.00041725268378855823, "loss": 3.2916, "step": 68640 }, { "epoch": 4.664016850115505, "grad_norm": 1.590402364730835, "learning_rate": 0.00041721021877972556, "loss": 3.3064, "step": 68645 }, 
{ "epoch": 4.6643565701861665, "grad_norm": 2.152702569961548, "learning_rate": 0.0004171677537708928, "loss": 3.2782, "step": 68650 }, { "epoch": 4.6646962902568285, "grad_norm": 1.5619187355041504, "learning_rate": 0.00041712528876206007, "loss": 3.3712, "step": 68655 }, { "epoch": 4.665036010327491, "grad_norm": 1.543796181678772, "learning_rate": 0.00041708282375322735, "loss": 3.4322, "step": 68660 }, { "epoch": 4.665375730398152, "grad_norm": 1.7094979286193848, "learning_rate": 0.00041704035874439463, "loss": 3.3363, "step": 68665 }, { "epoch": 4.665715450468814, "grad_norm": 1.7332170009613037, "learning_rate": 0.0004169978937355619, "loss": 3.5346, "step": 68670 }, { "epoch": 4.666055170539476, "grad_norm": 1.7566858530044556, "learning_rate": 0.0004169554287267292, "loss": 3.5117, "step": 68675 }, { "epoch": 4.666394890610137, "grad_norm": 1.9468977451324463, "learning_rate": 0.00041691296371789647, "loss": 3.1299, "step": 68680 }, { "epoch": 4.666734610680799, "grad_norm": 2.148979425430298, "learning_rate": 0.00041687049870906375, "loss": 3.4605, "step": 68685 }, { "epoch": 4.667074330751461, "grad_norm": 2.367863416671753, "learning_rate": 0.00041682803370023103, "loss": 3.5385, "step": 68690 }, { "epoch": 4.6674140508221225, "grad_norm": 1.5618083477020264, "learning_rate": 0.00041678556869139825, "loss": 3.2779, "step": 68695 }, { "epoch": 4.6677537708927845, "grad_norm": 1.9240707159042358, "learning_rate": 0.0004167431036825656, "loss": 3.5033, "step": 68700 }, { "epoch": 4.668093490963447, "grad_norm": 1.7483505010604858, "learning_rate": 0.00041670063867373287, "loss": 3.3932, "step": 68705 }, { "epoch": 4.668433211034108, "grad_norm": 1.6362968683242798, "learning_rate": 0.0004166581736649001, "loss": 3.1551, "step": 68710 }, { "epoch": 4.66877293110477, "grad_norm": 1.7126224040985107, "learning_rate": 0.00041661570865606743, "loss": 3.3654, "step": 68715 }, { "epoch": 4.669112651175431, "grad_norm": 2.4084935188293457, "learning_rate": 
0.0004165732436472347, "loss": 3.5524, "step": 68720 }, { "epoch": 4.669452371246093, "grad_norm": 2.057544231414795, "learning_rate": 0.00041653077863840194, "loss": 3.4653, "step": 68725 }, { "epoch": 4.669792091316755, "grad_norm": 2.082726001739502, "learning_rate": 0.0004164883136295692, "loss": 3.3242, "step": 68730 }, { "epoch": 4.670131811387416, "grad_norm": 2.095712184906006, "learning_rate": 0.00041644584862073655, "loss": 3.3914, "step": 68735 }, { "epoch": 4.6704715314580785, "grad_norm": 1.7207167148590088, "learning_rate": 0.0004164033836119038, "loss": 3.3668, "step": 68740 }, { "epoch": 4.6708112515287405, "grad_norm": 2.2278590202331543, "learning_rate": 0.00041636091860307106, "loss": 3.2837, "step": 68745 }, { "epoch": 4.671150971599402, "grad_norm": 2.3462412357330322, "learning_rate": 0.0004163184535942384, "loss": 3.4406, "step": 68750 }, { "epoch": 4.671490691670064, "grad_norm": 2.185497999191284, "learning_rate": 0.0004162759885854056, "loss": 3.3671, "step": 68755 }, { "epoch": 4.671830411740726, "grad_norm": 1.9871935844421387, "learning_rate": 0.0004162335235765729, "loss": 3.4901, "step": 68760 }, { "epoch": 4.672170131811387, "grad_norm": 1.5981131792068481, "learning_rate": 0.0004161910585677402, "loss": 3.4793, "step": 68765 }, { "epoch": 4.672509851882049, "grad_norm": 2.2023777961730957, "learning_rate": 0.0004161485935589075, "loss": 3.3529, "step": 68770 }, { "epoch": 4.672849571952711, "grad_norm": 1.3509578704833984, "learning_rate": 0.00041610612855007474, "loss": 3.5222, "step": 68775 }, { "epoch": 4.673189292023372, "grad_norm": 1.5812708139419556, "learning_rate": 0.000416063663541242, "loss": 3.5668, "step": 68780 }, { "epoch": 4.6735290120940345, "grad_norm": 1.617669939994812, "learning_rate": 0.00041602119853240935, "loss": 3.4604, "step": 68785 }, { "epoch": 4.673868732164697, "grad_norm": 1.5283282995224, "learning_rate": 0.0004159787335235766, "loss": 3.2209, "step": 68790 }, { "epoch": 4.674208452235358, 
"grad_norm": 1.78382408618927, "learning_rate": 0.00041593626851474386, "loss": 3.3694, "step": 68795 }, { "epoch": 4.67454817230602, "grad_norm": 1.666237473487854, "learning_rate": 0.00041589380350591114, "loss": 3.3305, "step": 68800 }, { "epoch": 4.674887892376682, "grad_norm": 1.7745698690414429, "learning_rate": 0.00041585983149884496, "loss": 3.1854, "step": 68805 }, { "epoch": 4.675227612447343, "grad_norm": 1.650184154510498, "learning_rate": 0.00041581736649001224, "loss": 3.5711, "step": 68810 }, { "epoch": 4.675567332518005, "grad_norm": 1.598429799079895, "learning_rate": 0.0004157749014811795, "loss": 3.4976, "step": 68815 }, { "epoch": 4.675907052588667, "grad_norm": 1.7223007678985596, "learning_rate": 0.0004157324364723468, "loss": 3.3901, "step": 68820 }, { "epoch": 4.676246772659328, "grad_norm": 1.8972283601760864, "learning_rate": 0.0004156899714635141, "loss": 3.3168, "step": 68825 }, { "epoch": 4.6765864927299905, "grad_norm": 1.6602290868759155, "learning_rate": 0.00041564750645468136, "loss": 3.2372, "step": 68830 }, { "epoch": 4.676926212800653, "grad_norm": 1.847217321395874, "learning_rate": 0.00041560504144584864, "loss": 3.3312, "step": 68835 }, { "epoch": 4.677265932871314, "grad_norm": 2.0968940258026123, "learning_rate": 0.00041556257643701587, "loss": 3.1319, "step": 68840 }, { "epoch": 4.677605652941976, "grad_norm": 2.054525136947632, "learning_rate": 0.0004155201114281832, "loss": 3.4352, "step": 68845 }, { "epoch": 4.677945373012638, "grad_norm": 1.68955659866333, "learning_rate": 0.0004154776464193505, "loss": 3.2906, "step": 68850 }, { "epoch": 4.678285093083299, "grad_norm": 1.5667595863342285, "learning_rate": 0.0004154351814105177, "loss": 3.3692, "step": 68855 }, { "epoch": 4.678624813153961, "grad_norm": 2.3003454208374023, "learning_rate": 0.00041539271640168504, "loss": 3.3287, "step": 68860 }, { "epoch": 4.678964533224623, "grad_norm": 2.2043118476867676, "learning_rate": 0.0004153502513928523, "loss": 3.1586, "step": 
68865 }, { "epoch": 4.679304253295284, "grad_norm": 1.8167051076889038, "learning_rate": 0.00041530778638401955, "loss": 3.208, "step": 68870 }, { "epoch": 4.6796439733659465, "grad_norm": 2.0065670013427734, "learning_rate": 0.0004152653213751869, "loss": 3.4538, "step": 68875 }, { "epoch": 4.679983693436609, "grad_norm": 1.6033940315246582, "learning_rate": 0.00041522285636635416, "loss": 3.2566, "step": 68880 }, { "epoch": 4.68032341350727, "grad_norm": 2.0916593074798584, "learning_rate": 0.0004151803913575214, "loss": 3.3367, "step": 68885 }, { "epoch": 4.680663133577932, "grad_norm": 1.9668333530426025, "learning_rate": 0.00041513792634868867, "loss": 3.4359, "step": 68890 }, { "epoch": 4.681002853648594, "grad_norm": 1.905600905418396, "learning_rate": 0.000415095461339856, "loss": 3.3267, "step": 68895 }, { "epoch": 4.681342573719255, "grad_norm": 2.343017339706421, "learning_rate": 0.00041505299633102323, "loss": 3.3539, "step": 68900 }, { "epoch": 4.681682293789917, "grad_norm": 1.92930269241333, "learning_rate": 0.0004150105313221905, "loss": 3.4504, "step": 68905 }, { "epoch": 4.682022013860579, "grad_norm": 1.7853039503097534, "learning_rate": 0.00041496806631335784, "loss": 3.3129, "step": 68910 }, { "epoch": 4.68236173393124, "grad_norm": 2.106729507446289, "learning_rate": 0.00041492560130452507, "loss": 3.2873, "step": 68915 }, { "epoch": 4.6827014540019025, "grad_norm": 1.836050033569336, "learning_rate": 0.00041488313629569235, "loss": 3.6007, "step": 68920 }, { "epoch": 4.683041174072565, "grad_norm": 1.957862138748169, "learning_rate": 0.00041484067128685963, "loss": 3.4734, "step": 68925 }, { "epoch": 4.683380894143226, "grad_norm": 1.7848848104476929, "learning_rate": 0.0004147982062780269, "loss": 3.1819, "step": 68930 }, { "epoch": 4.683720614213888, "grad_norm": 1.8438010215759277, "learning_rate": 0.0004147557412691942, "loss": 3.0032, "step": 68935 }, { "epoch": 4.684060334284549, "grad_norm": 1.6110440492630005, "learning_rate": 
0.00041471327626036147, "loss": 3.5148, "step": 68940 }, { "epoch": 4.684400054355211, "grad_norm": 1.778818130493164, "learning_rate": 0.00041467081125152875, "loss": 3.2294, "step": 68945 }, { "epoch": 4.684739774425873, "grad_norm": 1.8472046852111816, "learning_rate": 0.00041462834624269603, "loss": 3.352, "step": 68950 }, { "epoch": 4.685079494496534, "grad_norm": 2.2252378463745117, "learning_rate": 0.0004145858812338633, "loss": 3.4103, "step": 68955 }, { "epoch": 4.6854192145671965, "grad_norm": 1.94815194606781, "learning_rate": 0.00041454341622503053, "loss": 3.2972, "step": 68960 }, { "epoch": 4.6857589346378585, "grad_norm": 1.8557243347167969, "learning_rate": 0.00041450095121619787, "loss": 3.3782, "step": 68965 }, { "epoch": 4.68609865470852, "grad_norm": 1.7368355989456177, "learning_rate": 0.00041445848620736515, "loss": 3.1916, "step": 68970 }, { "epoch": 4.686438374779182, "grad_norm": 2.047997236251831, "learning_rate": 0.00041441602119853243, "loss": 3.4225, "step": 68975 }, { "epoch": 4.686778094849844, "grad_norm": 1.414310336112976, "learning_rate": 0.0004143735561896997, "loss": 3.5435, "step": 68980 }, { "epoch": 4.687117814920505, "grad_norm": 1.5623890161514282, "learning_rate": 0.000414331091180867, "loss": 3.0923, "step": 68985 }, { "epoch": 4.687457534991167, "grad_norm": 1.7098056077957153, "learning_rate": 0.00041428862617203427, "loss": 3.394, "step": 68990 }, { "epoch": 4.687797255061829, "grad_norm": 1.9535499811172485, "learning_rate": 0.0004142461611632015, "loss": 3.6342, "step": 68995 }, { "epoch": 4.68813697513249, "grad_norm": 2.279587984085083, "learning_rate": 0.00041420369615436883, "loss": 3.2649, "step": 69000 }, { "epoch": 4.6884766952031525, "grad_norm": 1.831976056098938, "learning_rate": 0.0004141612311455361, "loss": 3.6055, "step": 69005 }, { "epoch": 4.6888164152738145, "grad_norm": 1.8495012521743774, "learning_rate": 0.00041411876613670334, "loss": 3.5017, "step": 69010 }, { "epoch": 4.689156135344476, 
"grad_norm": 1.6009730100631714, "learning_rate": 0.00041407630112787067, "loss": 3.6452, "step": 69015 }, { "epoch": 4.689495855415138, "grad_norm": 2.2363574504852295, "learning_rate": 0.00041403383611903795, "loss": 3.4677, "step": 69020 }, { "epoch": 4.6898355754858, "grad_norm": 2.294581651687622, "learning_rate": 0.0004139913711102052, "loss": 3.5391, "step": 69025 }, { "epoch": 4.690175295556461, "grad_norm": 1.6415718793869019, "learning_rate": 0.00041394890610137246, "loss": 3.6909, "step": 69030 }, { "epoch": 4.690515015627123, "grad_norm": 1.463250756263733, "learning_rate": 0.0004139064410925398, "loss": 3.25, "step": 69035 }, { "epoch": 4.690854735697785, "grad_norm": 1.6948063373565674, "learning_rate": 0.000413863976083707, "loss": 3.1573, "step": 69040 }, { "epoch": 4.691194455768446, "grad_norm": 1.9006787538528442, "learning_rate": 0.0004138215110748743, "loss": 3.3583, "step": 69045 }, { "epoch": 4.6915341758391085, "grad_norm": 2.815253496170044, "learning_rate": 0.00041377904606604163, "loss": 3.4263, "step": 69050 }, { "epoch": 4.6918738959097706, "grad_norm": 2.051082134246826, "learning_rate": 0.00041373658105720886, "loss": 3.508, "step": 69055 }, { "epoch": 4.692213615980432, "grad_norm": 2.2257773876190186, "learning_rate": 0.00041369411604837614, "loss": 3.4843, "step": 69060 }, { "epoch": 4.692553336051094, "grad_norm": 1.63536536693573, "learning_rate": 0.0004136516510395434, "loss": 3.3622, "step": 69065 }, { "epoch": 4.692893056121756, "grad_norm": 1.8294527530670166, "learning_rate": 0.0004136091860307107, "loss": 3.4814, "step": 69070 }, { "epoch": 4.693232776192417, "grad_norm": 1.964123010635376, "learning_rate": 0.000413566721021878, "loss": 3.7005, "step": 69075 }, { "epoch": 4.693572496263079, "grad_norm": 1.6497377157211304, "learning_rate": 0.00041352425601304526, "loss": 3.4612, "step": 69080 }, { "epoch": 4.693912216333741, "grad_norm": 1.8151730298995972, "learning_rate": 0.00041348179100421254, "loss": 3.5963, "step": 
69085 }, { "epoch": 4.694251936404402, "grad_norm": 2.051933526992798, "learning_rate": 0.0004134393259953798, "loss": 3.1153, "step": 69090 }, { "epoch": 4.6945916564750645, "grad_norm": 1.6152883768081665, "learning_rate": 0.0004133968609865471, "loss": 3.5633, "step": 69095 }, { "epoch": 4.694931376545727, "grad_norm": 1.776041865348816, "learning_rate": 0.0004133543959777143, "loss": 3.4997, "step": 69100 }, { "epoch": 4.695271096616388, "grad_norm": 1.6456876993179321, "learning_rate": 0.00041331193096888166, "loss": 3.4614, "step": 69105 }, { "epoch": 4.69561081668705, "grad_norm": 1.8102326393127441, "learning_rate": 0.00041326946596004894, "loss": 3.3703, "step": 69110 }, { "epoch": 4.695950536757712, "grad_norm": 1.9507838487625122, "learning_rate": 0.00041322700095121616, "loss": 3.4692, "step": 69115 }, { "epoch": 4.696290256828373, "grad_norm": 1.5315790176391602, "learning_rate": 0.0004131845359423835, "loss": 3.6302, "step": 69120 }, { "epoch": 4.696629976899035, "grad_norm": 1.8579208850860596, "learning_rate": 0.0004131420709335508, "loss": 3.3963, "step": 69125 }, { "epoch": 4.696969696969697, "grad_norm": 2.420931339263916, "learning_rate": 0.000413099605924718, "loss": 3.1474, "step": 69130 }, { "epoch": 4.697309417040358, "grad_norm": 1.457452654838562, "learning_rate": 0.0004130571409158853, "loss": 3.4223, "step": 69135 }, { "epoch": 4.6976491371110205, "grad_norm": 1.682785153388977, "learning_rate": 0.0004130146759070526, "loss": 3.274, "step": 69140 }, { "epoch": 4.697988857181683, "grad_norm": 1.9290789365768433, "learning_rate": 0.0004129722108982199, "loss": 3.2791, "step": 69145 }, { "epoch": 4.698328577252344, "grad_norm": 1.7186654806137085, "learning_rate": 0.0004129297458893871, "loss": 3.5819, "step": 69150 }, { "epoch": 4.698668297323006, "grad_norm": 2.0521039962768555, "learning_rate": 0.00041288728088055446, "loss": 3.4776, "step": 69155 }, { "epoch": 4.699008017393668, "grad_norm": 1.4899585247039795, "learning_rate": 
0.00041284481587172174, "loss": 3.438, "step": 69160 }, { "epoch": 4.699347737464329, "grad_norm": 1.9241822957992554, "learning_rate": 0.00041280235086288896, "loss": 3.2742, "step": 69165 }, { "epoch": 4.699687457534991, "grad_norm": 1.7559031248092651, "learning_rate": 0.0004127598858540563, "loss": 3.3317, "step": 69170 }, { "epoch": 4.700027177605653, "grad_norm": 1.743762493133545, "learning_rate": 0.0004127174208452236, "loss": 3.2462, "step": 69175 }, { "epoch": 4.700366897676314, "grad_norm": 1.758573055267334, "learning_rate": 0.0004126749558363908, "loss": 3.2693, "step": 69180 }, { "epoch": 4.7007066177469765, "grad_norm": 1.783523678779602, "learning_rate": 0.0004126324908275581, "loss": 3.2534, "step": 69185 }, { "epoch": 4.701046337817639, "grad_norm": 2.3286454677581787, "learning_rate": 0.0004125900258187254, "loss": 3.0247, "step": 69190 }, { "epoch": 4.7013860578883, "grad_norm": 1.5746108293533325, "learning_rate": 0.00041254756080989264, "loss": 3.601, "step": 69195 }, { "epoch": 4.701725777958962, "grad_norm": 1.8437395095825195, "learning_rate": 0.0004125050958010599, "loss": 3.498, "step": 69200 }, { "epoch": 4.702065498029624, "grad_norm": 2.043131113052368, "learning_rate": 0.00041246263079222726, "loss": 3.4618, "step": 69205 }, { "epoch": 4.702405218100285, "grad_norm": 1.6364423036575317, "learning_rate": 0.0004124201657833945, "loss": 3.3659, "step": 69210 }, { "epoch": 4.702744938170947, "grad_norm": 2.658245801925659, "learning_rate": 0.00041237770077456176, "loss": 3.3997, "step": 69215 }, { "epoch": 4.703084658241609, "grad_norm": 2.3597412109375, "learning_rate": 0.00041233523576572904, "loss": 3.5269, "step": 69220 }, { "epoch": 4.7034243783122704, "grad_norm": 2.10456919670105, "learning_rate": 0.0004122927707568963, "loss": 3.4752, "step": 69225 }, { "epoch": 4.7037640983829325, "grad_norm": 1.6193040609359741, "learning_rate": 0.0004122503057480636, "loss": 3.482, "step": 69230 }, { "epoch": 4.704103818453595, "grad_norm": 
1.8707916736602783, "learning_rate": 0.0004122078407392309, "loss": 3.3727, "step": 69235 }, { "epoch": 4.704443538524256, "grad_norm": 1.8020447492599487, "learning_rate": 0.00041216537573039816, "loss": 3.3728, "step": 69240 }, { "epoch": 4.704783258594918, "grad_norm": 1.9869595766067505, "learning_rate": 0.00041212291072156544, "loss": 3.5605, "step": 69245 }, { "epoch": 4.70512297866558, "grad_norm": 1.7418726682662964, "learning_rate": 0.0004120804457127327, "loss": 3.3577, "step": 69250 }, { "epoch": 4.705462698736241, "grad_norm": 1.9024364948272705, "learning_rate": 0.00041203798070389995, "loss": 3.4503, "step": 69255 }, { "epoch": 4.705802418806903, "grad_norm": 2.3310132026672363, "learning_rate": 0.0004119955156950673, "loss": 3.4907, "step": 69260 }, { "epoch": 4.706142138877565, "grad_norm": 1.9031482934951782, "learning_rate": 0.00041195305068623457, "loss": 3.2515, "step": 69265 }, { "epoch": 4.7064818589482265, "grad_norm": 1.4299376010894775, "learning_rate": 0.0004119105856774018, "loss": 3.4973, "step": 69270 }, { "epoch": 4.7068215790188885, "grad_norm": 1.901286005973816, "learning_rate": 0.0004118681206685691, "loss": 3.2886, "step": 69275 }, { "epoch": 4.707161299089551, "grad_norm": 1.6656187772750854, "learning_rate": 0.0004118256556597364, "loss": 3.1957, "step": 69280 }, { "epoch": 4.707501019160212, "grad_norm": 2.103726863861084, "learning_rate": 0.00041178319065090363, "loss": 3.2881, "step": 69285 }, { "epoch": 4.707840739230874, "grad_norm": 2.046396493911743, "learning_rate": 0.0004117407256420709, "loss": 3.1999, "step": 69290 }, { "epoch": 4.708180459301536, "grad_norm": 2.2051663398742676, "learning_rate": 0.00041169826063323825, "loss": 3.3111, "step": 69295 }, { "epoch": 4.708520179372197, "grad_norm": 2.007951021194458, "learning_rate": 0.00041165579562440547, "loss": 3.5098, "step": 69300 }, { "epoch": 4.708859899442859, "grad_norm": 1.6904029846191406, "learning_rate": 0.00041161333061557275, "loss": 3.5693, "step": 69305 
}, { "epoch": 4.709199619513521, "grad_norm": 2.0424575805664062, "learning_rate": 0.0004115708656067401, "loss": 3.4093, "step": 69310 }, { "epoch": 4.7095393395841825, "grad_norm": 1.3787857294082642, "learning_rate": 0.00041152840059790737, "loss": 3.4171, "step": 69315 }, { "epoch": 4.7098790596548445, "grad_norm": 2.077287435531616, "learning_rate": 0.0004114859355890746, "loss": 3.3914, "step": 69320 }, { "epoch": 4.710218779725507, "grad_norm": 1.4835717678070068, "learning_rate": 0.00041144347058024187, "loss": 3.357, "step": 69325 }, { "epoch": 4.710558499796168, "grad_norm": 2.0761969089508057, "learning_rate": 0.0004114010055714092, "loss": 3.3456, "step": 69330 }, { "epoch": 4.71089821986683, "grad_norm": 2.302734375, "learning_rate": 0.00041135854056257643, "loss": 3.239, "step": 69335 }, { "epoch": 4.711237939937492, "grad_norm": 1.6263446807861328, "learning_rate": 0.0004113160755537437, "loss": 3.7164, "step": 69340 }, { "epoch": 4.711577660008153, "grad_norm": 2.1711814403533936, "learning_rate": 0.00041127361054491105, "loss": 3.5744, "step": 69345 }, { "epoch": 4.711917380078815, "grad_norm": 2.039222240447998, "learning_rate": 0.00041123114553607827, "loss": 3.1162, "step": 69350 }, { "epoch": 4.712257100149477, "grad_norm": 1.84600031375885, "learning_rate": 0.00041118868052724555, "loss": 3.2381, "step": 69355 }, { "epoch": 4.7125968202201385, "grad_norm": 1.4473155736923218, "learning_rate": 0.00041114621551841283, "loss": 3.3439, "step": 69360 }, { "epoch": 4.712936540290801, "grad_norm": 1.3667057752609253, "learning_rate": 0.0004111037505095801, "loss": 3.3452, "step": 69365 }, { "epoch": 4.713276260361463, "grad_norm": 1.9085266590118408, "learning_rate": 0.0004110612855007474, "loss": 3.3918, "step": 69370 }, { "epoch": 4.713615980432124, "grad_norm": 2.3679778575897217, "learning_rate": 0.00041101882049191467, "loss": 3.3684, "step": 69375 }, { "epoch": 4.713955700502786, "grad_norm": 1.5798060894012451, "learning_rate": 
0.00041097635548308195, "loss": 3.3723, "step": 69380 }, { "epoch": 4.714295420573448, "grad_norm": 1.6402431726455688, "learning_rate": 0.00041093389047424923, "loss": 3.2181, "step": 69385 }, { "epoch": 4.714635140644109, "grad_norm": 1.6921836137771606, "learning_rate": 0.0004108914254654165, "loss": 3.1149, "step": 69390 }, { "epoch": 4.714974860714771, "grad_norm": 1.9426894187927246, "learning_rate": 0.00041084896045658374, "loss": 3.5319, "step": 69395 }, { "epoch": 4.715314580785432, "grad_norm": 1.6908318996429443, "learning_rate": 0.0004108064954477511, "loss": 3.3419, "step": 69400 }, { "epoch": 4.7156543008560945, "grad_norm": 2.363269329071045, "learning_rate": 0.00041076403043891835, "loss": 3.5244, "step": 69405 }, { "epoch": 4.715994020926757, "grad_norm": 2.073765277862549, "learning_rate": 0.0004107215654300856, "loss": 3.4248, "step": 69410 }, { "epoch": 4.716333740997418, "grad_norm": 1.9019970893859863, "learning_rate": 0.0004106791004212529, "loss": 3.2546, "step": 69415 }, { "epoch": 4.71667346106808, "grad_norm": 3.0158135890960693, "learning_rate": 0.0004106366354124202, "loss": 3.4279, "step": 69420 }, { "epoch": 4.717013181138742, "grad_norm": 1.5459511280059814, "learning_rate": 0.0004105941704035874, "loss": 3.4782, "step": 69425 }, { "epoch": 4.717352901209403, "grad_norm": 1.5026739835739136, "learning_rate": 0.00041055170539475475, "loss": 3.337, "step": 69430 }, { "epoch": 4.717692621280065, "grad_norm": 1.5157088041305542, "learning_rate": 0.00041050924038592203, "loss": 3.3747, "step": 69435 }, { "epoch": 4.718032341350727, "grad_norm": 1.7242134809494019, "learning_rate": 0.00041046677537708926, "loss": 3.1092, "step": 69440 }, { "epoch": 4.718372061421388, "grad_norm": 2.2528162002563477, "learning_rate": 0.00041042431036825654, "loss": 3.4578, "step": 69445 }, { "epoch": 4.7187117814920505, "grad_norm": 1.6989340782165527, "learning_rate": 0.0004103818453594239, "loss": 3.4779, "step": 69450 }, { "epoch": 4.719051501562713, 
"grad_norm": 2.105489492416382, "learning_rate": 0.0004103393803505911, "loss": 2.9768, "step": 69455 }, { "epoch": 4.719391221633374, "grad_norm": 2.437474012374878, "learning_rate": 0.0004102969153417584, "loss": 3.3048, "step": 69460 }, { "epoch": 4.719730941704036, "grad_norm": 2.0718600749969482, "learning_rate": 0.0004102544503329257, "loss": 3.3952, "step": 69465 }, { "epoch": 4.720070661774698, "grad_norm": 1.9895085096359253, "learning_rate": 0.00041021198532409294, "loss": 3.456, "step": 69470 }, { "epoch": 4.720410381845359, "grad_norm": 1.9531688690185547, "learning_rate": 0.0004101695203152602, "loss": 3.2797, "step": 69475 }, { "epoch": 4.720750101916021, "grad_norm": 1.5387303829193115, "learning_rate": 0.0004101270553064275, "loss": 3.3392, "step": 69480 }, { "epoch": 4.721089821986683, "grad_norm": 2.1231679916381836, "learning_rate": 0.00041008459029759483, "loss": 3.1116, "step": 69485 }, { "epoch": 4.721429542057344, "grad_norm": 1.5785044431686401, "learning_rate": 0.00041004212528876206, "loss": 3.2295, "step": 69490 }, { "epoch": 4.7217692621280065, "grad_norm": 1.7312052249908447, "learning_rate": 0.00040999966027992934, "loss": 3.4508, "step": 69495 }, { "epoch": 4.722108982198669, "grad_norm": 1.8791983127593994, "learning_rate": 0.0004099571952710967, "loss": 3.4536, "step": 69500 }, { "epoch": 4.72244870226933, "grad_norm": 1.530639410018921, "learning_rate": 0.0004099147302622639, "loss": 3.509, "step": 69505 }, { "epoch": 4.722788422339992, "grad_norm": 1.915970802307129, "learning_rate": 0.0004098722652534312, "loss": 3.5077, "step": 69510 }, { "epoch": 4.723128142410654, "grad_norm": 2.2748026847839355, "learning_rate": 0.00040982980024459846, "loss": 3.4056, "step": 69515 }, { "epoch": 4.723467862481315, "grad_norm": 1.9730260372161865, "learning_rate": 0.00040978733523576574, "loss": 3.33, "step": 69520 }, { "epoch": 4.723807582551977, "grad_norm": 1.5118979215621948, "learning_rate": 0.000409744870226933, "loss": 3.2672, "step": 
69525 }, { "epoch": 4.724147302622639, "grad_norm": 2.13442325592041, "learning_rate": 0.0004097024052181003, "loss": 3.4256, "step": 69530 }, { "epoch": 4.7244870226933005, "grad_norm": 1.950814962387085, "learning_rate": 0.0004096599402092676, "loss": 3.2158, "step": 69535 }, { "epoch": 4.7248267427639625, "grad_norm": 1.5229018926620483, "learning_rate": 0.00040961747520043486, "loss": 3.1683, "step": 69540 }, { "epoch": 4.725166462834625, "grad_norm": 2.176461935043335, "learning_rate": 0.00040957501019160214, "loss": 3.4631, "step": 69545 }, { "epoch": 4.725506182905286, "grad_norm": 1.4216055870056152, "learning_rate": 0.00040953254518276937, "loss": 3.6752, "step": 69550 }, { "epoch": 4.725845902975948, "grad_norm": 1.4999661445617676, "learning_rate": 0.0004094900801739367, "loss": 3.1983, "step": 69555 }, { "epoch": 4.72618562304661, "grad_norm": 1.8018425703048706, "learning_rate": 0.000409447615165104, "loss": 3.1492, "step": 69560 }, { "epoch": 4.726525343117271, "grad_norm": 2.177997350692749, "learning_rate": 0.0004094051501562712, "loss": 3.4787, "step": 69565 }, { "epoch": 4.726865063187933, "grad_norm": 1.838706374168396, "learning_rate": 0.00040936268514743854, "loss": 3.323, "step": 69570 }, { "epoch": 4.727204783258595, "grad_norm": 1.6564315557479858, "learning_rate": 0.0004093202201386058, "loss": 3.2003, "step": 69575 }, { "epoch": 4.7275445033292565, "grad_norm": 1.9947818517684937, "learning_rate": 0.00040927775512977305, "loss": 3.401, "step": 69580 }, { "epoch": 4.7278842233999185, "grad_norm": 2.1436116695404053, "learning_rate": 0.00040923529012094033, "loss": 3.3764, "step": 69585 }, { "epoch": 4.728223943470581, "grad_norm": 1.880092740058899, "learning_rate": 0.00040919282511210766, "loss": 3.3966, "step": 69590 }, { "epoch": 4.728563663541242, "grad_norm": 2.283696174621582, "learning_rate": 0.0004091503601032749, "loss": 3.3541, "step": 69595 }, { "epoch": 4.728903383611904, "grad_norm": 1.755857229232788, "learning_rate": 
0.00040910789509444217, "loss": 3.3429, "step": 69600 }, { "epoch": 4.729243103682566, "grad_norm": 1.6513981819152832, "learning_rate": 0.0004090654300856095, "loss": 3.1896, "step": 69605 }, { "epoch": 4.729582823753227, "grad_norm": 1.7121281623840332, "learning_rate": 0.00040902296507677673, "loss": 3.4856, "step": 69610 }, { "epoch": 4.729922543823889, "grad_norm": 2.045811891555786, "learning_rate": 0.000408980500067944, "loss": 3.2646, "step": 69615 }, { "epoch": 4.73026226389455, "grad_norm": 1.693510890007019, "learning_rate": 0.0004089380350591113, "loss": 3.4193, "step": 69620 }, { "epoch": 4.7306019839652125, "grad_norm": 1.4734690189361572, "learning_rate": 0.00040889557005027857, "loss": 3.2709, "step": 69625 }, { "epoch": 4.7309417040358746, "grad_norm": 1.6873708963394165, "learning_rate": 0.00040885310504144585, "loss": 3.4827, "step": 69630 }, { "epoch": 4.731281424106536, "grad_norm": 1.7977535724639893, "learning_rate": 0.00040881064003261313, "loss": 3.1921, "step": 69635 }, { "epoch": 4.731621144177198, "grad_norm": 2.01056170463562, "learning_rate": 0.0004087681750237804, "loss": 3.4483, "step": 69640 }, { "epoch": 4.73196086424786, "grad_norm": 1.720325231552124, "learning_rate": 0.0004087257100149477, "loss": 3.3096, "step": 69645 }, { "epoch": 4.732300584318521, "grad_norm": 1.5993692874908447, "learning_rate": 0.00040868324500611497, "loss": 3.2346, "step": 69650 }, { "epoch": 4.732640304389183, "grad_norm": 1.8914737701416016, "learning_rate": 0.00040864077999728225, "loss": 3.3616, "step": 69655 }, { "epoch": 4.732980024459845, "grad_norm": 1.7727282047271729, "learning_rate": 0.00040859831498844953, "loss": 3.3652, "step": 69660 }, { "epoch": 4.733319744530506, "grad_norm": 1.7006407976150513, "learning_rate": 0.0004085558499796168, "loss": 3.0029, "step": 69665 }, { "epoch": 4.7336594646011685, "grad_norm": 1.6759370565414429, "learning_rate": 0.0004085133849707841, "loss": 3.4589, "step": 69670 }, { "epoch": 4.733999184671831, 
"grad_norm": 2.1098885536193848, "learning_rate": 0.00040847091996195137, "loss": 3.5128, "step": 69675 }, { "epoch": 4.734338904742492, "grad_norm": 1.610954999923706, "learning_rate": 0.00040842845495311865, "loss": 3.3922, "step": 69680 }, { "epoch": 4.734678624813154, "grad_norm": 1.6208577156066895, "learning_rate": 0.00040838598994428593, "loss": 3.3436, "step": 69685 }, { "epoch": 4.735018344883816, "grad_norm": 1.6815295219421387, "learning_rate": 0.00040834352493545315, "loss": 3.6018, "step": 69690 }, { "epoch": 4.735358064954477, "grad_norm": 1.9170420169830322, "learning_rate": 0.0004083010599266205, "loss": 3.3507, "step": 69695 }, { "epoch": 4.735697785025139, "grad_norm": 1.8761235475540161, "learning_rate": 0.00040825859491778777, "loss": 3.2222, "step": 69700 }, { "epoch": 4.736037505095801, "grad_norm": 1.8794556856155396, "learning_rate": 0.000408216129908955, "loss": 3.2966, "step": 69705 }, { "epoch": 4.736377225166462, "grad_norm": 2.4968392848968506, "learning_rate": 0.00040817366490012233, "loss": 3.4488, "step": 69710 }, { "epoch": 4.7367169452371245, "grad_norm": 1.9485913515090942, "learning_rate": 0.0004081311998912896, "loss": 3.4783, "step": 69715 }, { "epoch": 4.737056665307787, "grad_norm": 2.147231101989746, "learning_rate": 0.00040808873488245684, "loss": 2.8919, "step": 69720 }, { "epoch": 4.737396385378448, "grad_norm": 1.5956259965896606, "learning_rate": 0.00040804626987362417, "loss": 3.7268, "step": 69725 }, { "epoch": 4.73773610544911, "grad_norm": 2.1148977279663086, "learning_rate": 0.00040800380486479145, "loss": 3.3628, "step": 69730 }, { "epoch": 4.738075825519772, "grad_norm": 1.7241601943969727, "learning_rate": 0.0004079613398559587, "loss": 3.2391, "step": 69735 }, { "epoch": 4.738415545590433, "grad_norm": 1.7595486640930176, "learning_rate": 0.00040791887484712596, "loss": 3.3955, "step": 69740 }, { "epoch": 4.738755265661095, "grad_norm": 2.1035866737365723, "learning_rate": 0.0004078764098382933, "loss": 3.5094, 
"step": 69745 }, { "epoch": 4.739094985731757, "grad_norm": 1.6618608236312866, "learning_rate": 0.0004078339448294605, "loss": 3.4038, "step": 69750 }, { "epoch": 4.739434705802418, "grad_norm": 1.9366649389266968, "learning_rate": 0.0004077914798206278, "loss": 3.5766, "step": 69755 }, { "epoch": 4.7397744258730805, "grad_norm": 3.1124038696289062, "learning_rate": 0.00040774901481179513, "loss": 3.3292, "step": 69760 }, { "epoch": 4.740114145943743, "grad_norm": 1.5189944505691528, "learning_rate": 0.00040770654980296236, "loss": 3.394, "step": 69765 }, { "epoch": 4.740453866014404, "grad_norm": 1.6033236980438232, "learning_rate": 0.00040766408479412964, "loss": 3.2811, "step": 69770 }, { "epoch": 4.740793586085066, "grad_norm": 2.0176799297332764, "learning_rate": 0.0004076216197852969, "loss": 3.359, "step": 69775 }, { "epoch": 4.741133306155728, "grad_norm": 2.594312906265259, "learning_rate": 0.0004075791547764642, "loss": 3.4052, "step": 69780 }, { "epoch": 4.741473026226389, "grad_norm": 1.8484188318252563, "learning_rate": 0.0004075366897676315, "loss": 3.3747, "step": 69785 }, { "epoch": 4.741812746297051, "grad_norm": 2.258113145828247, "learning_rate": 0.00040749422475879876, "loss": 3.3824, "step": 69790 }, { "epoch": 4.742152466367713, "grad_norm": 1.6181309223175049, "learning_rate": 0.00040745175974996604, "loss": 3.6216, "step": 69795 }, { "epoch": 4.7424921864383744, "grad_norm": 2.1212358474731445, "learning_rate": 0.0004074092947411333, "loss": 3.3249, "step": 69800 }, { "epoch": 4.7428319065090365, "grad_norm": 1.696053147315979, "learning_rate": 0.0004073668297323006, "loss": 3.2836, "step": 69805 }, { "epoch": 4.743171626579699, "grad_norm": 2.000816583633423, "learning_rate": 0.0004073243647234678, "loss": 3.4565, "step": 69810 }, { "epoch": 4.74351134665036, "grad_norm": 1.7864209413528442, "learning_rate": 0.00040728189971463516, "loss": 2.9158, "step": 69815 }, { "epoch": 4.743851066721022, "grad_norm": 1.6893154382705688, 
"learning_rate": 0.00040723943470580244, "loss": 3.309, "step": 69820 }, { "epoch": 4.744190786791684, "grad_norm": 1.7593358755111694, "learning_rate": 0.0004071969696969697, "loss": 3.2515, "step": 69825 }, { "epoch": 4.744530506862345, "grad_norm": 1.752711296081543, "learning_rate": 0.000407154504688137, "loss": 3.3148, "step": 69830 }, { "epoch": 4.744870226933007, "grad_norm": 2.016455888748169, "learning_rate": 0.0004071120396793043, "loss": 3.4576, "step": 69835 }, { "epoch": 4.745209947003669, "grad_norm": 1.8268719911575317, "learning_rate": 0.00040706957467047156, "loss": 3.1631, "step": 69840 }, { "epoch": 4.7455496670743305, "grad_norm": 1.949141502380371, "learning_rate": 0.0004070271096616388, "loss": 3.3488, "step": 69845 }, { "epoch": 4.7458893871449925, "grad_norm": 1.823012113571167, "learning_rate": 0.0004069846446528061, "loss": 3.4311, "step": 69850 }, { "epoch": 4.746229107215655, "grad_norm": 2.204578399658203, "learning_rate": 0.0004069421796439734, "loss": 3.5864, "step": 69855 }, { "epoch": 4.746568827286316, "grad_norm": 2.1630027294158936, "learning_rate": 0.0004068997146351406, "loss": 3.1619, "step": 69860 }, { "epoch": 4.746908547356978, "grad_norm": 1.6696327924728394, "learning_rate": 0.00040685724962630796, "loss": 3.4902, "step": 69865 }, { "epoch": 4.74724826742764, "grad_norm": 1.9900280237197876, "learning_rate": 0.00040681478461747524, "loss": 3.1938, "step": 69870 }, { "epoch": 4.747587987498301, "grad_norm": 1.9065765142440796, "learning_rate": 0.00040677231960864246, "loss": 3.2663, "step": 69875 }, { "epoch": 4.747927707568963, "grad_norm": 1.8455116748809814, "learning_rate": 0.00040672985459980974, "loss": 3.4561, "step": 69880 }, { "epoch": 4.748267427639625, "grad_norm": 1.9625481367111206, "learning_rate": 0.0004066873895909771, "loss": 3.4205, "step": 69885 }, { "epoch": 4.7486071477102865, "grad_norm": NaN, "learning_rate": 0.0004066534175839109, "loss": 3.611, "step": 69890 }, { "epoch": 4.7489468677809485, 
"grad_norm": 2.243345022201538, "learning_rate": 0.00040661095257507813, "loss": 3.3794, "step": 69895 }, { "epoch": 4.749286587851611, "grad_norm": 1.9109179973602295, "learning_rate": 0.0004065684875662454, "loss": 3.265, "step": 69900 }, { "epoch": 4.749626307922272, "grad_norm": 2.1686530113220215, "learning_rate": 0.00040652602255741274, "loss": 3.2694, "step": 69905 }, { "epoch": 4.749966027992934, "grad_norm": 1.6350294351577759, "learning_rate": 0.00040648355754857997, "loss": 3.4443, "step": 69910 }, { "epoch": 4.750305748063596, "grad_norm": 1.5481702089309692, "learning_rate": 0.00040644109253974725, "loss": 3.5377, "step": 69915 }, { "epoch": 4.750645468134257, "grad_norm": 1.9274671077728271, "learning_rate": 0.0004063986275309146, "loss": 3.3167, "step": 69920 }, { "epoch": 4.750985188204919, "grad_norm": 2.001687526702881, "learning_rate": 0.0004063561625220818, "loss": 3.2732, "step": 69925 }, { "epoch": 4.751324908275581, "grad_norm": 1.6696327924728394, "learning_rate": 0.0004063136975132491, "loss": 3.2477, "step": 69930 }, { "epoch": 4.7516646283462425, "grad_norm": 2.1980185508728027, "learning_rate": 0.00040627123250441637, "loss": 3.1882, "step": 69935 }, { "epoch": 4.7520043484169046, "grad_norm": 1.5852489471435547, "learning_rate": 0.00040622876749558365, "loss": 3.3618, "step": 69940 }, { "epoch": 4.752344068487567, "grad_norm": 2.0354011058807373, "learning_rate": 0.00040618630248675093, "loss": 3.6952, "step": 69945 }, { "epoch": 4.752683788558228, "grad_norm": 1.763509750366211, "learning_rate": 0.0004061438374779182, "loss": 3.3645, "step": 69950 }, { "epoch": 4.75302350862889, "grad_norm": 1.7231181859970093, "learning_rate": 0.00040610137246908543, "loss": 3.3459, "step": 69955 }, { "epoch": 4.753363228699552, "grad_norm": 2.2217531204223633, "learning_rate": 0.00040605890746025277, "loss": 3.438, "step": 69960 }, { "epoch": 4.753702948770213, "grad_norm": 1.7965847253799438, "learning_rate": 0.00040601644245142005, "loss": 3.5674, 
"step": 69965 }, { "epoch": 4.754042668840875, "grad_norm": 1.8041859865188599, "learning_rate": 0.0004059739774425873, "loss": 3.3926, "step": 69970 }, { "epoch": 4.754382388911537, "grad_norm": 2.5558838844299316, "learning_rate": 0.0004059315124337546, "loss": 3.2003, "step": 69975 }, { "epoch": 4.7547221089821985, "grad_norm": 1.9722031354904175, "learning_rate": 0.0004058890474249219, "loss": 3.3882, "step": 69980 }, { "epoch": 4.755061829052861, "grad_norm": 2.0281498432159424, "learning_rate": 0.0004058465824160891, "loss": 3.351, "step": 69985 }, { "epoch": 4.755401549123523, "grad_norm": 1.6038703918457031, "learning_rate": 0.00040580411740725645, "loss": 3.2669, "step": 69990 }, { "epoch": 4.755741269194184, "grad_norm": 2.4168426990509033, "learning_rate": 0.00040576165239842373, "loss": 3.1961, "step": 69995 }, { "epoch": 4.756080989264846, "grad_norm": 2.2601354122161865, "learning_rate": 0.00040571918738959096, "loss": 3.288, "step": 70000 }, { "epoch": 4.756420709335508, "grad_norm": 2.246244192123413, "learning_rate": 0.00040567672238075824, "loss": 3.7555, "step": 70005 }, { "epoch": 4.756760429406169, "grad_norm": 1.6520999670028687, "learning_rate": 0.00040563425737192557, "loss": 3.3789, "step": 70010 }, { "epoch": 4.757100149476831, "grad_norm": 1.6246923208236694, "learning_rate": 0.0004055917923630928, "loss": 3.3489, "step": 70015 }, { "epoch": 4.757439869547493, "grad_norm": 1.5502358675003052, "learning_rate": 0.0004055493273542601, "loss": 3.3411, "step": 70020 }, { "epoch": 4.7577795896181545, "grad_norm": 2.4307315349578857, "learning_rate": 0.0004055068623454274, "loss": 3.3505, "step": 70025 }, { "epoch": 4.758119309688817, "grad_norm": 1.9837192296981812, "learning_rate": 0.0004054643973365947, "loss": 3.3077, "step": 70030 }, { "epoch": 4.758459029759479, "grad_norm": 1.7243902683258057, "learning_rate": 0.0004054219323277619, "loss": 3.6104, "step": 70035 }, { "epoch": 4.75879874983014, "grad_norm": 1.8387359380722046, 
"learning_rate": 0.0004053794673189292, "loss": 3.503, "step": 70040 }, { "epoch": 4.759138469900802, "grad_norm": 1.746579647064209, "learning_rate": 0.00040533700231009653, "loss": 3.5462, "step": 70045 }, { "epoch": 4.759478189971464, "grad_norm": 1.3832635879516602, "learning_rate": 0.00040529453730126376, "loss": 2.9525, "step": 70050 }, { "epoch": 4.759817910042125, "grad_norm": 1.5005881786346436, "learning_rate": 0.00040525207229243104, "loss": 3.5338, "step": 70055 }, { "epoch": 4.760157630112787, "grad_norm": 1.4505090713500977, "learning_rate": 0.00040520960728359837, "loss": 3.1943, "step": 70060 }, { "epoch": 4.760497350183449, "grad_norm": 2.0439279079437256, "learning_rate": 0.0004051671422747656, "loss": 3.2782, "step": 70065 }, { "epoch": 4.7608370702541105, "grad_norm": 2.014866828918457, "learning_rate": 0.0004051246772659329, "loss": 3.6104, "step": 70070 }, { "epoch": 4.761176790324773, "grad_norm": 2.1810238361358643, "learning_rate": 0.00040508221225710016, "loss": 3.3206, "step": 70075 }, { "epoch": 4.761516510395434, "grad_norm": 1.6849279403686523, "learning_rate": 0.00040503974724826744, "loss": 3.4636, "step": 70080 }, { "epoch": 4.761856230466096, "grad_norm": 1.7457683086395264, "learning_rate": 0.0004049972822394347, "loss": 3.341, "step": 70085 }, { "epoch": 4.762195950536758, "grad_norm": 1.532480001449585, "learning_rate": 0.000404954817230602, "loss": 3.0546, "step": 70090 }, { "epoch": 4.762535670607419, "grad_norm": 2.297955274581909, "learning_rate": 0.0004049123522217693, "loss": 3.3063, "step": 70095 }, { "epoch": 4.762875390678081, "grad_norm": 1.6886123418807983, "learning_rate": 0.00040486988721293656, "loss": 3.4323, "step": 70100 }, { "epoch": 4.763215110748743, "grad_norm": 1.516772985458374, "learning_rate": 0.00040482742220410384, "loss": 3.3587, "step": 70105 }, { "epoch": 4.7635548308194045, "grad_norm": 1.5898512601852417, "learning_rate": 0.00040478495719527106, "loss": 3.0097, "step": 70110 }, { "epoch": 
4.7638945508900665, "grad_norm": 2.104702949523926, "learning_rate": 0.0004047424921864384, "loss": 3.2867, "step": 70115 }, { "epoch": 4.764234270960729, "grad_norm": 2.1887474060058594, "learning_rate": 0.0004047000271776057, "loss": 3.3502, "step": 70120 }, { "epoch": 4.76457399103139, "grad_norm": 1.64860999584198, "learning_rate": 0.0004046575621687729, "loss": 3.3571, "step": 70125 }, { "epoch": 4.764913711102052, "grad_norm": 1.8945727348327637, "learning_rate": 0.00040461509715994024, "loss": 3.1677, "step": 70130 }, { "epoch": 4.765253431172714, "grad_norm": 2.00384521484375, "learning_rate": 0.0004045726321511075, "loss": 3.6064, "step": 70135 }, { "epoch": 4.765593151243375, "grad_norm": 1.8958226442337036, "learning_rate": 0.00040453016714227474, "loss": 3.3917, "step": 70140 }, { "epoch": 4.765932871314037, "grad_norm": 1.6666712760925293, "learning_rate": 0.000404487702133442, "loss": 3.2955, "step": 70145 }, { "epoch": 4.766272591384699, "grad_norm": 1.8476029634475708, "learning_rate": 0.00040444523712460936, "loss": 3.238, "step": 70150 }, { "epoch": 4.7666123114553605, "grad_norm": 1.6262047290802002, "learning_rate": 0.0004044027721157766, "loss": 3.2336, "step": 70155 }, { "epoch": 4.7669520315260225, "grad_norm": 1.9467822313308716, "learning_rate": 0.00040436030710694386, "loss": 3.502, "step": 70160 }, { "epoch": 4.767291751596685, "grad_norm": 1.6275449991226196, "learning_rate": 0.0004043178420981112, "loss": 3.5142, "step": 70165 }, { "epoch": 4.767631471667346, "grad_norm": 2.1270201206207275, "learning_rate": 0.0004042753770892784, "loss": 3.554, "step": 70170 }, { "epoch": 4.767971191738008, "grad_norm": 1.4776325225830078, "learning_rate": 0.0004042329120804457, "loss": 3.2936, "step": 70175 }, { "epoch": 4.76831091180867, "grad_norm": 1.729465365409851, "learning_rate": 0.000404190447071613, "loss": 3.6118, "step": 70180 }, { "epoch": 4.768650631879331, "grad_norm": 1.598802924156189, "learning_rate": 0.00040414798206278026, "loss": 
3.4548, "step": 70185 }, { "epoch": 4.768990351949993, "grad_norm": 1.8101774454116821, "learning_rate": 0.00040410551705394754, "loss": 3.5834, "step": 70190 }, { "epoch": 4.769330072020655, "grad_norm": 1.7429966926574707, "learning_rate": 0.0004040630520451148, "loss": 3.3164, "step": 70195 }, { "epoch": 4.7696697920913165, "grad_norm": 1.5512803792953491, "learning_rate": 0.00040402058703628216, "loss": 3.2504, "step": 70200 }, { "epoch": 4.7700095121619785, "grad_norm": 1.5295990705490112, "learning_rate": 0.0004039781220274494, "loss": 3.3957, "step": 70205 }, { "epoch": 4.770349232232641, "grad_norm": 1.2396293878555298, "learning_rate": 0.00040393565701861666, "loss": 3.2974, "step": 70210 }, { "epoch": 4.770688952303302, "grad_norm": 1.7677531242370605, "learning_rate": 0.000403893192009784, "loss": 3.3838, "step": 70215 }, { "epoch": 4.771028672373964, "grad_norm": 1.7276511192321777, "learning_rate": 0.0004038507270009512, "loss": 3.4104, "step": 70220 }, { "epoch": 4.771368392444626, "grad_norm": 1.7797421216964722, "learning_rate": 0.0004038082619921185, "loss": 3.3056, "step": 70225 }, { "epoch": 4.771708112515287, "grad_norm": 1.6038111448287964, "learning_rate": 0.0004037657969832858, "loss": 3.4165, "step": 70230 }, { "epoch": 4.772047832585949, "grad_norm": 1.9688832759857178, "learning_rate": 0.00040372333197445306, "loss": 3.2642, "step": 70235 }, { "epoch": 4.772387552656611, "grad_norm": 1.6824603080749512, "learning_rate": 0.00040368086696562034, "loss": 3.2049, "step": 70240 }, { "epoch": 4.7727272727272725, "grad_norm": 1.6301665306091309, "learning_rate": 0.0004036384019567876, "loss": 3.3614, "step": 70245 }, { "epoch": 4.773066992797935, "grad_norm": 1.5306568145751953, "learning_rate": 0.0004035959369479549, "loss": 3.3053, "step": 70250 }, { "epoch": 4.773406712868597, "grad_norm": 1.5466952323913574, "learning_rate": 0.0004035534719391222, "loss": 3.3977, "step": 70255 }, { "epoch": 4.773746432939258, "grad_norm": 1.756892204284668, 
"learning_rate": 0.00040351100693028947, "loss": 3.362, "step": 70260 }, { "epoch": 4.77408615300992, "grad_norm": 2.0084452629089355, "learning_rate": 0.0004034685419214567, "loss": 3.2965, "step": 70265 }, { "epoch": 4.774425873080582, "grad_norm": 2.0052084922790527, "learning_rate": 0.000403426076912624, "loss": 3.3338, "step": 70270 }, { "epoch": 4.774765593151243, "grad_norm": 1.6408500671386719, "learning_rate": 0.0004033836119037913, "loss": 3.5007, "step": 70275 }, { "epoch": 4.775105313221905, "grad_norm": 1.6486390829086304, "learning_rate": 0.00040334114689495853, "loss": 3.1903, "step": 70280 }, { "epoch": 4.775445033292567, "grad_norm": 1.5960370302200317, "learning_rate": 0.00040329868188612587, "loss": 3.5638, "step": 70285 }, { "epoch": 4.7757847533632285, "grad_norm": 1.530975341796875, "learning_rate": 0.00040325621687729315, "loss": 3.3863, "step": 70290 }, { "epoch": 4.776124473433891, "grad_norm": 2.0569307804107666, "learning_rate": 0.00040321375186846037, "loss": 3.5031, "step": 70295 }, { "epoch": 4.776464193504552, "grad_norm": 1.9910328388214111, "learning_rate": 0.00040317128685962765, "loss": 3.4837, "step": 70300 }, { "epoch": 4.776803913575214, "grad_norm": 2.389148712158203, "learning_rate": 0.000403128821850795, "loss": 3.6343, "step": 70305 }, { "epoch": 4.777143633645876, "grad_norm": 2.3371994495391846, "learning_rate": 0.0004030863568419622, "loss": 3.2974, "step": 70310 }, { "epoch": 4.777483353716537, "grad_norm": 1.842818260192871, "learning_rate": 0.0004030438918331295, "loss": 3.3631, "step": 70315 }, { "epoch": 4.777823073787199, "grad_norm": 1.6880311965942383, "learning_rate": 0.0004030014268242968, "loss": 3.5317, "step": 70320 }, { "epoch": 4.778162793857861, "grad_norm": 1.7438489198684692, "learning_rate": 0.00040295896181546405, "loss": 3.3429, "step": 70325 }, { "epoch": 4.778502513928522, "grad_norm": 1.8618156909942627, "learning_rate": 0.00040291649680663133, "loss": 3.2873, "step": 70330 }, { "epoch": 
4.7788422339991845, "grad_norm": 1.8323851823806763, "learning_rate": 0.0004028740317977986, "loss": 3.4562, "step": 70335 }, { "epoch": 4.779181954069847, "grad_norm": 2.0760693550109863, "learning_rate": 0.0004028315667889659, "loss": 3.2733, "step": 70340 }, { "epoch": 4.779521674140508, "grad_norm": 1.9732518196105957, "learning_rate": 0.00040278910178013317, "loss": 3.3907, "step": 70345 }, { "epoch": 4.77986139421117, "grad_norm": 1.6096289157867432, "learning_rate": 0.00040274663677130045, "loss": 3.1965, "step": 70350 }, { "epoch": 4.780201114281832, "grad_norm": 1.8778021335601807, "learning_rate": 0.00040270417176246773, "loss": 3.5529, "step": 70355 }, { "epoch": 4.780540834352493, "grad_norm": 1.3933789730072021, "learning_rate": 0.000402661706753635, "loss": 3.4269, "step": 70360 }, { "epoch": 4.780880554423155, "grad_norm": 2.3877627849578857, "learning_rate": 0.0004026192417448023, "loss": 3.1347, "step": 70365 }, { "epoch": 4.781220274493817, "grad_norm": 1.531680941581726, "learning_rate": 0.00040257677673596957, "loss": 3.526, "step": 70370 }, { "epoch": 4.7815599945644784, "grad_norm": 1.9493160247802734, "learning_rate": 0.00040253431172713685, "loss": 3.4623, "step": 70375 }, { "epoch": 4.7818997146351405, "grad_norm": 2.0070443153381348, "learning_rate": 0.00040249184671830413, "loss": 3.2098, "step": 70380 }, { "epoch": 4.782239434705803, "grad_norm": 1.718497395515442, "learning_rate": 0.0004024493817094714, "loss": 3.5502, "step": 70385 }, { "epoch": 4.782579154776464, "grad_norm": 1.7282086610794067, "learning_rate": 0.0004024069167006387, "loss": 3.4383, "step": 70390 }, { "epoch": 4.782918874847126, "grad_norm": 1.966155767440796, "learning_rate": 0.00040236445169180597, "loss": 3.2695, "step": 70395 }, { "epoch": 4.783258594917788, "grad_norm": 1.839410424232483, "learning_rate": 0.00040232198668297325, "loss": 3.149, "step": 70400 }, { "epoch": 4.783598314988449, "grad_norm": 2.033174514770508, "learning_rate": 0.0004022795216741405, 
"loss": 3.5285, "step": 70405 }, { "epoch": 4.783938035059111, "grad_norm": 1.5360136032104492, "learning_rate": 0.0004022370566653078, "loss": 3.5298, "step": 70410 }, { "epoch": 4.784277755129773, "grad_norm": 1.7784773111343384, "learning_rate": 0.0004021945916564751, "loss": 3.5129, "step": 70415 }, { "epoch": 4.7846174752004345, "grad_norm": 1.8724178075790405, "learning_rate": 0.0004021521266476423, "loss": 3.3372, "step": 70420 }, { "epoch": 4.7849571952710965, "grad_norm": 1.7921134233474731, "learning_rate": 0.00040210966163880965, "loss": 3.1654, "step": 70425 }, { "epoch": 4.785296915341759, "grad_norm": 1.652788519859314, "learning_rate": 0.00040206719662997693, "loss": 3.5335, "step": 70430 }, { "epoch": 4.78563663541242, "grad_norm": 1.8845878839492798, "learning_rate": 0.00040202473162114416, "loss": 3.4357, "step": 70435 }, { "epoch": 4.785976355483082, "grad_norm": 2.3442163467407227, "learning_rate": 0.00040198226661231144, "loss": 3.1721, "step": 70440 }, { "epoch": 4.786316075553744, "grad_norm": 1.969162106513977, "learning_rate": 0.0004019398016034788, "loss": 3.3224, "step": 70445 }, { "epoch": 4.786655795624405, "grad_norm": 1.7513467073440552, "learning_rate": 0.000401897336594646, "loss": 3.3925, "step": 70450 }, { "epoch": 4.786995515695067, "grad_norm": 1.748099684715271, "learning_rate": 0.0004018548715858133, "loss": 3.1311, "step": 70455 }, { "epoch": 4.787335235765729, "grad_norm": 1.9364237785339355, "learning_rate": 0.0004018124065769806, "loss": 3.372, "step": 70460 }, { "epoch": 4.7876749558363905, "grad_norm": 1.9458074569702148, "learning_rate": 0.00040176994156814784, "loss": 3.3097, "step": 70465 }, { "epoch": 4.7880146759070525, "grad_norm": 1.7122211456298828, "learning_rate": 0.0004017274765593151, "loss": 3.4227, "step": 70470 }, { "epoch": 4.788354395977715, "grad_norm": 1.9930899143218994, "learning_rate": 0.0004016850115504824, "loss": 3.3453, "step": 70475 }, { "epoch": 4.788694116048376, "grad_norm": 
1.5634546279907227, "learning_rate": 0.0004016425465416497, "loss": 3.1939, "step": 70480 }, { "epoch": 4.789033836119038, "grad_norm": 1.7313501834869385, "learning_rate": 0.00040160008153281696, "loss": 3.3627, "step": 70485 }, { "epoch": 4.7893735561897, "grad_norm": 1.5719484090805054, "learning_rate": 0.00040155761652398424, "loss": 3.2157, "step": 70490 }, { "epoch": 4.789713276260361, "grad_norm": 2.064927339553833, "learning_rate": 0.0004015151515151515, "loss": 3.2405, "step": 70495 }, { "epoch": 4.790052996331023, "grad_norm": 2.159146547317505, "learning_rate": 0.0004014726865063188, "loss": 3.6072, "step": 70500 }, { "epoch": 4.790392716401685, "grad_norm": 1.5237030982971191, "learning_rate": 0.0004014302214974861, "loss": 3.1449, "step": 70505 }, { "epoch": 4.7907324364723465, "grad_norm": 1.518599271774292, "learning_rate": 0.0004013877564886533, "loss": 3.3721, "step": 70510 }, { "epoch": 4.7910721565430086, "grad_norm": 1.735109567642212, "learning_rate": 0.00040134529147982064, "loss": 3.2792, "step": 70515 }, { "epoch": 4.791411876613671, "grad_norm": 1.5567899942398071, "learning_rate": 0.0004013028264709879, "loss": 3.7036, "step": 70520 }, { "epoch": 4.791751596684332, "grad_norm": 1.592443823814392, "learning_rate": 0.00040126036146215515, "loss": 3.3426, "step": 70525 }, { "epoch": 4.792091316754994, "grad_norm": 2.1764333248138428, "learning_rate": 0.0004012178964533225, "loss": 3.2016, "step": 70530 }, { "epoch": 4.792431036825656, "grad_norm": 2.1156821250915527, "learning_rate": 0.00040117543144448976, "loss": 3.5518, "step": 70535 }, { "epoch": 4.792770756896317, "grad_norm": 1.9223405122756958, "learning_rate": 0.00040113296643565704, "loss": 3.2046, "step": 70540 }, { "epoch": 4.793110476966979, "grad_norm": 2.2568042278289795, "learning_rate": 0.0004010905014268243, "loss": 3.2631, "step": 70545 }, { "epoch": 4.793450197037641, "grad_norm": 2.175814390182495, "learning_rate": 0.0004010480364179916, "loss": 3.462, "step": 70550 }, { 
"epoch": 4.7937899171083025, "grad_norm": 1.7141932249069214, "learning_rate": 0.0004010055714091589, "loss": 3.566, "step": 70555 }, { "epoch": 4.794129637178965, "grad_norm": 1.908143401145935, "learning_rate": 0.0004009631064003261, "loss": 3.5706, "step": 70560 }, { "epoch": 4.794469357249627, "grad_norm": 2.0446183681488037, "learning_rate": 0.00040092064139149344, "loss": 3.454, "step": 70565 }, { "epoch": 4.794809077320288, "grad_norm": 2.091768980026245, "learning_rate": 0.0004008781763826607, "loss": 3.6097, "step": 70570 }, { "epoch": 4.79514879739095, "grad_norm": 2.245692014694214, "learning_rate": 0.00040083571137382795, "loss": 3.276, "step": 70575 }, { "epoch": 4.795488517461612, "grad_norm": 1.8804197311401367, "learning_rate": 0.0004007932463649953, "loss": 3.3207, "step": 70580 }, { "epoch": 4.795828237532273, "grad_norm": 1.6129111051559448, "learning_rate": 0.00040075078135616256, "loss": 3.2455, "step": 70585 }, { "epoch": 4.796167957602935, "grad_norm": 1.9344068765640259, "learning_rate": 0.0004007083163473298, "loss": 3.4683, "step": 70590 }, { "epoch": 4.796507677673597, "grad_norm": 1.8579803705215454, "learning_rate": 0.00040066585133849707, "loss": 3.3041, "step": 70595 }, { "epoch": 4.7968473977442585, "grad_norm": 1.75818932056427, "learning_rate": 0.0004006233863296644, "loss": 3.3976, "step": 70600 }, { "epoch": 4.797187117814921, "grad_norm": 1.36759352684021, "learning_rate": 0.00040058092132083163, "loss": 3.3143, "step": 70605 }, { "epoch": 4.797526837885583, "grad_norm": 1.6538139581680298, "learning_rate": 0.0004005384563119989, "loss": 3.5217, "step": 70610 }, { "epoch": 4.797866557956244, "grad_norm": 1.7461581230163574, "learning_rate": 0.00040049599130316624, "loss": 3.2809, "step": 70615 }, { "epoch": 4.798206278026906, "grad_norm": 1.6119167804718018, "learning_rate": 0.00040045352629433347, "loss": 3.4898, "step": 70620 }, { "epoch": 4.798545998097568, "grad_norm": 2.0255300998687744, "learning_rate": 
0.00040041106128550075, "loss": 3.444, "step": 70625 }, { "epoch": 4.798885718168229, "grad_norm": 1.8548237085342407, "learning_rate": 0.00040036859627666803, "loss": 3.2225, "step": 70630 }, { "epoch": 4.799225438238891, "grad_norm": 1.7819377183914185, "learning_rate": 0.0004003261312678353, "loss": 3.2752, "step": 70635 }, { "epoch": 4.799565158309553, "grad_norm": 2.2347426414489746, "learning_rate": 0.0004002836662590026, "loss": 3.3513, "step": 70640 }, { "epoch": 4.7999048783802145, "grad_norm": 1.932105302810669, "learning_rate": 0.00040024120125016987, "loss": 3.2452, "step": 70645 }, { "epoch": 4.800244598450877, "grad_norm": 1.8218019008636475, "learning_rate": 0.00040019873624133715, "loss": 3.3787, "step": 70650 }, { "epoch": 4.800584318521539, "grad_norm": 1.91390860080719, "learning_rate": 0.00040015627123250443, "loss": 3.4612, "step": 70655 }, { "epoch": 4.8009240385922, "grad_norm": 1.7271453142166138, "learning_rate": 0.0004001138062236717, "loss": 3.3782, "step": 70660 }, { "epoch": 4.801263758662862, "grad_norm": 1.427840232849121, "learning_rate": 0.00040007134121483893, "loss": 3.4741, "step": 70665 }, { "epoch": 4.801603478733524, "grad_norm": 1.8792039155960083, "learning_rate": 0.00040002887620600627, "loss": 3.1629, "step": 70670 }, { "epoch": 4.801943198804185, "grad_norm": 1.6916913986206055, "learning_rate": 0.00039998641119717355, "loss": 3.6579, "step": 70675 }, { "epoch": 4.802282918874847, "grad_norm": 1.484236717224121, "learning_rate": 0.0003999439461883408, "loss": 3.4654, "step": 70680 }, { "epoch": 4.802622638945509, "grad_norm": 1.4585983753204346, "learning_rate": 0.0003999014811795081, "loss": 3.3177, "step": 70685 }, { "epoch": 4.8029623590161705, "grad_norm": 2.0631253719329834, "learning_rate": 0.0003998590161706754, "loss": 3.3696, "step": 70690 }, { "epoch": 4.803302079086833, "grad_norm": 1.7895272970199585, "learning_rate": 0.0003998165511618426, "loss": 3.5752, "step": 70695 }, { "epoch": 4.803641799157495, 
"grad_norm": 2.163339138031006, "learning_rate": 0.0003997740861530099, "loss": 3.2205, "step": 70700 }, { "epoch": 4.803981519228156, "grad_norm": 1.9994348287582397, "learning_rate": 0.00039973162114417723, "loss": 3.2832, "step": 70705 }, { "epoch": 4.804321239298818, "grad_norm": 1.7240687608718872, "learning_rate": 0.0003996891561353445, "loss": 3.3826, "step": 70710 }, { "epoch": 4.80466095936948, "grad_norm": 1.9584097862243652, "learning_rate": 0.00039964669112651174, "loss": 3.3065, "step": 70715 }, { "epoch": 4.805000679440141, "grad_norm": 1.3059161901474, "learning_rate": 0.00039960422611767907, "loss": 3.3026, "step": 70720 }, { "epoch": 4.805340399510803, "grad_norm": 1.440798282623291, "learning_rate": 0.00039956176110884635, "loss": 3.1788, "step": 70725 }, { "epoch": 4.805680119581465, "grad_norm": 1.7806947231292725, "learning_rate": 0.0003995192961000136, "loss": 3.5749, "step": 70730 }, { "epoch": 4.8060198396521265, "grad_norm": 1.6215945482254028, "learning_rate": 0.00039947683109118086, "loss": 3.3256, "step": 70735 }, { "epoch": 4.806359559722789, "grad_norm": 1.7728132009506226, "learning_rate": 0.0003994343660823482, "loss": 3.3283, "step": 70740 }, { "epoch": 4.806699279793451, "grad_norm": 1.7964229583740234, "learning_rate": 0.0003993919010735154, "loss": 3.455, "step": 70745 }, { "epoch": 4.807038999864112, "grad_norm": 1.9551197290420532, "learning_rate": 0.0003993494360646827, "loss": 3.4772, "step": 70750 }, { "epoch": 4.807378719934774, "grad_norm": 1.610550045967102, "learning_rate": 0.00039930697105585003, "loss": 3.3549, "step": 70755 }, { "epoch": 4.807718440005435, "grad_norm": 2.245117664337158, "learning_rate": 0.00039926450604701726, "loss": 3.2926, "step": 70760 }, { "epoch": 4.808058160076097, "grad_norm": 1.608376383781433, "learning_rate": 0.00039922204103818454, "loss": 3.567, "step": 70765 }, { "epoch": 4.808397880146759, "grad_norm": 2.208846092224121, "learning_rate": 0.0003991795760293518, "loss": 3.4585, "step": 
70770 }, { "epoch": 4.8087376002174205, "grad_norm": 1.6282532215118408, "learning_rate": 0.0003991371110205191, "loss": 3.43, "step": 70775 }, { "epoch": 4.8090773202880825, "grad_norm": 1.5065149068832397, "learning_rate": 0.0003990946460116864, "loss": 3.1389, "step": 70780 }, { "epoch": 4.809417040358745, "grad_norm": 1.316772222518921, "learning_rate": 0.00039905218100285366, "loss": 3.3579, "step": 70785 }, { "epoch": 4.809756760429406, "grad_norm": 1.8366997241973877, "learning_rate": 0.00039900971599402094, "loss": 3.2876, "step": 70790 }, { "epoch": 4.810096480500068, "grad_norm": 1.9052733182907104, "learning_rate": 0.0003989672509851882, "loss": 3.1955, "step": 70795 }, { "epoch": 4.81043620057073, "grad_norm": 1.7072105407714844, "learning_rate": 0.0003989247859763555, "loss": 3.2429, "step": 70800 }, { "epoch": 4.810775920641391, "grad_norm": 1.709524154663086, "learning_rate": 0.0003988823209675227, "loss": 3.3975, "step": 70805 }, { "epoch": 4.811115640712053, "grad_norm": 1.6612021923065186, "learning_rate": 0.00039883985595869006, "loss": 3.4561, "step": 70810 }, { "epoch": 4.811455360782715, "grad_norm": 2.022697687149048, "learning_rate": 0.00039879739094985734, "loss": 3.4979, "step": 70815 }, { "epoch": 4.8117950808533765, "grad_norm": 1.7967076301574707, "learning_rate": 0.00039875492594102456, "loss": 3.4026, "step": 70820 }, { "epoch": 4.812134800924039, "grad_norm": 1.753774881362915, "learning_rate": 0.0003987124609321919, "loss": 3.3855, "step": 70825 }, { "epoch": 4.812474520994701, "grad_norm": 1.7649736404418945, "learning_rate": 0.0003986699959233592, "loss": 3.5276, "step": 70830 }, { "epoch": 4.812814241065362, "grad_norm": 2.1983675956726074, "learning_rate": 0.0003986275309145264, "loss": 3.474, "step": 70835 }, { "epoch": 4.813153961136024, "grad_norm": 2.365051507949829, "learning_rate": 0.00039858506590569374, "loss": 3.4967, "step": 70840 }, { "epoch": 4.813493681206686, "grad_norm": 1.9604145288467407, "learning_rate": 
0.000398542600896861, "loss": 3.742, "step": 70845 }, { "epoch": 4.813833401277347, "grad_norm": 1.4139678478240967, "learning_rate": 0.00039850013588802824, "loss": 3.3233, "step": 70850 }, { "epoch": 4.814173121348009, "grad_norm": 1.8813035488128662, "learning_rate": 0.0003984576708791955, "loss": 3.6126, "step": 70855 }, { "epoch": 4.814512841418671, "grad_norm": 1.4138692617416382, "learning_rate": 0.00039841520587036286, "loss": 3.2172, "step": 70860 }, { "epoch": 4.8148525614893325, "grad_norm": 1.7803316116333008, "learning_rate": 0.0003983727408615301, "loss": 3.3855, "step": 70865 }, { "epoch": 4.815192281559995, "grad_norm": 1.5726977586746216, "learning_rate": 0.00039833027585269736, "loss": 3.3665, "step": 70870 }, { "epoch": 4.815532001630657, "grad_norm": 1.65326726436615, "learning_rate": 0.0003982878108438647, "loss": 3.2054, "step": 70875 }, { "epoch": 4.815871721701318, "grad_norm": 1.8961166143417358, "learning_rate": 0.000398245345835032, "loss": 3.5003, "step": 70880 }, { "epoch": 4.81621144177198, "grad_norm": 1.5326956510543823, "learning_rate": 0.0003982028808261992, "loss": 3.3804, "step": 70885 }, { "epoch": 4.816551161842642, "grad_norm": 2.4377171993255615, "learning_rate": 0.0003981604158173665, "loss": 3.5688, "step": 70890 }, { "epoch": 4.816890881913303, "grad_norm": 1.4051406383514404, "learning_rate": 0.0003981179508085338, "loss": 3.5986, "step": 70895 }, { "epoch": 4.817230601983965, "grad_norm": 1.847503423690796, "learning_rate": 0.00039807548579970104, "loss": 3.5131, "step": 70900 }, { "epoch": 4.817570322054627, "grad_norm": 2.2971251010894775, "learning_rate": 0.0003980330207908683, "loss": 3.6141, "step": 70905 }, { "epoch": 4.8179100421252885, "grad_norm": 1.959134817123413, "learning_rate": 0.00039799055578203566, "loss": 3.6782, "step": 70910 }, { "epoch": 4.818249762195951, "grad_norm": 2.051253318786621, "learning_rate": 0.0003979480907732029, "loss": 3.3691, "step": 70915 }, { "epoch": 4.818589482266613, 
"grad_norm": 1.7106537818908691, "learning_rate": 0.00039790562576437016, "loss": 3.1457, "step": 70920 }, { "epoch": 4.818929202337274, "grad_norm": 2.16164231300354, "learning_rate": 0.00039786316075553744, "loss": 3.3088, "step": 70925 }, { "epoch": 4.819268922407936, "grad_norm": 2.1238226890563965, "learning_rate": 0.0003978206957467047, "loss": 3.3763, "step": 70930 }, { "epoch": 4.819608642478598, "grad_norm": 1.967244029045105, "learning_rate": 0.000397778230737872, "loss": 3.3068, "step": 70935 }, { "epoch": 4.819948362549259, "grad_norm": 1.5993958711624146, "learning_rate": 0.0003977357657290393, "loss": 3.5016, "step": 70940 }, { "epoch": 4.820288082619921, "grad_norm": 1.7769982814788818, "learning_rate": 0.00039769330072020656, "loss": 3.5242, "step": 70945 }, { "epoch": 4.820627802690583, "grad_norm": 1.6105399131774902, "learning_rate": 0.00039765083571137384, "loss": 3.3776, "step": 70950 }, { "epoch": 4.8209675227612445, "grad_norm": 1.6887646913528442, "learning_rate": 0.0003976083707025411, "loss": 3.2718, "step": 70955 }, { "epoch": 4.821307242831907, "grad_norm": 1.5648696422576904, "learning_rate": 0.00039756590569370835, "loss": 3.3746, "step": 70960 }, { "epoch": 4.821646962902569, "grad_norm": 1.6733651161193848, "learning_rate": 0.0003975234406848757, "loss": 3.5097, "step": 70965 }, { "epoch": 4.82198668297323, "grad_norm": 1.7184678316116333, "learning_rate": 0.00039748097567604296, "loss": 3.5397, "step": 70970 }, { "epoch": 4.822326403043892, "grad_norm": 1.7004374265670776, "learning_rate": 0.0003974385106672102, "loss": 3.3332, "step": 70975 }, { "epoch": 4.822666123114553, "grad_norm": 1.6945629119873047, "learning_rate": 0.0003973960456583775, "loss": 3.0622, "step": 70980 }, { "epoch": 4.823005843185215, "grad_norm": 2.124119758605957, "learning_rate": 0.0003973535806495448, "loss": 3.3659, "step": 70985 }, { "epoch": 4.823345563255877, "grad_norm": 1.6172804832458496, "learning_rate": 0.00039731111564071203, "loss": 3.2691, 
"step": 70990 }, { "epoch": 4.8236852833265385, "grad_norm": 1.950289011001587, "learning_rate": 0.0003972686506318793, "loss": 3.4334, "step": 70995 }, { "epoch": 4.8240250033972005, "grad_norm": 2.150622606277466, "learning_rate": 0.00039722618562304665, "loss": 3.4809, "step": 71000 }, { "epoch": 4.824364723467863, "grad_norm": 1.627596378326416, "learning_rate": 0.00039718372061421387, "loss": 3.3131, "step": 71005 }, { "epoch": 4.824704443538524, "grad_norm": 2.318082809448242, "learning_rate": 0.00039714125560538115, "loss": 3.4047, "step": 71010 }, { "epoch": 4.825044163609186, "grad_norm": 2.0967342853546143, "learning_rate": 0.0003970987905965485, "loss": 2.8841, "step": 71015 }, { "epoch": 4.825383883679848, "grad_norm": 1.6645256280899048, "learning_rate": 0.0003970563255877157, "loss": 3.2773, "step": 71020 }, { "epoch": 4.825723603750509, "grad_norm": 2.0096747875213623, "learning_rate": 0.000397013860578883, "loss": 3.4591, "step": 71025 }, { "epoch": 4.826063323821171, "grad_norm": 1.8339701890945435, "learning_rate": 0.00039697139557005027, "loss": 3.4903, "step": 71030 }, { "epoch": 4.826403043891833, "grad_norm": 2.216491460800171, "learning_rate": 0.00039692893056121755, "loss": 3.5755, "step": 71035 }, { "epoch": 4.8267427639624945, "grad_norm": 1.819140911102295, "learning_rate": 0.00039688646555238483, "loss": 3.3004, "step": 71040 }, { "epoch": 4.8270824840331565, "grad_norm": 2.0058465003967285, "learning_rate": 0.0003968440005435521, "loss": 3.2259, "step": 71045 }, { "epoch": 4.827422204103819, "grad_norm": 1.7292659282684326, "learning_rate": 0.00039680153553471945, "loss": 3.4092, "step": 71050 }, { "epoch": 4.82776192417448, "grad_norm": 1.8100894689559937, "learning_rate": 0.00039675907052588667, "loss": 3.2736, "step": 71055 }, { "epoch": 4.828101644245142, "grad_norm": 1.66883385181427, "learning_rate": 0.00039671660551705395, "loss": 3.4333, "step": 71060 }, { "epoch": 4.828441364315804, "grad_norm": 1.953956127166748, 
"learning_rate": 0.0003966741405082213, "loss": 3.5151, "step": 71065 }, { "epoch": 4.828781084386465, "grad_norm": 1.7984910011291504, "learning_rate": 0.0003966316754993885, "loss": 3.3252, "step": 71070 }, { "epoch": 4.829120804457127, "grad_norm": 2.0371146202087402, "learning_rate": 0.0003965892104905558, "loss": 3.3577, "step": 71075 }, { "epoch": 4.829460524527789, "grad_norm": 1.8036487102508545, "learning_rate": 0.00039654674548172307, "loss": 3.2669, "step": 71080 }, { "epoch": 4.8298002445984505, "grad_norm": 1.699299931526184, "learning_rate": 0.00039650428047289035, "loss": 3.4943, "step": 71085 }, { "epoch": 4.8301399646691126, "grad_norm": 1.616120457649231, "learning_rate": 0.00039646181546405763, "loss": 3.5098, "step": 71090 }, { "epoch": 4.830479684739775, "grad_norm": 2.2769267559051514, "learning_rate": 0.0003964193504552249, "loss": 3.3521, "step": 71095 }, { "epoch": 4.830819404810436, "grad_norm": 1.6781002283096313, "learning_rate": 0.0003963768854463922, "loss": 3.4064, "step": 71100 }, { "epoch": 4.831159124881098, "grad_norm": 2.1268653869628906, "learning_rate": 0.00039633442043755947, "loss": 3.3235, "step": 71105 }, { "epoch": 4.83149884495176, "grad_norm": 1.8876529932022095, "learning_rate": 0.00039629195542872675, "loss": 3.4938, "step": 71110 }, { "epoch": 4.831838565022421, "grad_norm": 1.838180422782898, "learning_rate": 0.000396249490419894, "loss": 3.45, "step": 71115 }, { "epoch": 4.832178285093083, "grad_norm": 1.6430147886276245, "learning_rate": 0.0003962070254110613, "loss": 3.2361, "step": 71120 }, { "epoch": 4.832518005163745, "grad_norm": 1.7626413106918335, "learning_rate": 0.0003961645604022286, "loss": 3.5962, "step": 71125 }, { "epoch": 4.8328577252344065, "grad_norm": 1.8951014280319214, "learning_rate": 0.0003961220953933958, "loss": 3.213, "step": 71130 }, { "epoch": 4.833197445305069, "grad_norm": 1.4090813398361206, "learning_rate": 0.00039607963038456315, "loss": 3.5195, "step": 71135 }, { "epoch": 
4.833537165375731, "grad_norm": 1.816053867340088, "learning_rate": 0.00039603716537573043, "loss": 3.0248, "step": 71140 }, { "epoch": 4.833876885446392, "grad_norm": 1.7343320846557617, "learning_rate": 0.00039599470036689766, "loss": 3.377, "step": 71145 }, { "epoch": 4.834216605517054, "grad_norm": 1.4451149702072144, "learning_rate": 0.00039595223535806494, "loss": 3.4735, "step": 71150 }, { "epoch": 4.834556325587716, "grad_norm": 2.0395424365997314, "learning_rate": 0.0003959097703492323, "loss": 3.2447, "step": 71155 }, { "epoch": 4.834896045658377, "grad_norm": 1.734691858291626, "learning_rate": 0.0003958673053403995, "loss": 3.2519, "step": 71160 }, { "epoch": 4.835235765729039, "grad_norm": 1.8735147714614868, "learning_rate": 0.0003958248403315668, "loss": 3.6015, "step": 71165 }, { "epoch": 4.835575485799701, "grad_norm": 1.610447645187378, "learning_rate": 0.0003957823753227341, "loss": 3.4291, "step": 71170 }, { "epoch": 4.8359152058703625, "grad_norm": 2.580195665359497, "learning_rate": 0.00039573991031390134, "loss": 3.4152, "step": 71175 }, { "epoch": 4.836254925941025, "grad_norm": 1.3433228731155396, "learning_rate": 0.0003956974453050686, "loss": 3.3125, "step": 71180 }, { "epoch": 4.836594646011687, "grad_norm": 2.213654041290283, "learning_rate": 0.0003956549802962359, "loss": 3.5351, "step": 71185 }, { "epoch": 4.836934366082348, "grad_norm": 1.9284281730651855, "learning_rate": 0.0003956125152874032, "loss": 3.2718, "step": 71190 }, { "epoch": 4.83727408615301, "grad_norm": 1.9379935264587402, "learning_rate": 0.00039557005027857046, "loss": 3.2997, "step": 71195 }, { "epoch": 4.837613806223672, "grad_norm": 1.8828028440475464, "learning_rate": 0.00039552758526973774, "loss": 3.2823, "step": 71200 }, { "epoch": 4.837953526294333, "grad_norm": 2.0656321048736572, "learning_rate": 0.000395485120260905, "loss": 3.3859, "step": 71205 }, { "epoch": 4.838293246364995, "grad_norm": 2.30643892288208, "learning_rate": 0.0003954426552520723, 
"loss": 3.4156, "step": 71210 }, { "epoch": 4.838632966435657, "grad_norm": 1.89243483543396, "learning_rate": 0.0003954001902432396, "loss": 3.4463, "step": 71215 }, { "epoch": 4.8389726865063185, "grad_norm": 1.6263313293457031, "learning_rate": 0.00039535772523440686, "loss": 3.2645, "step": 71220 }, { "epoch": 4.839312406576981, "grad_norm": 1.6085234880447388, "learning_rate": 0.00039531526022557414, "loss": 3.22, "step": 71225 }, { "epoch": 4.839652126647643, "grad_norm": 1.60134756565094, "learning_rate": 0.0003952727952167414, "loss": 3.0676, "step": 71230 }, { "epoch": 4.839991846718304, "grad_norm": 1.918100357055664, "learning_rate": 0.0003952303302079087, "loss": 3.3714, "step": 71235 }, { "epoch": 4.840331566788966, "grad_norm": 1.7329442501068115, "learning_rate": 0.000395187865199076, "loss": 3.4879, "step": 71240 }, { "epoch": 4.840671286859628, "grad_norm": 2.178896188735962, "learning_rate": 0.00039514540019024326, "loss": 3.2632, "step": 71245 }, { "epoch": 4.841011006930289, "grad_norm": 1.5578231811523438, "learning_rate": 0.00039510293518141054, "loss": 3.4739, "step": 71250 }, { "epoch": 4.841350727000951, "grad_norm": 1.8978382349014282, "learning_rate": 0.00039506047017257777, "loss": 3.4753, "step": 71255 }, { "epoch": 4.841690447071613, "grad_norm": 1.7428185939788818, "learning_rate": 0.0003950180051637451, "loss": 3.3596, "step": 71260 }, { "epoch": 4.8420301671422745, "grad_norm": 1.8958107233047485, "learning_rate": 0.0003949755401549124, "loss": 3.5514, "step": 71265 }, { "epoch": 4.842369887212937, "grad_norm": 1.5903362035751343, "learning_rate": 0.0003949330751460796, "loss": 3.2049, "step": 71270 }, { "epoch": 4.842709607283599, "grad_norm": 1.7259116172790527, "learning_rate": 0.00039489061013724694, "loss": 3.4267, "step": 71275 }, { "epoch": 4.84304932735426, "grad_norm": 1.6279112100601196, "learning_rate": 0.0003948481451284142, "loss": 3.3464, "step": 71280 }, { "epoch": 4.843389047424922, "grad_norm": 1.8152310848236084, 
"learning_rate": 0.00039480568011958145, "loss": 3.3416, "step": 71285 }, { "epoch": 4.843728767495584, "grad_norm": 2.045149803161621, "learning_rate": 0.00039476321511074873, "loss": 3.3252, "step": 71290 }, { "epoch": 4.844068487566245, "grad_norm": 1.6099982261657715, "learning_rate": 0.00039472075010191606, "loss": 3.253, "step": 71295 }, { "epoch": 4.844408207636907, "grad_norm": 1.509825587272644, "learning_rate": 0.0003946782850930833, "loss": 3.1441, "step": 71300 }, { "epoch": 4.844747927707569, "grad_norm": 1.4300129413604736, "learning_rate": 0.00039463582008425057, "loss": 3.6243, "step": 71305 }, { "epoch": 4.8450876477782305, "grad_norm": 1.857804536819458, "learning_rate": 0.0003945933550754179, "loss": 3.3011, "step": 71310 }, { "epoch": 4.845427367848893, "grad_norm": 1.9784531593322754, "learning_rate": 0.00039455089006658513, "loss": 3.4912, "step": 71315 }, { "epoch": 4.845767087919555, "grad_norm": 1.723212718963623, "learning_rate": 0.0003945084250577524, "loss": 3.5775, "step": 71320 }, { "epoch": 4.846106807990216, "grad_norm": 1.7465101480484009, "learning_rate": 0.0003944659600489197, "loss": 3.6004, "step": 71325 }, { "epoch": 4.846446528060878, "grad_norm": 2.5548417568206787, "learning_rate": 0.00039442349504008697, "loss": 3.5431, "step": 71330 }, { "epoch": 4.84678624813154, "grad_norm": 2.1500401496887207, "learning_rate": 0.00039438103003125425, "loss": 3.5055, "step": 71335 }, { "epoch": 4.847125968202201, "grad_norm": 1.8284662961959839, "learning_rate": 0.00039433856502242153, "loss": 3.2557, "step": 71340 }, { "epoch": 4.847465688272863, "grad_norm": 1.5457260608673096, "learning_rate": 0.0003942961000135888, "loss": 3.5381, "step": 71345 }, { "epoch": 4.847805408343525, "grad_norm": 1.6446726322174072, "learning_rate": 0.0003942536350047561, "loss": 3.4013, "step": 71350 }, { "epoch": 4.8481451284141865, "grad_norm": 2.1077990531921387, "learning_rate": 0.00039421116999592337, "loss": 3.6596, "step": 71355 }, { "epoch": 
4.848484848484849, "grad_norm": 1.5848459005355835, "learning_rate": 0.0003941687049870906, "loss": 3.2295, "step": 71360 }, { "epoch": 4.848824568555511, "grad_norm": 1.6007095575332642, "learning_rate": 0.00039412623997825793, "loss": 3.4734, "step": 71365 }, { "epoch": 4.849164288626172, "grad_norm": 1.5986661911010742, "learning_rate": 0.0003940837749694252, "loss": 3.2819, "step": 71370 }, { "epoch": 4.849504008696834, "grad_norm": 1.9109580516815186, "learning_rate": 0.00039404130996059243, "loss": 3.3604, "step": 71375 }, { "epoch": 4.849843728767496, "grad_norm": 1.679940938949585, "learning_rate": 0.00039399884495175977, "loss": 3.4198, "step": 71380 }, { "epoch": 4.850183448838157, "grad_norm": 2.2961223125457764, "learning_rate": 0.00039395637994292705, "loss": 3.2165, "step": 71385 }, { "epoch": 4.850523168908819, "grad_norm": 1.6577867269515991, "learning_rate": 0.00039391391493409433, "loss": 3.3479, "step": 71390 }, { "epoch": 4.850862888979481, "grad_norm": 1.8443717956542969, "learning_rate": 0.0003938714499252616, "loss": 3.1253, "step": 71395 }, { "epoch": 4.8512026090501426, "grad_norm": 1.5777121782302856, "learning_rate": 0.0003938289849164289, "loss": 3.2554, "step": 71400 }, { "epoch": 4.851542329120805, "grad_norm": 2.3198623657226562, "learning_rate": 0.00039378651990759617, "loss": 3.4336, "step": 71405 }, { "epoch": 4.851882049191467, "grad_norm": 2.1383960247039795, "learning_rate": 0.0003937440548987634, "loss": 3.2747, "step": 71410 }, { "epoch": 4.852221769262128, "grad_norm": 1.8161448240280151, "learning_rate": 0.00039370158988993073, "loss": 3.4705, "step": 71415 }, { "epoch": 4.85256148933279, "grad_norm": 1.6934794187545776, "learning_rate": 0.000393659124881098, "loss": 3.2816, "step": 71420 }, { "epoch": 4.852901209403452, "grad_norm": 1.5984702110290527, "learning_rate": 0.00039361665987226524, "loss": 3.2114, "step": 71425 }, { "epoch": 4.853240929474113, "grad_norm": 1.7321416139602661, "learning_rate": 
0.00039357419486343257, "loss": 3.5548, "step": 71430 }, { "epoch": 4.853580649544775, "grad_norm": 1.9123120307922363, "learning_rate": 0.00039353172985459985, "loss": 3.5407, "step": 71435 }, { "epoch": 4.8539203696154365, "grad_norm": 1.4606869220733643, "learning_rate": 0.0003934892648457671, "loss": 3.4946, "step": 71440 }, { "epoch": 4.854260089686099, "grad_norm": 2.0357208251953125, "learning_rate": 0.00039344679983693436, "loss": 3.1849, "step": 71445 }, { "epoch": 4.854599809756761, "grad_norm": 1.6973965167999268, "learning_rate": 0.0003934043348281017, "loss": 3.4147, "step": 71450 }, { "epoch": 4.854939529827422, "grad_norm": 1.6491118669509888, "learning_rate": 0.0003933618698192689, "loss": 3.243, "step": 71455 }, { "epoch": 4.855279249898084, "grad_norm": 2.2486937046051025, "learning_rate": 0.0003933194048104362, "loss": 3.2673, "step": 71460 }, { "epoch": 4.855618969968746, "grad_norm": 2.0362536907196045, "learning_rate": 0.00039327693980160353, "loss": 3.2918, "step": 71465 }, { "epoch": 4.855958690039407, "grad_norm": 2.2570810317993164, "learning_rate": 0.00039323447479277076, "loss": 3.4277, "step": 71470 }, { "epoch": 4.856298410110069, "grad_norm": 1.9841012954711914, "learning_rate": 0.00039319200978393804, "loss": 3.5296, "step": 71475 }, { "epoch": 4.856638130180731, "grad_norm": 1.494543433189392, "learning_rate": 0.0003931495447751053, "loss": 3.1297, "step": 71480 }, { "epoch": 4.8569778502513925, "grad_norm": 1.7512651681900024, "learning_rate": 0.0003931070797662726, "loss": 3.6538, "step": 71485 }, { "epoch": 4.857317570322055, "grad_norm": 1.8955179452896118, "learning_rate": 0.0003930646147574399, "loss": 3.4764, "step": 71490 }, { "epoch": 4.857657290392717, "grad_norm": 1.7910124063491821, "learning_rate": 0.00039302214974860716, "loss": 3.1608, "step": 71495 }, { "epoch": 4.857997010463378, "grad_norm": 1.7791197299957275, "learning_rate": 0.00039297968473977444, "loss": 3.4484, "step": 71500 }, { "epoch": 4.85833673053404, 
"grad_norm": 2.2971768379211426, "learning_rate": 0.0003929372197309417, "loss": 3.1653, "step": 71505 }, { "epoch": 4.858676450604702, "grad_norm": 1.8601658344268799, "learning_rate": 0.000392894754722109, "loss": 3.1153, "step": 71510 }, { "epoch": 4.859016170675363, "grad_norm": 2.154329299926758, "learning_rate": 0.0003928522897132762, "loss": 3.2334, "step": 71515 }, { "epoch": 4.859355890746025, "grad_norm": 1.4907121658325195, "learning_rate": 0.00039280982470444356, "loss": 3.2562, "step": 71520 }, { "epoch": 4.859695610816687, "grad_norm": 1.8728936910629272, "learning_rate": 0.00039276735969561084, "loss": 3.4252, "step": 71525 }, { "epoch": 4.8600353308873485, "grad_norm": 1.9123468399047852, "learning_rate": 0.00039272489468677806, "loss": 3.2017, "step": 71530 }, { "epoch": 4.860375050958011, "grad_norm": 2.1435530185699463, "learning_rate": 0.0003926824296779454, "loss": 3.3626, "step": 71535 }, { "epoch": 4.860714771028673, "grad_norm": 2.160525321960449, "learning_rate": 0.0003926399646691127, "loss": 3.4887, "step": 71540 }, { "epoch": 4.861054491099334, "grad_norm": 1.6862164735794067, "learning_rate": 0.0003925974996602799, "loss": 3.6546, "step": 71545 }, { "epoch": 4.861394211169996, "grad_norm": 1.7551658153533936, "learning_rate": 0.0003925550346514472, "loss": 3.4751, "step": 71550 }, { "epoch": 4.861733931240658, "grad_norm": 1.730958104133606, "learning_rate": 0.0003925125696426145, "loss": 3.547, "step": 71555 }, { "epoch": 4.862073651311319, "grad_norm": 1.9398822784423828, "learning_rate": 0.0003924701046337818, "loss": 3.1025, "step": 71560 }, { "epoch": 4.862413371381981, "grad_norm": 2.3996875286102295, "learning_rate": 0.000392427639624949, "loss": 3.3893, "step": 71565 }, { "epoch": 4.862753091452643, "grad_norm": 2.1070194244384766, "learning_rate": 0.00039238517461611636, "loss": 3.3752, "step": 71570 }, { "epoch": 4.8630928115233045, "grad_norm": 1.7384593486785889, "learning_rate": 0.00039234270960728364, "loss": 3.4431, 
"step": 71575 }, { "epoch": 4.863432531593967, "grad_norm": 1.8135260343551636, "learning_rate": 0.00039230024459845086, "loss": 3.3846, "step": 71580 }, { "epoch": 4.863772251664629, "grad_norm": 1.7153735160827637, "learning_rate": 0.00039225777958961814, "loss": 3.5356, "step": 71585 }, { "epoch": 4.86411197173529, "grad_norm": 1.7122893333435059, "learning_rate": 0.0003922153145807855, "loss": 3.087, "step": 71590 }, { "epoch": 4.864451691805952, "grad_norm": 1.5278760194778442, "learning_rate": 0.0003921728495719527, "loss": 3.4531, "step": 71595 }, { "epoch": 4.864791411876614, "grad_norm": 1.4620134830474854, "learning_rate": 0.00039213038456312, "loss": 3.2859, "step": 71600 }, { "epoch": 4.865131131947275, "grad_norm": 1.6222683191299438, "learning_rate": 0.0003920879195542873, "loss": 3.5383, "step": 71605 }, { "epoch": 4.865470852017937, "grad_norm": 2.089545726776123, "learning_rate": 0.00039204545454545454, "loss": 3.2126, "step": 71610 }, { "epoch": 4.865810572088599, "grad_norm": 1.9310508966445923, "learning_rate": 0.0003920029895366218, "loss": 3.2642, "step": 71615 }, { "epoch": 4.8661502921592605, "grad_norm": 2.160428524017334, "learning_rate": 0.0003919605245277891, "loss": 3.3802, "step": 71620 }, { "epoch": 4.866490012229923, "grad_norm": 1.6864235401153564, "learning_rate": 0.0003919180595189564, "loss": 3.5694, "step": 71625 }, { "epoch": 4.866829732300585, "grad_norm": 1.9121804237365723, "learning_rate": 0.00039187559451012366, "loss": 3.3485, "step": 71630 }, { "epoch": 4.867169452371246, "grad_norm": 1.6215277910232544, "learning_rate": 0.00039183312950129094, "loss": 3.3783, "step": 71635 }, { "epoch": 4.867509172441908, "grad_norm": 1.5413979291915894, "learning_rate": 0.0003917906644924582, "loss": 3.3728, "step": 71640 }, { "epoch": 4.86784889251257, "grad_norm": 2.1113011837005615, "learning_rate": 0.0003917481994836255, "loss": 3.5628, "step": 71645 }, { "epoch": 4.868188612583231, "grad_norm": 1.4246989488601685, "learning_rate": 
0.0003917057344747928, "loss": 3.32, "step": 71650 }, { "epoch": 4.868528332653893, "grad_norm": 1.822664737701416, "learning_rate": 0.00039166326946596, "loss": 3.4743, "step": 71655 }, { "epoch": 4.8688680527245545, "grad_norm": 2.2174322605133057, "learning_rate": 0.00039162080445712734, "loss": 3.1584, "step": 71660 }, { "epoch": 4.8692077727952165, "grad_norm": 1.6300082206726074, "learning_rate": 0.0003915783394482946, "loss": 3.5651, "step": 71665 }, { "epoch": 4.869547492865879, "grad_norm": 1.5129728317260742, "learning_rate": 0.00039153587443946185, "loss": 3.721, "step": 71670 }, { "epoch": 4.86988721293654, "grad_norm": 1.5041232109069824, "learning_rate": 0.0003914934094306292, "loss": 3.4985, "step": 71675 }, { "epoch": 4.870226933007202, "grad_norm": 1.625335693359375, "learning_rate": 0.00039145094442179646, "loss": 3.4202, "step": 71680 }, { "epoch": 4.870566653077864, "grad_norm": 2.36565899848938, "learning_rate": 0.0003914084794129637, "loss": 3.1185, "step": 71685 }, { "epoch": 4.870906373148525, "grad_norm": 1.8480581045150757, "learning_rate": 0.000391366014404131, "loss": 3.3298, "step": 71690 }, { "epoch": 4.871246093219187, "grad_norm": 1.9494006633758545, "learning_rate": 0.0003913235493952983, "loss": 3.3106, "step": 71695 }, { "epoch": 4.871585813289849, "grad_norm": 1.8951267004013062, "learning_rate": 0.00039128108438646553, "loss": 3.3759, "step": 71700 }, { "epoch": 4.8719255333605105, "grad_norm": 1.5682475566864014, "learning_rate": 0.0003912386193776328, "loss": 3.2043, "step": 71705 }, { "epoch": 4.872265253431173, "grad_norm": 2.21138072013855, "learning_rate": 0.00039119615436880015, "loss": 3.0321, "step": 71710 }, { "epoch": 4.872604973501835, "grad_norm": 1.9538112878799438, "learning_rate": 0.00039115368935996737, "loss": 3.1326, "step": 71715 }, { "epoch": 4.872944693572496, "grad_norm": 1.6242120265960693, "learning_rate": 0.00039111122435113465, "loss": 3.5797, "step": 71720 }, { "epoch": 4.873284413643158, "grad_norm": 
2.0767924785614014, "learning_rate": 0.000391068759342302, "loss": 3.3855, "step": 71725 }, { "epoch": 4.87362413371382, "grad_norm": 1.6546565294265747, "learning_rate": 0.00039102629433346927, "loss": 3.4966, "step": 71730 }, { "epoch": 4.873963853784481, "grad_norm": 1.9293224811553955, "learning_rate": 0.0003909838293246365, "loss": 3.6124, "step": 71735 }, { "epoch": 4.874303573855143, "grad_norm": 1.5161093473434448, "learning_rate": 0.00039094136431580377, "loss": 3.3794, "step": 71740 }, { "epoch": 4.874643293925805, "grad_norm": 1.78041410446167, "learning_rate": 0.0003908988993069711, "loss": 3.5188, "step": 71745 }, { "epoch": 4.8749830139964665, "grad_norm": 1.7606016397476196, "learning_rate": 0.00039085643429813833, "loss": 3.349, "step": 71750 }, { "epoch": 4.875322734067129, "grad_norm": 2.0204668045043945, "learning_rate": 0.0003908139692893056, "loss": 3.2167, "step": 71755 }, { "epoch": 4.875662454137791, "grad_norm": 1.5700238943099976, "learning_rate": 0.00039077150428047295, "loss": 3.2747, "step": 71760 }, { "epoch": 4.876002174208452, "grad_norm": 1.3098210096359253, "learning_rate": 0.00039072903927164017, "loss": 3.5733, "step": 71765 }, { "epoch": 4.876341894279114, "grad_norm": 2.103365421295166, "learning_rate": 0.00039068657426280745, "loss": 3.2947, "step": 71770 }, { "epoch": 4.876681614349776, "grad_norm": 1.7623512744903564, "learning_rate": 0.00039064410925397473, "loss": 3.4421, "step": 71775 }, { "epoch": 4.877021334420437, "grad_norm": 1.420796513557434, "learning_rate": 0.000390601644245142, "loss": 3.5554, "step": 71780 }, { "epoch": 4.877361054491099, "grad_norm": 1.5091921091079712, "learning_rate": 0.0003905591792363093, "loss": 3.4638, "step": 71785 }, { "epoch": 4.877700774561761, "grad_norm": 1.9845436811447144, "learning_rate": 0.00039051671422747657, "loss": 3.3609, "step": 71790 }, { "epoch": 4.8780404946324225, "grad_norm": 1.7845203876495361, "learning_rate": 0.00039047424921864385, "loss": 3.1132, "step": 71795 }, 
{ "epoch": 4.878380214703085, "grad_norm": 1.5504143238067627, "learning_rate": 0.00039043178420981113, "loss": 3.4309, "step": 71800 }, { "epoch": 4.878719934773747, "grad_norm": 1.9707919359207153, "learning_rate": 0.0003903893192009784, "loss": 3.3388, "step": 71805 }, { "epoch": 4.879059654844408, "grad_norm": 1.963502287864685, "learning_rate": 0.00039034685419214564, "loss": 3.5057, "step": 71810 }, { "epoch": 4.87939937491507, "grad_norm": 2.0947375297546387, "learning_rate": 0.00039030438918331297, "loss": 3.3715, "step": 71815 }, { "epoch": 4.879739094985732, "grad_norm": 2.24479603767395, "learning_rate": 0.00039026192417448025, "loss": 3.4716, "step": 71820 }, { "epoch": 4.880078815056393, "grad_norm": 1.3189482688903809, "learning_rate": 0.0003902194591656475, "loss": 3.4009, "step": 71825 }, { "epoch": 4.880418535127055, "grad_norm": 2.008924722671509, "learning_rate": 0.0003901769941568148, "loss": 3.5076, "step": 71830 }, { "epoch": 4.880758255197717, "grad_norm": 1.9039124250411987, "learning_rate": 0.0003901345291479821, "loss": 3.5676, "step": 71835 }, { "epoch": 4.8810979752683785, "grad_norm": 1.7410331964492798, "learning_rate": 0.0003900920641391493, "loss": 3.4329, "step": 71840 }, { "epoch": 4.881437695339041, "grad_norm": 1.9344024658203125, "learning_rate": 0.0003900495991303166, "loss": 3.4907, "step": 71845 }, { "epoch": 4.881777415409703, "grad_norm": 2.0923221111297607, "learning_rate": 0.00039000713412148393, "loss": 3.3632, "step": 71850 }, { "epoch": 4.882117135480364, "grad_norm": 1.6905696392059326, "learning_rate": 0.00038996466911265116, "loss": 3.2856, "step": 71855 }, { "epoch": 4.882456855551026, "grad_norm": 1.781692624092102, "learning_rate": 0.00038992220410381844, "loss": 3.4764, "step": 71860 }, { "epoch": 4.882796575621688, "grad_norm": 1.636707067489624, "learning_rate": 0.0003898797390949858, "loss": 3.7093, "step": 71865 }, { "epoch": 4.883136295692349, "grad_norm": 1.8124487400054932, "learning_rate": 
0.000389837274086153, "loss": 3.5675, "step": 71870 }, { "epoch": 4.883476015763011, "grad_norm": 2.236144542694092, "learning_rate": 0.0003897948090773203, "loss": 3.3865, "step": 71875 }, { "epoch": 4.883815735833673, "grad_norm": 1.84345543384552, "learning_rate": 0.00038975234406848756, "loss": 3.5437, "step": 71880 }, { "epoch": 4.8841554559043345, "grad_norm": 2.174048900604248, "learning_rate": 0.00038970987905965484, "loss": 3.3718, "step": 71885 }, { "epoch": 4.884495175974997, "grad_norm": 1.76081120967865, "learning_rate": 0.0003896674140508221, "loss": 3.3225, "step": 71890 }, { "epoch": 4.884834896045659, "grad_norm": 2.1273906230926514, "learning_rate": 0.0003896249490419894, "loss": 3.4223, "step": 71895 }, { "epoch": 4.88517461611632, "grad_norm": 2.018049478530884, "learning_rate": 0.00038958248403315673, "loss": 3.2206, "step": 71900 }, { "epoch": 4.885514336186982, "grad_norm": 1.552228331565857, "learning_rate": 0.00038954001902432396, "loss": 3.3968, "step": 71905 }, { "epoch": 4.885854056257644, "grad_norm": 1.9925707578659058, "learning_rate": 0.00038949755401549124, "loss": 3.2854, "step": 71910 }, { "epoch": 4.886193776328305, "grad_norm": 1.5087828636169434, "learning_rate": 0.0003894550890066586, "loss": 3.322, "step": 71915 }, { "epoch": 4.886533496398967, "grad_norm": 1.5777597427368164, "learning_rate": 0.0003894126239978258, "loss": 3.4659, "step": 71920 }, { "epoch": 4.886873216469629, "grad_norm": 2.1434385776519775, "learning_rate": 0.0003893701589889931, "loss": 3.3199, "step": 71925 }, { "epoch": 4.8872129365402905, "grad_norm": 1.8940515518188477, "learning_rate": 0.00038932769398016036, "loss": 3.4354, "step": 71930 }, { "epoch": 4.887552656610953, "grad_norm": 1.8063440322875977, "learning_rate": 0.00038928522897132764, "loss": 3.3078, "step": 71935 }, { "epoch": 4.887892376681615, "grad_norm": 1.578704833984375, "learning_rate": 0.0003892427639624949, "loss": 3.6431, "step": 71940 }, { "epoch": 4.888232096752276, "grad_norm": 
1.7657949924468994, "learning_rate": 0.0003892002989536622, "loss": 3.405, "step": 71945 }, { "epoch": 4.888571816822938, "grad_norm": 1.4856421947479248, "learning_rate": 0.0003891578339448295, "loss": 3.4045, "step": 71950 }, { "epoch": 4.8889115368936, "grad_norm": 1.817564606666565, "learning_rate": 0.00038911536893599676, "loss": 3.5289, "step": 71955 }, { "epoch": 4.889251256964261, "grad_norm": 1.8076424598693848, "learning_rate": 0.00038907290392716404, "loss": 3.1147, "step": 71960 }, { "epoch": 4.889590977034923, "grad_norm": 1.6813615560531616, "learning_rate": 0.00038903043891833127, "loss": 3.2759, "step": 71965 }, { "epoch": 4.889930697105585, "grad_norm": 1.7314112186431885, "learning_rate": 0.0003889879739094986, "loss": 3.1481, "step": 71970 }, { "epoch": 4.8902704171762466, "grad_norm": 1.6938385963439941, "learning_rate": 0.0003889455089006659, "loss": 3.6156, "step": 71975 }, { "epoch": 4.890610137246909, "grad_norm": 2.029823064804077, "learning_rate": 0.0003889030438918331, "loss": 3.3833, "step": 71980 }, { "epoch": 4.890949857317571, "grad_norm": 1.58371102809906, "learning_rate": 0.00038886057888300044, "loss": 3.6097, "step": 71985 }, { "epoch": 4.891289577388232, "grad_norm": 1.9443928003311157, "learning_rate": 0.0003888181138741677, "loss": 3.4851, "step": 71990 }, { "epoch": 4.891629297458894, "grad_norm": 1.9206123352050781, "learning_rate": 0.00038877564886533495, "loss": 3.5702, "step": 71995 }, { "epoch": 4.891969017529556, "grad_norm": 1.3868075609207153, "learning_rate": 0.00038873318385650223, "loss": 3.431, "step": 72000 }, { "epoch": 4.892308737600217, "grad_norm": 1.895655870437622, "learning_rate": 0.00038869071884766956, "loss": 3.4005, "step": 72005 }, { "epoch": 4.892648457670879, "grad_norm": 2.3197994232177734, "learning_rate": 0.0003886482538388368, "loss": 3.2229, "step": 72010 }, { "epoch": 4.892988177741541, "grad_norm": 2.0606589317321777, "learning_rate": 0.00038860578883000407, "loss": 3.4887, "step": 72015 }, { 
"epoch": 4.893327897812203, "grad_norm": 2.1462392807006836, "learning_rate": 0.0003885633238211714, "loss": 3.5309, "step": 72020 }, { "epoch": 4.893667617882865, "grad_norm": 1.6831456422805786, "learning_rate": 0.00038852085881233863, "loss": 3.2578, "step": 72025 }, { "epoch": 4.894007337953527, "grad_norm": 2.0158438682556152, "learning_rate": 0.0003884783938035059, "loss": 3.4918, "step": 72030 }, { "epoch": 4.894347058024188, "grad_norm": 1.4099231958389282, "learning_rate": 0.0003884359287946732, "loss": 3.0177, "step": 72035 }, { "epoch": 4.89468677809485, "grad_norm": 1.9255582094192505, "learning_rate": 0.00038839346378584047, "loss": 3.6137, "step": 72040 }, { "epoch": 4.895026498165512, "grad_norm": 2.3371737003326416, "learning_rate": 0.00038835099877700775, "loss": 3.5108, "step": 72045 }, { "epoch": 4.895366218236173, "grad_norm": 1.9040367603302002, "learning_rate": 0.00038830853376817503, "loss": 3.3112, "step": 72050 }, { "epoch": 4.895705938306835, "grad_norm": 1.8578389883041382, "learning_rate": 0.0003882660687593423, "loss": 3.4057, "step": 72055 }, { "epoch": 4.896045658377497, "grad_norm": 1.9837771654129028, "learning_rate": 0.0003882236037505096, "loss": 3.3363, "step": 72060 }, { "epoch": 4.896385378448159, "grad_norm": 1.9557881355285645, "learning_rate": 0.00038818113874167687, "loss": 3.4706, "step": 72065 }, { "epoch": 4.896725098518821, "grad_norm": 2.221620798110962, "learning_rate": 0.00038813867373284415, "loss": 3.5124, "step": 72070 }, { "epoch": 4.897064818589483, "grad_norm": 1.4689735174179077, "learning_rate": 0.00038809620872401143, "loss": 3.3667, "step": 72075 }, { "epoch": 4.897404538660144, "grad_norm": 2.231180191040039, "learning_rate": 0.0003880537437151787, "loss": 3.4818, "step": 72080 }, { "epoch": 4.897744258730806, "grad_norm": 1.4857863187789917, "learning_rate": 0.000388011278706346, "loss": 3.5566, "step": 72085 }, { "epoch": 4.898083978801468, "grad_norm": 2.1615452766418457, "learning_rate": 
0.00038796881369751327, "loss": 3.5341, "step": 72090 }, { "epoch": 4.898423698872129, "grad_norm": 2.3294246196746826, "learning_rate": 0.00038792634868868055, "loss": 3.3266, "step": 72095 }, { "epoch": 4.898763418942791, "grad_norm": 1.7072179317474365, "learning_rate": 0.00038788388367984783, "loss": 3.3063, "step": 72100 }, { "epoch": 4.899103139013453, "grad_norm": 1.8940528631210327, "learning_rate": 0.00038784141867101505, "loss": 3.5764, "step": 72105 }, { "epoch": 4.899442859084115, "grad_norm": 2.0527162551879883, "learning_rate": 0.0003877989536621824, "loss": 3.4027, "step": 72110 }, { "epoch": 4.899782579154777, "grad_norm": 2.0475547313690186, "learning_rate": 0.00038775648865334967, "loss": 3.0937, "step": 72115 }, { "epoch": 4.900122299225439, "grad_norm": 1.7249189615249634, "learning_rate": 0.0003877140236445169, "loss": 3.3558, "step": 72120 }, { "epoch": 4.9004620192961, "grad_norm": 1.418219804763794, "learning_rate": 0.00038767155863568423, "loss": 3.258, "step": 72125 }, { "epoch": 4.900801739366762, "grad_norm": 1.7602959871292114, "learning_rate": 0.0003876290936268515, "loss": 3.5929, "step": 72130 }, { "epoch": 4.901141459437423, "grad_norm": 1.578770399093628, "learning_rate": 0.00038758662861801874, "loss": 3.5009, "step": 72135 }, { "epoch": 4.901481179508085, "grad_norm": 2.3358583450317383, "learning_rate": 0.000387544163609186, "loss": 3.3175, "step": 72140 }, { "epoch": 4.901820899578747, "grad_norm": 1.7719348669052124, "learning_rate": 0.00038750169860035335, "loss": 3.3959, "step": 72145 }, { "epoch": 4.9021606196494085, "grad_norm": 1.7496562004089355, "learning_rate": 0.0003874592335915206, "loss": 3.4525, "step": 72150 }, { "epoch": 4.902500339720071, "grad_norm": 1.780760407447815, "learning_rate": 0.00038741676858268786, "loss": 3.2571, "step": 72155 }, { "epoch": 4.902840059790733, "grad_norm": 1.652483582496643, "learning_rate": 0.0003873743035738552, "loss": 3.4683, "step": 72160 }, { "epoch": 4.903179779861394, 
"grad_norm": 1.7951232194900513, "learning_rate": 0.0003873318385650224, "loss": 3.2665, "step": 72165 }, { "epoch": 4.903519499932056, "grad_norm": 1.9329833984375, "learning_rate": 0.0003872893735561897, "loss": 3.3616, "step": 72170 }, { "epoch": 4.903859220002718, "grad_norm": 1.558402180671692, "learning_rate": 0.000387246908547357, "loss": 3.4045, "step": 72175 }, { "epoch": 4.904198940073379, "grad_norm": 1.3824518918991089, "learning_rate": 0.00038720444353852426, "loss": 3.3788, "step": 72180 }, { "epoch": 4.904538660144041, "grad_norm": 2.064798593521118, "learning_rate": 0.00038716197852969154, "loss": 3.4734, "step": 72185 }, { "epoch": 4.904878380214703, "grad_norm": 1.455141305923462, "learning_rate": 0.0003871195135208588, "loss": 3.6574, "step": 72190 }, { "epoch": 4.9052181002853645, "grad_norm": 2.116706132888794, "learning_rate": 0.0003870770485120261, "loss": 3.3903, "step": 72195 }, { "epoch": 4.905557820356027, "grad_norm": 1.9967516660690308, "learning_rate": 0.0003870345835031934, "loss": 3.3591, "step": 72200 }, { "epoch": 4.905897540426689, "grad_norm": 1.734663963317871, "learning_rate": 0.00038699211849436066, "loss": 3.3749, "step": 72205 }, { "epoch": 4.90623726049735, "grad_norm": 1.9867500066757202, "learning_rate": 0.0003869496534855279, "loss": 3.5255, "step": 72210 }, { "epoch": 4.906576980568012, "grad_norm": 1.514984369277954, "learning_rate": 0.0003869071884766952, "loss": 3.4234, "step": 72215 }, { "epoch": 4.906916700638674, "grad_norm": 2.150242805480957, "learning_rate": 0.0003868647234678625, "loss": 3.2701, "step": 72220 }, { "epoch": 4.907256420709335, "grad_norm": 1.7587038278579712, "learning_rate": 0.0003868222584590297, "loss": 3.4293, "step": 72225 }, { "epoch": 4.907596140779997, "grad_norm": 2.0232465267181396, "learning_rate": 0.00038677979345019706, "loss": 3.1648, "step": 72230 }, { "epoch": 4.907935860850659, "grad_norm": 1.7804361581802368, "learning_rate": 0.00038673732844136434, "loss": 3.1624, "step": 
72235 }, { "epoch": 4.9082755809213205, "grad_norm": 1.8546221256256104, "learning_rate": 0.0003866948634325316, "loss": 3.467, "step": 72240 }, { "epoch": 4.908615300991983, "grad_norm": 1.8060879707336426, "learning_rate": 0.0003866523984236989, "loss": 3.4804, "step": 72245 }, { "epoch": 4.908955021062645, "grad_norm": 2.207315444946289, "learning_rate": 0.0003866099334148662, "loss": 3.2634, "step": 72250 }, { "epoch": 4.909294741133306, "grad_norm": 2.2895333766937256, "learning_rate": 0.00038656746840603346, "loss": 3.298, "step": 72255 }, { "epoch": 4.909634461203968, "grad_norm": 2.192243814468384, "learning_rate": 0.0003865250033972007, "loss": 3.5808, "step": 72260 }, { "epoch": 4.90997418127463, "grad_norm": 1.8163211345672607, "learning_rate": 0.000386482538388368, "loss": 3.4426, "step": 72265 }, { "epoch": 4.910313901345291, "grad_norm": 2.0433926582336426, "learning_rate": 0.0003864400733795353, "loss": 3.2229, "step": 72270 }, { "epoch": 4.910653621415953, "grad_norm": 2.823953151702881, "learning_rate": 0.0003863976083707025, "loss": 3.3885, "step": 72275 }, { "epoch": 4.910993341486615, "grad_norm": 1.7114007472991943, "learning_rate": 0.00038635514336186986, "loss": 3.2185, "step": 72280 }, { "epoch": 4.911333061557277, "grad_norm": 2.0819287300109863, "learning_rate": 0.00038631267835303714, "loss": 3.3291, "step": 72285 }, { "epoch": 4.911672781627939, "grad_norm": 1.9137327671051025, "learning_rate": 0.00038627021334420436, "loss": 3.0319, "step": 72290 }, { "epoch": 4.912012501698601, "grad_norm": 1.7253834009170532, "learning_rate": 0.00038622774833537164, "loss": 3.2277, "step": 72295 }, { "epoch": 4.912352221769262, "grad_norm": 1.4457392692565918, "learning_rate": 0.000386185283326539, "loss": 3.2157, "step": 72300 }, { "epoch": 4.912691941839924, "grad_norm": 1.544785737991333, "learning_rate": 0.0003861428183177062, "loss": 3.4559, "step": 72305 }, { "epoch": 4.913031661910586, "grad_norm": 1.9952003955841064, "learning_rate": 
0.0003861003533088735, "loss": 3.3574, "step": 72310 }, { "epoch": 4.913371381981247, "grad_norm": 2.330022096633911, "learning_rate": 0.0003860578883000408, "loss": 3.2741, "step": 72315 }, { "epoch": 4.913711102051909, "grad_norm": 1.6276423931121826, "learning_rate": 0.00038601542329120804, "loss": 3.4299, "step": 72320 }, { "epoch": 4.914050822122571, "grad_norm": 1.9363372325897217, "learning_rate": 0.0003859729582823753, "loss": 3.3275, "step": 72325 }, { "epoch": 4.914390542193233, "grad_norm": 1.8273922204971313, "learning_rate": 0.0003859304932735426, "loss": 3.2667, "step": 72330 }, { "epoch": 4.914730262263895, "grad_norm": 2.04535174369812, "learning_rate": 0.0003858880282647099, "loss": 3.3445, "step": 72335 }, { "epoch": 4.915069982334556, "grad_norm": 1.6247973442077637, "learning_rate": 0.00038584556325587716, "loss": 3.5122, "step": 72340 }, { "epoch": 4.915409702405218, "grad_norm": 1.997570276260376, "learning_rate": 0.00038580309824704444, "loss": 3.3279, "step": 72345 }, { "epoch": 4.91574942247588, "grad_norm": 2.3641316890716553, "learning_rate": 0.0003857606332382117, "loss": 3.2832, "step": 72350 }, { "epoch": 4.916089142546541, "grad_norm": 1.3764311075210571, "learning_rate": 0.000385718168229379, "loss": 3.3855, "step": 72355 }, { "epoch": 4.916428862617203, "grad_norm": 1.6675548553466797, "learning_rate": 0.0003856757032205463, "loss": 3.3369, "step": 72360 }, { "epoch": 4.916768582687865, "grad_norm": 1.8840479850769043, "learning_rate": 0.0003856332382117135, "loss": 3.4099, "step": 72365 }, { "epoch": 4.9171083027585265, "grad_norm": 1.7894585132598877, "learning_rate": 0.00038559077320288084, "loss": 3.2745, "step": 72370 }, { "epoch": 4.917448022829189, "grad_norm": 1.6465108394622803, "learning_rate": 0.0003855483081940481, "loss": 3.5175, "step": 72375 }, { "epoch": 4.917787742899851, "grad_norm": 2.023118495941162, "learning_rate": 0.00038550584318521535, "loss": 3.2999, "step": 72380 }, { "epoch": 4.918127462970512, 
"grad_norm": 2.87481689453125, "learning_rate": 0.0003854633781763827, "loss": 3.2542, "step": 72385 }, { "epoch": 4.918467183041174, "grad_norm": 1.878147006034851, "learning_rate": 0.00038542091316754996, "loss": 3.3959, "step": 72390 }, { "epoch": 4.918806903111836, "grad_norm": 2.4392337799072266, "learning_rate": 0.0003853784481587172, "loss": 3.4272, "step": 72395 }, { "epoch": 4.919146623182497, "grad_norm": 1.43849778175354, "learning_rate": 0.00038533598314988447, "loss": 3.4977, "step": 72400 }, { "epoch": 4.919486343253159, "grad_norm": 1.6938731670379639, "learning_rate": 0.0003852935181410518, "loss": 3.2819, "step": 72405 }, { "epoch": 4.919826063323821, "grad_norm": 2.1635384559631348, "learning_rate": 0.0003852510531322191, "loss": 3.4926, "step": 72410 }, { "epoch": 4.9201657833944825, "grad_norm": 2.216423273086548, "learning_rate": 0.0003852085881233863, "loss": 3.3506, "step": 72415 }, { "epoch": 4.920505503465145, "grad_norm": 1.5343101024627686, "learning_rate": 0.00038516612311455365, "loss": 3.2305, "step": 72420 }, { "epoch": 4.920845223535807, "grad_norm": 2.1405248641967773, "learning_rate": 0.0003851236581057209, "loss": 3.187, "step": 72425 }, { "epoch": 4.921184943606468, "grad_norm": 2.05816650390625, "learning_rate": 0.00038508119309688815, "loss": 3.2418, "step": 72430 }, { "epoch": 4.92152466367713, "grad_norm": 1.6557295322418213, "learning_rate": 0.00038503872808805543, "loss": 3.1791, "step": 72435 }, { "epoch": 4.921864383747792, "grad_norm": 1.6657475233078003, "learning_rate": 0.00038499626307922277, "loss": 3.3233, "step": 72440 }, { "epoch": 4.922204103818453, "grad_norm": 2.020869016647339, "learning_rate": 0.00038495379807039, "loss": 3.497, "step": 72445 }, { "epoch": 4.922543823889115, "grad_norm": 2.017573356628418, "learning_rate": 0.00038491133306155727, "loss": 3.181, "step": 72450 }, { "epoch": 4.922883543959777, "grad_norm": 1.9021371603012085, "learning_rate": 0.0003848688680527246, "loss": 3.4263, "step": 72455 
}, { "epoch": 4.9232232640304385, "grad_norm": 1.4188510179519653, "learning_rate": 0.00038482640304389183, "loss": 3.2977, "step": 72460 }, { "epoch": 4.923562984101101, "grad_norm": 1.7646139860153198, "learning_rate": 0.0003847839380350591, "loss": 3.3966, "step": 72465 }, { "epoch": 4.923902704171763, "grad_norm": 1.474118947982788, "learning_rate": 0.0003847414730262264, "loss": 3.5244, "step": 72470 }, { "epoch": 4.924242424242424, "grad_norm": 1.9271328449249268, "learning_rate": 0.00038469900801739367, "loss": 3.4726, "step": 72475 }, { "epoch": 4.924582144313086, "grad_norm": 1.5760681629180908, "learning_rate": 0.00038465654300856095, "loss": 3.2212, "step": 72480 }, { "epoch": 4.924921864383748, "grad_norm": 2.0699100494384766, "learning_rate": 0.00038461407799972823, "loss": 3.1975, "step": 72485 }, { "epoch": 4.925261584454409, "grad_norm": 1.8716931343078613, "learning_rate": 0.0003845716129908955, "loss": 3.2389, "step": 72490 }, { "epoch": 4.925601304525071, "grad_norm": 2.4955902099609375, "learning_rate": 0.0003845291479820628, "loss": 3.2268, "step": 72495 }, { "epoch": 4.925941024595733, "grad_norm": 1.7779520750045776, "learning_rate": 0.00038448668297323007, "loss": 3.3428, "step": 72500 }, { "epoch": 4.9262807446663945, "grad_norm": 2.446364641189575, "learning_rate": 0.0003844442179643973, "loss": 3.2236, "step": 72505 }, { "epoch": 4.926620464737057, "grad_norm": 2.2631943225860596, "learning_rate": 0.00038440175295556463, "loss": 3.6875, "step": 72510 }, { "epoch": 4.926960184807719, "grad_norm": 1.9850956201553345, "learning_rate": 0.0003843592879467319, "loss": 3.7892, "step": 72515 }, { "epoch": 4.92729990487838, "grad_norm": 1.7296538352966309, "learning_rate": 0.00038431682293789914, "loss": 3.4034, "step": 72520 }, { "epoch": 4.927639624949042, "grad_norm": 1.9688680171966553, "learning_rate": 0.00038427435792906647, "loss": 3.3878, "step": 72525 }, { "epoch": 4.927979345019704, "grad_norm": 1.6566617488861084, "learning_rate": 
0.00038423189292023375, "loss": 3.1785, "step": 72530 }, { "epoch": 4.928319065090365, "grad_norm": 1.3532354831695557, "learning_rate": 0.000384189427911401, "loss": 3.3079, "step": 72535 }, { "epoch": 4.928658785161027, "grad_norm": 1.5362879037857056, "learning_rate": 0.0003841469629025683, "loss": 3.5346, "step": 72540 }, { "epoch": 4.928998505231689, "grad_norm": 1.647782802581787, "learning_rate": 0.0003841044978937356, "loss": 3.2074, "step": 72545 }, { "epoch": 4.9293382253023506, "grad_norm": 1.9092339277267456, "learning_rate": 0.0003840620328849028, "loss": 3.3604, "step": 72550 }, { "epoch": 4.929677945373013, "grad_norm": 1.3131674528121948, "learning_rate": 0.0003840195678760701, "loss": 3.3464, "step": 72555 }, { "epoch": 4.930017665443675, "grad_norm": 1.626425862312317, "learning_rate": 0.00038397710286723743, "loss": 3.3519, "step": 72560 }, { "epoch": 4.930357385514336, "grad_norm": 2.0146450996398926, "learning_rate": 0.00038393463785840466, "loss": 3.3944, "step": 72565 }, { "epoch": 4.930697105584998, "grad_norm": 1.8836498260498047, "learning_rate": 0.00038389217284957194, "loss": 3.4508, "step": 72570 }, { "epoch": 4.93103682565566, "grad_norm": 2.260982036590576, "learning_rate": 0.0003838497078407393, "loss": 3.5288, "step": 72575 }, { "epoch": 4.931376545726321, "grad_norm": 1.8738253116607666, "learning_rate": 0.00038380724283190655, "loss": 3.4761, "step": 72580 }, { "epoch": 4.931716265796983, "grad_norm": 1.7400401830673218, "learning_rate": 0.0003837647778230738, "loss": 3.3419, "step": 72585 }, { "epoch": 4.932055985867645, "grad_norm": 2.065992832183838, "learning_rate": 0.00038372231281424106, "loss": 3.3884, "step": 72590 }, { "epoch": 4.932395705938307, "grad_norm": 1.6516510248184204, "learning_rate": 0.0003836798478054084, "loss": 3.4164, "step": 72595 }, { "epoch": 4.932735426008969, "grad_norm": 2.0719118118286133, "learning_rate": 0.0003836373827965756, "loss": 3.3579, "step": 72600 }, { "epoch": 4.933075146079631, 
"grad_norm": 1.564707636833191, "learning_rate": 0.0003835949177877429, "loss": 3.6837, "step": 72605 }, { "epoch": 4.933414866150292, "grad_norm": 1.7694005966186523, "learning_rate": 0.00038355245277891023, "loss": 3.5678, "step": 72610 }, { "epoch": 4.933754586220954, "grad_norm": 1.7960090637207031, "learning_rate": 0.00038350998777007746, "loss": 3.3442, "step": 72615 }, { "epoch": 4.934094306291616, "grad_norm": 1.5812692642211914, "learning_rate": 0.00038346752276124474, "loss": 3.6375, "step": 72620 }, { "epoch": 4.934434026362277, "grad_norm": 1.4376654624938965, "learning_rate": 0.000383425057752412, "loss": 3.0239, "step": 72625 }, { "epoch": 4.934773746432939, "grad_norm": 1.6595464944839478, "learning_rate": 0.0003833825927435793, "loss": 3.5061, "step": 72630 }, { "epoch": 4.935113466503601, "grad_norm": 1.385933756828308, "learning_rate": 0.0003833401277347466, "loss": 2.9956, "step": 72635 }, { "epoch": 4.935453186574263, "grad_norm": 1.659674048423767, "learning_rate": 0.00038329766272591386, "loss": 3.3677, "step": 72640 }, { "epoch": 4.935792906644925, "grad_norm": 1.7960880994796753, "learning_rate": 0.00038325519771708114, "loss": 3.4277, "step": 72645 }, { "epoch": 4.936132626715587, "grad_norm": 1.9584380388259888, "learning_rate": 0.0003832127327082484, "loss": 3.4358, "step": 72650 }, { "epoch": 4.936472346786248, "grad_norm": 1.6119130849838257, "learning_rate": 0.0003831702676994157, "loss": 3.135, "step": 72655 }, { "epoch": 4.93681206685691, "grad_norm": 1.7712265253067017, "learning_rate": 0.0003831278026905829, "loss": 3.5909, "step": 72660 }, { "epoch": 4.937151786927572, "grad_norm": 2.01755428314209, "learning_rate": 0.00038308533768175026, "loss": 3.3291, "step": 72665 }, { "epoch": 4.937491506998233, "grad_norm": 1.3865610361099243, "learning_rate": 0.00038304287267291754, "loss": 3.3229, "step": 72670 }, { "epoch": 4.937831227068895, "grad_norm": 1.6142551898956299, "learning_rate": 0.00038300040766408477, "loss": 3.2175, 
"step": 72675 }, { "epoch": 4.938170947139557, "grad_norm": 1.4148895740509033, "learning_rate": 0.0003829579426552521, "loss": 3.2949, "step": 72680 }, { "epoch": 4.938510667210219, "grad_norm": 1.5985878705978394, "learning_rate": 0.0003829154776464194, "loss": 3.1368, "step": 72685 }, { "epoch": 4.938850387280881, "grad_norm": 1.7083669900894165, "learning_rate": 0.0003828730126375866, "loss": 3.4645, "step": 72690 }, { "epoch": 4.939190107351543, "grad_norm": 2.081256628036499, "learning_rate": 0.0003828305476287539, "loss": 3.2009, "step": 72695 }, { "epoch": 4.939529827422204, "grad_norm": 1.989554524421692, "learning_rate": 0.0003827880826199212, "loss": 3.4731, "step": 72700 }, { "epoch": 4.939869547492866, "grad_norm": 1.6124186515808105, "learning_rate": 0.00038274561761108845, "loss": 3.3693, "step": 72705 }, { "epoch": 4.940209267563528, "grad_norm": 1.6006786823272705, "learning_rate": 0.0003827031526022557, "loss": 3.1948, "step": 72710 }, { "epoch": 4.940548987634189, "grad_norm": 1.4235960245132446, "learning_rate": 0.00038266068759342306, "loss": 3.3109, "step": 72715 }, { "epoch": 4.940888707704851, "grad_norm": 1.7873414754867554, "learning_rate": 0.0003826182225845903, "loss": 3.4536, "step": 72720 }, { "epoch": 4.941228427775513, "grad_norm": 1.3856672048568726, "learning_rate": 0.00038257575757575757, "loss": 3.3593, "step": 72725 }, { "epoch": 4.941568147846175, "grad_norm": 1.9792695045471191, "learning_rate": 0.00038253329256692485, "loss": 3.1785, "step": 72730 }, { "epoch": 4.941907867916837, "grad_norm": 1.9314366579055786, "learning_rate": 0.00038249082755809213, "loss": 3.3078, "step": 72735 }, { "epoch": 4.942247587987499, "grad_norm": 1.954538345336914, "learning_rate": 0.0003824483625492594, "loss": 3.4567, "step": 72740 }, { "epoch": 4.94258730805816, "grad_norm": 1.6573176383972168, "learning_rate": 0.0003824058975404267, "loss": 3.2912, "step": 72745 }, { "epoch": 4.942927028128822, "grad_norm": 1.4882183074951172, 
"learning_rate": 0.000382363432531594, "loss": 3.3264, "step": 72750 }, { "epoch": 4.943266748199484, "grad_norm": 2.087289571762085, "learning_rate": 0.00038232096752276125, "loss": 3.5197, "step": 72755 }, { "epoch": 4.943606468270145, "grad_norm": 1.8190124034881592, "learning_rate": 0.00038227850251392853, "loss": 3.3792, "step": 72760 }, { "epoch": 4.943946188340807, "grad_norm": 1.619680643081665, "learning_rate": 0.0003822360375050958, "loss": 3.4989, "step": 72765 }, { "epoch": 4.944285908411469, "grad_norm": 1.9681711196899414, "learning_rate": 0.0003821935724962631, "loss": 3.3797, "step": 72770 }, { "epoch": 4.944625628482131, "grad_norm": 2.037263870239258, "learning_rate": 0.00038215110748743037, "loss": 3.4944, "step": 72775 }, { "epoch": 4.944965348552793, "grad_norm": 1.9669312238693237, "learning_rate": 0.00038210864247859765, "loss": 3.6039, "step": 72780 }, { "epoch": 4.945305068623455, "grad_norm": 1.9662717580795288, "learning_rate": 0.00038206617746976493, "loss": 3.2088, "step": 72785 }, { "epoch": 4.945644788694116, "grad_norm": 2.2664036750793457, "learning_rate": 0.0003820237124609322, "loss": 3.526, "step": 72790 }, { "epoch": 4.945984508764778, "grad_norm": 1.4167437553405762, "learning_rate": 0.0003819812474520995, "loss": 3.3469, "step": 72795 }, { "epoch": 4.94632422883544, "grad_norm": 1.7878835201263428, "learning_rate": 0.0003819387824432667, "loss": 3.6791, "step": 72800 }, { "epoch": 4.946663948906101, "grad_norm": 1.546369194984436, "learning_rate": 0.00038189631743443405, "loss": 3.1552, "step": 72805 }, { "epoch": 4.947003668976763, "grad_norm": 2.0197789669036865, "learning_rate": 0.00038185385242560133, "loss": 3.2403, "step": 72810 }, { "epoch": 4.9473433890474245, "grad_norm": 1.637296199798584, "learning_rate": 0.00038181138741676855, "loss": 3.4981, "step": 72815 }, { "epoch": 4.947683109118087, "grad_norm": 1.5705792903900146, "learning_rate": 0.0003817689224079359, "loss": 3.0362, "step": 72820 }, { "epoch": 
4.948022829188749, "grad_norm": 2.0323925018310547, "learning_rate": 0.00038172645739910317, "loss": 3.2762, "step": 72825 }, { "epoch": 4.94836254925941, "grad_norm": 2.019887685775757, "learning_rate": 0.0003816839923902704, "loss": 3.2829, "step": 72830 }, { "epoch": 4.948702269330072, "grad_norm": 2.0798346996307373, "learning_rate": 0.00038164152738143773, "loss": 3.4507, "step": 72835 }, { "epoch": 4.949041989400734, "grad_norm": 1.5407196283340454, "learning_rate": 0.000381599062372605, "loss": 3.3775, "step": 72840 }, { "epoch": 4.949381709471395, "grad_norm": 1.8459396362304688, "learning_rate": 0.00038155659736377223, "loss": 3.4345, "step": 72845 }, { "epoch": 4.949721429542057, "grad_norm": 1.7655551433563232, "learning_rate": 0.0003815141323549395, "loss": 3.4857, "step": 72850 }, { "epoch": 4.950061149612719, "grad_norm": 1.7240321636199951, "learning_rate": 0.00038147166734610685, "loss": 3.065, "step": 72855 }, { "epoch": 4.9504008696833806, "grad_norm": 1.7707489728927612, "learning_rate": 0.0003814292023372741, "loss": 3.5084, "step": 72860 }, { "epoch": 4.950740589754043, "grad_norm": 1.8953170776367188, "learning_rate": 0.00038138673732844136, "loss": 3.5833, "step": 72865 }, { "epoch": 4.951080309824705, "grad_norm": 1.5749115943908691, "learning_rate": 0.0003813442723196087, "loss": 3.5988, "step": 72870 }, { "epoch": 4.951420029895366, "grad_norm": 1.7744985818862915, "learning_rate": 0.0003813018073107759, "loss": 3.4124, "step": 72875 }, { "epoch": 4.951759749966028, "grad_norm": 1.5799208879470825, "learning_rate": 0.0003812593423019432, "loss": 3.316, "step": 72880 }, { "epoch": 4.95209947003669, "grad_norm": 1.7975547313690186, "learning_rate": 0.0003812168772931105, "loss": 3.3833, "step": 72885 }, { "epoch": 4.952439190107351, "grad_norm": 1.6308326721191406, "learning_rate": 0.00038117441228427776, "loss": 3.2648, "step": 72890 }, { "epoch": 4.952778910178013, "grad_norm": 1.6663508415222168, "learning_rate": 0.00038113194727544504, 
"loss": 3.4719, "step": 72895 }, { "epoch": 4.953118630248675, "grad_norm": 1.9165623188018799, "learning_rate": 0.0003810894822666123, "loss": 3.0776, "step": 72900 }, { "epoch": 4.953458350319337, "grad_norm": 1.8198362588882446, "learning_rate": 0.0003810470172577796, "loss": 3.154, "step": 72905 }, { "epoch": 4.953798070389999, "grad_norm": 1.5771396160125732, "learning_rate": 0.0003810045522489469, "loss": 3.1614, "step": 72910 }, { "epoch": 4.954137790460661, "grad_norm": 1.5062596797943115, "learning_rate": 0.00038096208724011416, "loss": 3.6243, "step": 72915 }, { "epoch": 4.954477510531322, "grad_norm": 2.246084213256836, "learning_rate": 0.00038091962223128144, "loss": 3.463, "step": 72920 }, { "epoch": 4.954817230601984, "grad_norm": 1.936685562133789, "learning_rate": 0.0003808771572224487, "loss": 3.5208, "step": 72925 }, { "epoch": 4.955156950672646, "grad_norm": 1.5305299758911133, "learning_rate": 0.000380834692213616, "loss": 3.4712, "step": 72930 }, { "epoch": 4.955496670743307, "grad_norm": 2.1022026538848877, "learning_rate": 0.0003807922272047833, "loss": 3.3872, "step": 72935 }, { "epoch": 4.955836390813969, "grad_norm": null, "learning_rate": 0.00038075825519771705, "loss": 3.5211, "step": 72940 }, { "epoch": 4.956176110884631, "grad_norm": 2.223264455795288, "learning_rate": 0.0003807157901888844, "loss": 3.3048, "step": 72945 }, { "epoch": 4.956515830955293, "grad_norm": 2.3991763591766357, "learning_rate": 0.00038067332518005166, "loss": 3.4297, "step": 72950 }, { "epoch": 4.956855551025955, "grad_norm": 1.8227790594100952, "learning_rate": 0.00038063086017121894, "loss": 3.21, "step": 72955 }, { "epoch": 4.957195271096617, "grad_norm": 1.3868756294250488, "learning_rate": 0.00038058839516238617, "loss": 3.392, "step": 72960 }, { "epoch": 4.957534991167278, "grad_norm": 2.1779720783233643, "learning_rate": 0.0003805459301535535, "loss": 3.5739, "step": 72965 }, { "epoch": 4.95787471123794, "grad_norm": 1.905752420425415, "learning_rate": 
0.0003805034651447208, "loss": 3.4913, "step": 72970 }, { "epoch": 4.958214431308602, "grad_norm": 1.5276044607162476, "learning_rate": 0.000380461000135888, "loss": 3.5359, "step": 72975 }, { "epoch": 4.958554151379263, "grad_norm": 1.6870580911636353, "learning_rate": 0.00038041853512705534, "loss": 3.473, "step": 72980 }, { "epoch": 4.958893871449925, "grad_norm": 1.956404447555542, "learning_rate": 0.0003803760701182226, "loss": 3.4968, "step": 72985 }, { "epoch": 4.959233591520587, "grad_norm": 1.8544752597808838, "learning_rate": 0.00038033360510938985, "loss": 3.1612, "step": 72990 }, { "epoch": 4.959573311591249, "grad_norm": 1.8659942150115967, "learning_rate": 0.0003802911401005571, "loss": 3.2097, "step": 72995 }, { "epoch": 4.959913031661911, "grad_norm": 2.175889730453491, "learning_rate": 0.00038024867509172446, "loss": 3.3587, "step": 73000 }, { "epoch": 4.960252751732573, "grad_norm": 1.5478904247283936, "learning_rate": 0.0003802062100828917, "loss": 3.315, "step": 73005 }, { "epoch": 4.960592471803234, "grad_norm": 1.8505334854125977, "learning_rate": 0.00038016374507405897, "loss": 3.5198, "step": 73010 }, { "epoch": 4.960932191873896, "grad_norm": 1.5294690132141113, "learning_rate": 0.0003801212800652263, "loss": 3.2336, "step": 73015 }, { "epoch": 4.961271911944557, "grad_norm": 1.8004875183105469, "learning_rate": 0.00038007881505639353, "loss": 3.6438, "step": 73020 }, { "epoch": 4.961611632015219, "grad_norm": 1.7769298553466797, "learning_rate": 0.0003800363500475608, "loss": 3.2095, "step": 73025 }, { "epoch": 4.961951352085881, "grad_norm": 1.7455341815948486, "learning_rate": 0.00037999388503872814, "loss": 3.3393, "step": 73030 }, { "epoch": 4.9622910721565425, "grad_norm": 1.8766119480133057, "learning_rate": 0.00037995142002989537, "loss": 3.7406, "step": 73035 }, { "epoch": 4.962630792227205, "grad_norm": 1.5792840719223022, "learning_rate": 0.00037990895502106265, "loss": 3.4523, "step": 73040 }, { "epoch": 4.962970512297867, 
"grad_norm": 1.814279556274414, "learning_rate": 0.00037986649001222993, "loss": 3.5308, "step": 73045 }, { "epoch": 4.963310232368528, "grad_norm": 1.9157239198684692, "learning_rate": 0.0003798240250033972, "loss": 3.372, "step": 73050 }, { "epoch": 4.96364995243919, "grad_norm": 1.8548235893249512, "learning_rate": 0.0003797815599945645, "loss": 3.2022, "step": 73055 }, { "epoch": 4.963989672509852, "grad_norm": 1.9816230535507202, "learning_rate": 0.00037973909498573177, "loss": 3.3301, "step": 73060 }, { "epoch": 4.964329392580513, "grad_norm": 1.797029972076416, "learning_rate": 0.00037969662997689905, "loss": 3.136, "step": 73065 }, { "epoch": 4.964669112651175, "grad_norm": 1.8474624156951904, "learning_rate": 0.00037965416496806633, "loss": 3.539, "step": 73070 }, { "epoch": 4.965008832721837, "grad_norm": 1.8003861904144287, "learning_rate": 0.0003796116999592336, "loss": 3.3061, "step": 73075 }, { "epoch": 4.9653485527924985, "grad_norm": 1.5742844343185425, "learning_rate": 0.00037956923495040083, "loss": 3.3384, "step": 73080 }, { "epoch": 4.965688272863161, "grad_norm": 1.4209532737731934, "learning_rate": 0.00037952676994156817, "loss": 3.4058, "step": 73085 }, { "epoch": 4.966027992933823, "grad_norm": 1.5407168865203857, "learning_rate": 0.00037948430493273545, "loss": 3.3977, "step": 73090 }, { "epoch": 4.966367713004484, "grad_norm": 2.1098361015319824, "learning_rate": 0.0003794418399239027, "loss": 3.3662, "step": 73095 }, { "epoch": 4.966707433075146, "grad_norm": 2.021880626678467, "learning_rate": 0.00037939937491507, "loss": 3.2215, "step": 73100 }, { "epoch": 4.967047153145808, "grad_norm": 1.2814512252807617, "learning_rate": 0.0003793569099062373, "loss": 3.5123, "step": 73105 }, { "epoch": 4.967386873216469, "grad_norm": 2.136425495147705, "learning_rate": 0.0003793144448974045, "loss": 3.2764, "step": 73110 }, { "epoch": 4.967726593287131, "grad_norm": 1.5976506471633911, "learning_rate": 0.0003792719798885718, "loss": 3.3362, "step": 
73115 }, { "epoch": 4.968066313357793, "grad_norm": 1.553388237953186, "learning_rate": 0.00037922951487973913, "loss": 3.5877, "step": 73120 }, { "epoch": 4.9684060334284545, "grad_norm": 1.9968669414520264, "learning_rate": 0.0003791870498709064, "loss": 3.4811, "step": 73125 }, { "epoch": 4.968745753499117, "grad_norm": 1.516719102859497, "learning_rate": 0.00037914458486207363, "loss": 3.2401, "step": 73130 }, { "epoch": 4.969085473569779, "grad_norm": 1.5972405672073364, "learning_rate": 0.00037910211985324097, "loss": 3.2596, "step": 73135 }, { "epoch": 4.96942519364044, "grad_norm": 1.7527849674224854, "learning_rate": 0.00037905965484440825, "loss": 3.5209, "step": 73140 }, { "epoch": 4.969764913711102, "grad_norm": 1.6718820333480835, "learning_rate": 0.0003790171898355755, "loss": 3.1339, "step": 73145 }, { "epoch": 4.970104633781764, "grad_norm": 1.7527118921279907, "learning_rate": 0.00037897472482674276, "loss": 3.7293, "step": 73150 }, { "epoch": 4.970444353852425, "grad_norm": 1.6053848266601562, "learning_rate": 0.0003789322598179101, "loss": 3.3926, "step": 73155 }, { "epoch": 4.970784073923087, "grad_norm": 1.3512247800827026, "learning_rate": 0.0003788897948090773, "loss": 3.4876, "step": 73160 }, { "epoch": 4.971123793993749, "grad_norm": 2.002493381500244, "learning_rate": 0.0003788473298002446, "loss": 3.5362, "step": 73165 }, { "epoch": 4.971463514064411, "grad_norm": 1.5421149730682373, "learning_rate": 0.00037880486479141193, "loss": 3.3617, "step": 73170 }, { "epoch": 4.971803234135073, "grad_norm": 1.8736984729766846, "learning_rate": 0.00037876239978257916, "loss": 3.4762, "step": 73175 }, { "epoch": 4.972142954205735, "grad_norm": 1.6512194871902466, "learning_rate": 0.00037871993477374644, "loss": 3.2823, "step": 73180 }, { "epoch": 4.972482674276396, "grad_norm": 1.5445479154586792, "learning_rate": 0.0003786774697649137, "loss": 3.2564, "step": 73185 }, { "epoch": 4.972822394347058, "grad_norm": 1.740591049194336, "learning_rate": 
0.000378635004756081, "loss": 3.4808, "step": 73190 }, { "epoch": 4.97316211441772, "grad_norm": 1.7423839569091797, "learning_rate": 0.0003785925397472483, "loss": 3.4508, "step": 73195 }, { "epoch": 4.973501834488381, "grad_norm": 1.7662869691848755, "learning_rate": 0.00037855007473841556, "loss": 3.4151, "step": 73200 }, { "epoch": 4.973841554559043, "grad_norm": 1.773769497871399, "learning_rate": 0.00037850760972958284, "loss": 3.456, "step": 73205 }, { "epoch": 4.974181274629705, "grad_norm": 1.6478825807571411, "learning_rate": 0.0003784651447207501, "loss": 3.4216, "step": 73210 }, { "epoch": 4.974520994700367, "grad_norm": 1.5521436929702759, "learning_rate": 0.0003784226797119174, "loss": 3.2986, "step": 73215 }, { "epoch": 4.974860714771029, "grad_norm": 1.6442623138427734, "learning_rate": 0.0003783802147030846, "loss": 3.3981, "step": 73220 }, { "epoch": 4.975200434841691, "grad_norm": 1.7895108461380005, "learning_rate": 0.00037833774969425196, "loss": 3.2863, "step": 73225 }, { "epoch": 4.975540154912352, "grad_norm": 1.873352289199829, "learning_rate": 0.00037829528468541924, "loss": 3.0021, "step": 73230 }, { "epoch": 4.975879874983014, "grad_norm": 1.635680079460144, "learning_rate": 0.00037825281967658646, "loss": 3.2648, "step": 73235 }, { "epoch": 4.976219595053676, "grad_norm": 1.6155720949172974, "learning_rate": 0.0003782103546677538, "loss": 3.1584, "step": 73240 }, { "epoch": 4.976559315124337, "grad_norm": 2.0712978839874268, "learning_rate": 0.0003781678896589211, "loss": 3.3862, "step": 73245 }, { "epoch": 4.976899035194999, "grad_norm": 1.5744365453720093, "learning_rate": 0.0003781254246500883, "loss": 3.6384, "step": 73250 }, { "epoch": 4.977238755265661, "grad_norm": 1.3427118062973022, "learning_rate": 0.0003780829596412556, "loss": 3.4102, "step": 73255 }, { "epoch": 4.977578475336323, "grad_norm": 1.5421967506408691, "learning_rate": 0.0003780404946324229, "loss": 3.4545, "step": 73260 }, { "epoch": 4.977918195406985, 
"grad_norm": 1.4362705945968628, "learning_rate": 0.00037799802962359014, "loss": 3.4972, "step": 73265 }, { "epoch": 4.978257915477647, "grad_norm": 2.0657730102539062, "learning_rate": 0.0003779555646147574, "loss": 3.2766, "step": 73270 }, { "epoch": 4.978597635548308, "grad_norm": 2.212162971496582, "learning_rate": 0.00037791309960592476, "loss": 3.5274, "step": 73275 }, { "epoch": 4.97893735561897, "grad_norm": 1.8271232843399048, "learning_rate": 0.000377870634597092, "loss": 3.2836, "step": 73280 }, { "epoch": 4.979277075689632, "grad_norm": 1.7281579971313477, "learning_rate": 0.00037782816958825926, "loss": 3.2357, "step": 73285 }, { "epoch": 4.979616795760293, "grad_norm": 1.7681670188903809, "learning_rate": 0.00037778570457942654, "loss": 3.0764, "step": 73290 }, { "epoch": 4.979956515830955, "grad_norm": 2.4756455421447754, "learning_rate": 0.0003777432395705939, "loss": 3.2866, "step": 73295 }, { "epoch": 4.980296235901617, "grad_norm": 1.682576060295105, "learning_rate": 0.0003777007745617611, "loss": 3.107, "step": 73300 }, { "epoch": 4.980635955972279, "grad_norm": 1.5752830505371094, "learning_rate": 0.0003776583095529284, "loss": 3.4578, "step": 73305 }, { "epoch": 4.980975676042941, "grad_norm": 1.566399335861206, "learning_rate": 0.0003776158445440957, "loss": 3.4474, "step": 73310 }, { "epoch": 4.981315396113603, "grad_norm": 1.8469915390014648, "learning_rate": 0.00037757337953526294, "loss": 3.371, "step": 73315 }, { "epoch": 4.981655116184264, "grad_norm": 2.036876916885376, "learning_rate": 0.0003775309145264302, "loss": 3.4469, "step": 73320 }, { "epoch": 4.981994836254926, "grad_norm": 1.6119827032089233, "learning_rate": 0.00037748844951759756, "loss": 3.3915, "step": 73325 }, { "epoch": 4.982334556325588, "grad_norm": 1.624871850013733, "learning_rate": 0.0003774459845087648, "loss": 3.1733, "step": 73330 }, { "epoch": 4.982674276396249, "grad_norm": 1.689909815788269, "learning_rate": 0.00037740351949993206, "loss": 3.4754, "step": 
73335 }, { "epoch": 4.983013996466911, "grad_norm": 1.871010422706604, "learning_rate": 0.00037736105449109934, "loss": 3.4588, "step": 73340 }, { "epoch": 4.983353716537573, "grad_norm": 2.016789197921753, "learning_rate": 0.0003773185894822666, "loss": 3.1169, "step": 73345 }, { "epoch": 4.983693436608235, "grad_norm": 1.8025873899459839, "learning_rate": 0.0003772761244734339, "loss": 3.3316, "step": 73350 }, { "epoch": 4.984033156678897, "grad_norm": 1.5492334365844727, "learning_rate": 0.0003772336594646012, "loss": 3.2719, "step": 73355 }, { "epoch": 4.984372876749559, "grad_norm": 1.9668134450912476, "learning_rate": 0.00037719119445576846, "loss": 3.2726, "step": 73360 }, { "epoch": 4.98471259682022, "grad_norm": 1.6048696041107178, "learning_rate": 0.00037714872944693574, "loss": 3.5934, "step": 73365 }, { "epoch": 4.985052316890882, "grad_norm": 1.492011547088623, "learning_rate": 0.000377106264438103, "loss": 3.3905, "step": 73370 }, { "epoch": 4.985392036961544, "grad_norm": 1.679071068763733, "learning_rate": 0.00037706379942927025, "loss": 3.4784, "step": 73375 }, { "epoch": 4.985731757032205, "grad_norm": 2.0622034072875977, "learning_rate": 0.0003770213344204376, "loss": 3.2628, "step": 73380 }, { "epoch": 4.986071477102867, "grad_norm": 2.1627697944641113, "learning_rate": 0.00037697886941160486, "loss": 3.0959, "step": 73385 }, { "epoch": 4.986411197173529, "grad_norm": 1.7421908378601074, "learning_rate": 0.0003769364044027721, "loss": 3.3126, "step": 73390 }, { "epoch": 4.986750917244191, "grad_norm": 1.6491514444351196, "learning_rate": 0.0003768939393939394, "loss": 3.2266, "step": 73395 }, { "epoch": 4.987090637314853, "grad_norm": 1.7375932931900024, "learning_rate": 0.0003768514743851067, "loss": 3.2828, "step": 73400 }, { "epoch": 4.987430357385515, "grad_norm": 1.4989697933197021, "learning_rate": 0.00037680900937627393, "loss": 3.298, "step": 73405 }, { "epoch": 4.987770077456176, "grad_norm": 1.8449463844299316, "learning_rate": 
0.0003767665443674412, "loss": 3.4483, "step": 73410 }, { "epoch": 4.988109797526838, "grad_norm": 1.9706778526306152, "learning_rate": 0.00037672407935860855, "loss": 3.1384, "step": 73415 }, { "epoch": 4.9884495175975, "grad_norm": 1.6901249885559082, "learning_rate": 0.00037668161434977577, "loss": 3.4231, "step": 73420 }, { "epoch": 4.988789237668161, "grad_norm": 1.988167643547058, "learning_rate": 0.00037663914934094305, "loss": 3.5106, "step": 73425 }, { "epoch": 4.989128957738823, "grad_norm": 2.0546743869781494, "learning_rate": 0.0003765966843321104, "loss": 3.3877, "step": 73430 }, { "epoch": 4.989468677809485, "grad_norm": 2.043308734893799, "learning_rate": 0.0003765542193232776, "loss": 3.4553, "step": 73435 }, { "epoch": 4.989808397880147, "grad_norm": 1.54188871383667, "learning_rate": 0.0003765117543144449, "loss": 3.4587, "step": 73440 }, { "epoch": 4.990148117950809, "grad_norm": 1.9179596900939941, "learning_rate": 0.00037646928930561217, "loss": 3.5298, "step": 73445 }, { "epoch": 4.990487838021471, "grad_norm": 1.3273043632507324, "learning_rate": 0.00037642682429677945, "loss": 3.1293, "step": 73450 }, { "epoch": 4.990827558092132, "grad_norm": 1.935859203338623, "learning_rate": 0.00037638435928794673, "loss": 3.2773, "step": 73455 }, { "epoch": 4.991167278162794, "grad_norm": 1.5186550617218018, "learning_rate": 0.000376341894279114, "loss": 3.4468, "step": 73460 }, { "epoch": 4.991506998233456, "grad_norm": 2.222853899002075, "learning_rate": 0.00037629942927028135, "loss": 3.0917, "step": 73465 }, { "epoch": 4.991846718304117, "grad_norm": 1.8531568050384521, "learning_rate": 0.00037625696426144857, "loss": 3.3594, "step": 73470 }, { "epoch": 4.992186438374779, "grad_norm": 1.8205024003982544, "learning_rate": 0.00037621449925261585, "loss": 3.3451, "step": 73475 }, { "epoch": 4.9925261584454415, "grad_norm": 1.4679303169250488, "learning_rate": 0.00037617203424378313, "loss": 3.4649, "step": 73480 }, { "epoch": 4.992865878516103, 
"grad_norm": 1.442957878112793, "learning_rate": 0.0003761295692349504, "loss": 3.4416, "step": 73485 }, { "epoch": 4.993205598586765, "grad_norm": 2.351824998855591, "learning_rate": 0.0003760871042261177, "loss": 3.5268, "step": 73490 }, { "epoch": 4.993545318657426, "grad_norm": 1.3886038064956665, "learning_rate": 0.00037604463921728497, "loss": 3.4746, "step": 73495 }, { "epoch": 4.993885038728088, "grad_norm": 1.7042545080184937, "learning_rate": 0.00037600217420845225, "loss": 3.1967, "step": 73500 }, { "epoch": 4.99422475879875, "grad_norm": 2.0425477027893066, "learning_rate": 0.00037595970919961953, "loss": 3.2996, "step": 73505 }, { "epoch": 4.994564478869411, "grad_norm": 1.8829805850982666, "learning_rate": 0.0003759172441907868, "loss": 3.4469, "step": 73510 }, { "epoch": 4.994904198940073, "grad_norm": 2.261314868927002, "learning_rate": 0.00037587477918195404, "loss": 3.3996, "step": 73515 }, { "epoch": 4.995243919010735, "grad_norm": 2.0451016426086426, "learning_rate": 0.00037583231417312137, "loss": 3.3556, "step": 73520 }, { "epoch": 4.995583639081397, "grad_norm": 1.8406164646148682, "learning_rate": 0.00037578984916428865, "loss": 3.2862, "step": 73525 }, { "epoch": 4.995923359152059, "grad_norm": 1.8108711242675781, "learning_rate": 0.0003757473841554559, "loss": 3.4598, "step": 73530 }, { "epoch": 4.996263079222721, "grad_norm": 1.9104024171829224, "learning_rate": 0.0003757049191466232, "loss": 3.4364, "step": 73535 }, { "epoch": 4.996602799293382, "grad_norm": 1.6937060356140137, "learning_rate": 0.0003756624541377905, "loss": 3.419, "step": 73540 }, { "epoch": 4.996942519364044, "grad_norm": 1.7383060455322266, "learning_rate": 0.0003756199891289577, "loss": 3.4313, "step": 73545 }, { "epoch": 4.997282239434706, "grad_norm": 1.8331775665283203, "learning_rate": 0.000375577524120125, "loss": 3.5187, "step": 73550 }, { "epoch": 4.997621959505367, "grad_norm": 1.6760107278823853, "learning_rate": 0.00037553505911129233, "loss": 3.3279, 
"step": 73555 }, { "epoch": 4.997961679576029, "grad_norm": 1.7689152956008911, "learning_rate": 0.00037549259410245956, "loss": 3.5546, "step": 73560 }, { "epoch": 4.998301399646691, "grad_norm": 2.3061094284057617, "learning_rate": 0.00037545012909362684, "loss": 3.1823, "step": 73565 }, { "epoch": 4.998641119717353, "grad_norm": 1.5560898780822754, "learning_rate": 0.0003754076640847942, "loss": 3.4013, "step": 73570 }, { "epoch": 4.998980839788015, "grad_norm": 2.0386505126953125, "learning_rate": 0.0003753651990759614, "loss": 3.3394, "step": 73575 }, { "epoch": 4.999320559858677, "grad_norm": 1.6105237007141113, "learning_rate": 0.0003753227340671287, "loss": 3.3767, "step": 73580 }, { "epoch": 4.999660279929338, "grad_norm": 2.0479207038879395, "learning_rate": 0.000375280269058296, "loss": 3.5512, "step": 73585 }, { "epoch": 5.0, "grad_norm": 5.030941009521484, "learning_rate": 0.00037523780404946324, "loss": 3.3787, "step": 73590 }, { "epoch": 5.0, "eval_bertscore": { "f1": 0.8384843847642793, "precision": 0.8369866649255883, "recall": 0.840848951466105 }, "eval_bleu_4": 0.01222781892357284, "eval_exact_match": 0.00019381723035177828, "eval_loss": 3.3904659748077393, "eval_meteor": 0.10641952025883096, "eval_rouge": { "rouge1": 0.12826218767579461, "rouge2": 0.01462005254620015, "rougeL": 0.10464203756307153, "rougeLsum": 0.10462346336674269 }, "eval_runtime": 1454.839, "eval_samples_per_second": 7.093, "eval_steps_per_second": 0.887, "step": 73590 } ], "logging_steps": 5, "max_steps": 117744, "num_input_tokens_seen": 0, "num_train_epochs": 8, "save_steps": 500, "stateful_callbacks": { "TrainerControl": { "args": { "should_epoch_stop": false, "should_evaluate": false, "should_log": false, "should_save": true, "should_training_stop": false }, "attributes": {} } }, "total_flos": 1.8534114006663168e+17, "train_batch_size": 8, "trial_name": null, "trial_params": null }