{ "best_metric": 0.02093923396756236, "best_model_checkpoint": "./results-cc/code-t5/codet5_lora_official_0.001/checkpoint-88308", "epoch": 6.0, "eval_steps": 500, "global_step": 88308, "is_hyper_param_search": false, "is_local_process_zero": true, "is_world_process_zero": true, "log_history": [ { "epoch": 0.0003397200706617747, "grad_norm": 2.4493682384490967, "learning_rate": 0.0009999660279929338, "loss": 8.9461, "step": 5 }, { "epoch": 0.0006794401413235494, "grad_norm": 1.8883044719696045, "learning_rate": 0.0009999235629841012, "loss": 5.5244, "step": 10 }, { "epoch": 0.0010191602119853241, "grad_norm": 0.8480541706085205, "learning_rate": 0.0009998810979752683, "loss": 4.9151, "step": 15 }, { "epoch": 0.001358880282647099, "grad_norm": 0.7808103561401367, "learning_rate": 0.0009998386329664356, "loss": 4.0293, "step": 20 }, { "epoch": 0.0016986003533088735, "grad_norm": 0.7991061210632324, "learning_rate": 0.000999796167957603, "loss": 4.1878, "step": 25 }, { "epoch": 0.0020383204239706482, "grad_norm": 0.6301679015159607, "learning_rate": 0.0009997537029487703, "loss": 4.048, "step": 30 }, { "epoch": 0.002378040494632423, "grad_norm": 0.7757200002670288, "learning_rate": 0.0009997112379399374, "loss": 3.8586, "step": 35 }, { "epoch": 0.002717760565294198, "grad_norm": 0.5695300698280334, "learning_rate": 0.0009996687729311048, "loss": 4.0869, "step": 40 }, { "epoch": 0.0030574806359559724, "grad_norm": 0.6456375122070312, "learning_rate": 0.000999626307922272, "loss": 4.2999, "step": 45 }, { "epoch": 0.003397200706617747, "grad_norm": 0.7631058692932129, "learning_rate": 0.0009995838429134394, "loss": 3.8813, "step": 50 }, { "epoch": 0.0037369207772795215, "grad_norm": 0.6284775733947754, "learning_rate": 0.0009995413779046065, "loss": 4.0022, "step": 55 }, { "epoch": 0.0040766408479412965, "grad_norm": 0.861068844795227, "learning_rate": 0.0009994989128957739, "loss": 3.9738, "step": 60 }, { "epoch": 0.0044163609186030715, "grad_norm": 0.5616914629936218, 
"learning_rate": 0.0009994564478869412, "loss": 3.9699, "step": 65 }, { "epoch": 0.004756080989264846, "grad_norm": 1.2797105312347412, "learning_rate": 0.0009994139828781085, "loss": 4.2002, "step": 70 }, { "epoch": 0.005095801059926621, "grad_norm": 0.6161890625953674, "learning_rate": 0.0009993715178692757, "loss": 4.1096, "step": 75 }, { "epoch": 0.005435521130588396, "grad_norm": 0.5995295643806458, "learning_rate": 0.000999329052860443, "loss": 3.8588, "step": 80 }, { "epoch": 0.00577524120125017, "grad_norm": 0.5041489601135254, "learning_rate": 0.0009992865878516103, "loss": 3.858, "step": 85 }, { "epoch": 0.006114961271911945, "grad_norm": 0.6281498074531555, "learning_rate": 0.0009992441228427774, "loss": 4.0204, "step": 90 }, { "epoch": 0.006454681342573719, "grad_norm": 0.5768001079559326, "learning_rate": 0.000999201657833945, "loss": 4.182, "step": 95 }, { "epoch": 0.006794401413235494, "grad_norm": 1.0207140445709229, "learning_rate": 0.0009991591928251121, "loss": 4.0643, "step": 100 }, { "epoch": 0.007134121483897269, "grad_norm": 0.5038891434669495, "learning_rate": 0.0009991167278162794, "loss": 3.5981, "step": 105 }, { "epoch": 0.007473841554559043, "grad_norm": 0.8742058873176575, "learning_rate": 0.0009990742628074468, "loss": 4.0484, "step": 110 }, { "epoch": 0.007813561625220818, "grad_norm": 0.7109525203704834, "learning_rate": 0.000999031797798614, "loss": 4.0474, "step": 115 }, { "epoch": 0.008153281695882593, "grad_norm": 0.7153136730194092, "learning_rate": 0.0009989893327897812, "loss": 4.2012, "step": 120 }, { "epoch": 0.008493001766544368, "grad_norm": 0.5598511099815369, "learning_rate": 0.0009989468677809486, "loss": 3.9894, "step": 125 }, { "epoch": 0.008832721837206143, "grad_norm": 0.696860671043396, "learning_rate": 0.000998904402772116, "loss": 3.7795, "step": 130 }, { "epoch": 0.009172441907867916, "grad_norm": 0.8827444314956665, "learning_rate": 0.000998861937763283, "loss": 3.8057, "step": 135 }, { "epoch": 
0.009512161978529691, "grad_norm": 0.7022817730903625, "learning_rate": 0.0009988194727544504, "loss": 3.8606, "step": 140 }, { "epoch": 0.009851882049191466, "grad_norm": 0.5722019076347351, "learning_rate": 0.0009987770077456177, "loss": 3.8019, "step": 145 }, { "epoch": 0.010191602119853241, "grad_norm": 0.6837615966796875, "learning_rate": 0.0009987345427367848, "loss": 4.0166, "step": 150 }, { "epoch": 0.010531322190515016, "grad_norm": 0.6816505789756775, "learning_rate": 0.0009986920777279521, "loss": 3.8674, "step": 155 }, { "epoch": 0.010871042261176791, "grad_norm": 0.8885841369628906, "learning_rate": 0.0009986496127191195, "loss": 3.8981, "step": 160 }, { "epoch": 0.011210762331838564, "grad_norm": 0.5710607767105103, "learning_rate": 0.0009986071477102868, "loss": 3.9024, "step": 165 }, { "epoch": 0.01155048240250034, "grad_norm": 0.63449627161026, "learning_rate": 0.0009985646827014541, "loss": 3.8214, "step": 170 }, { "epoch": 0.011890202473162114, "grad_norm": 0.6011857390403748, "learning_rate": 0.0009985222176926213, "loss": 3.856, "step": 175 }, { "epoch": 0.01222992254382389, "grad_norm": 0.7394192814826965, "learning_rate": 0.0009984797526837886, "loss": 3.8992, "step": 180 }, { "epoch": 0.012569642614485664, "grad_norm": 1.1866347789764404, "learning_rate": 0.000998437287674956, "loss": 3.961, "step": 185 }, { "epoch": 0.012909362685147438, "grad_norm": 2.1563827991485596, "learning_rate": 0.000998394822666123, "loss": 3.804, "step": 190 }, { "epoch": 0.013249082755809213, "grad_norm": 0.6507862210273743, "learning_rate": 0.0009983523576572904, "loss": 4.1155, "step": 195 }, { "epoch": 0.013588802826470988, "grad_norm": 0.6575655341148376, "learning_rate": 0.0009983098926484577, "loss": 3.9381, "step": 200 }, { "epoch": 0.013928522897132763, "grad_norm": 0.661180853843689, "learning_rate": 0.000998267427639625, "loss": 4.0532, "step": 205 }, { "epoch": 0.014268242967794538, "grad_norm": 0.7378032803535461, "learning_rate": 
0.0009982249626307922, "loss": 3.7624, "step": 210 }, { "epoch": 0.014607963038456313, "grad_norm": 0.49182239174842834, "learning_rate": 0.0009981824976219595, "loss": 3.8228, "step": 215 }, { "epoch": 0.014947683109118086, "grad_norm": 0.5906103849411011, "learning_rate": 0.0009981400326131268, "loss": 3.9758, "step": 220 }, { "epoch": 0.015287403179779861, "grad_norm": 0.6311669945716858, "learning_rate": 0.000998097567604294, "loss": 3.8291, "step": 225 }, { "epoch": 0.015627123250441636, "grad_norm": 0.53919917345047, "learning_rate": 0.0009980551025954615, "loss": 3.8204, "step": 230 }, { "epoch": 0.01596684332110341, "grad_norm": 2.3344688415527344, "learning_rate": 0.0009980126375866286, "loss": 3.8806, "step": 235 }, { "epoch": 0.016306563391765186, "grad_norm": 0.9298954010009766, "learning_rate": 0.000997970172577796, "loss": 3.8008, "step": 240 }, { "epoch": 0.01664628346242696, "grad_norm": 0.5423773527145386, "learning_rate": 0.0009979277075689633, "loss": 3.8053, "step": 245 }, { "epoch": 0.016986003533088736, "grad_norm": 0.5865753293037415, "learning_rate": 0.0009978852425601304, "loss": 3.9511, "step": 250 }, { "epoch": 0.01732572360375051, "grad_norm": 0.743493378162384, "learning_rate": 0.0009978427775512977, "loss": 3.7797, "step": 255 }, { "epoch": 0.017665443674412286, "grad_norm": 0.7558332681655884, "learning_rate": 0.000997800312542465, "loss": 3.9625, "step": 260 }, { "epoch": 0.01800516374507406, "grad_norm": 0.6390789151191711, "learning_rate": 0.0009977578475336324, "loss": 4.0425, "step": 265 }, { "epoch": 0.018344883815735832, "grad_norm": 0.8005325794219971, "learning_rate": 0.0009977153825247995, "loss": 3.8161, "step": 270 }, { "epoch": 0.01868460388639761, "grad_norm": 0.6689093112945557, "learning_rate": 0.0009976729175159669, "loss": 3.7674, "step": 275 }, { "epoch": 0.019024323957059382, "grad_norm": 0.5696629285812378, "learning_rate": 0.0009976304525071342, "loss": 3.8443, "step": 280 }, { "epoch": 0.01936404402772116, 
"grad_norm": 1.4880982637405396, "learning_rate": 0.0009975879874983013, "loss": 3.9159, "step": 285 }, { "epoch": 0.019703764098382932, "grad_norm": 0.6166170239448547, "learning_rate": 0.0009975455224894686, "loss": 3.9076, "step": 290 }, { "epoch": 0.020043484169044706, "grad_norm": 0.5892542004585266, "learning_rate": 0.000997503057480636, "loss": 3.8456, "step": 295 }, { "epoch": 0.020383204239706482, "grad_norm": 0.7772103548049927, "learning_rate": 0.0009974605924718033, "loss": 3.6958, "step": 300 }, { "epoch": 0.020722924310368256, "grad_norm": 1.1581958532333374, "learning_rate": 0.0009974181274629706, "loss": 3.8976, "step": 305 }, { "epoch": 0.021062644381030032, "grad_norm": 0.6160619854927063, "learning_rate": 0.0009973756624541378, "loss": 3.6929, "step": 310 }, { "epoch": 0.021402364451691806, "grad_norm": 0.6125484108924866, "learning_rate": 0.000997333197445305, "loss": 4.107, "step": 315 }, { "epoch": 0.021742084522353582, "grad_norm": 0.7578718662261963, "learning_rate": 0.0009972907324364724, "loss": 3.93, "step": 320 }, { "epoch": 0.022081804593015356, "grad_norm": 0.6966389417648315, "learning_rate": 0.0009972482674276396, "loss": 3.6796, "step": 325 }, { "epoch": 0.02242152466367713, "grad_norm": 0.8457727432250977, "learning_rate": 0.0009972058024188069, "loss": 3.5622, "step": 330 }, { "epoch": 0.022761244734338906, "grad_norm": 0.6424957513809204, "learning_rate": 0.0009971633374099742, "loss": 4.0048, "step": 335 }, { "epoch": 0.02310096480500068, "grad_norm": 0.709048867225647, "learning_rate": 0.0009971208724011416, "loss": 3.9275, "step": 340 }, { "epoch": 0.023440684875662456, "grad_norm": 0.8504447937011719, "learning_rate": 0.0009970784073923087, "loss": 3.9017, "step": 345 }, { "epoch": 0.02378040494632423, "grad_norm": 1.8175883293151855, "learning_rate": 0.000997035942383476, "loss": 3.6786, "step": 350 }, { "epoch": 0.024120125016986002, "grad_norm": 1.0008809566497803, "learning_rate": 0.0009969934773746433, "loss": 4.1405, 
"step": 355 }, { "epoch": 0.02445984508764778, "grad_norm": 0.7660126090049744, "learning_rate": 0.0009969510123658105, "loss": 3.9945, "step": 360 }, { "epoch": 0.024799565158309552, "grad_norm": 0.7501752376556396, "learning_rate": 0.0009969085473569778, "loss": 3.8554, "step": 365 }, { "epoch": 0.02513928522897133, "grad_norm": 0.7360669374465942, "learning_rate": 0.0009968660823481451, "loss": 3.9913, "step": 370 }, { "epoch": 0.025479005299633102, "grad_norm": 0.7204135060310364, "learning_rate": 0.0009968236173393125, "loss": 3.7927, "step": 375 }, { "epoch": 0.025818725370294875, "grad_norm": 0.752693235874176, "learning_rate": 0.0009967811523304798, "loss": 4.0625, "step": 380 }, { "epoch": 0.026158445440956652, "grad_norm": 1.2407736778259277, "learning_rate": 0.000996738687321647, "loss": 3.9684, "step": 385 }, { "epoch": 0.026498165511618425, "grad_norm": 0.6855944991111755, "learning_rate": 0.0009966962223128142, "loss": 3.7485, "step": 390 }, { "epoch": 0.026837885582280202, "grad_norm": 1.143729329109192, "learning_rate": 0.0009966537573039816, "loss": 3.4991, "step": 395 }, { "epoch": 0.027177605652941975, "grad_norm": 0.7017676830291748, "learning_rate": 0.0009966112922951487, "loss": 3.7361, "step": 400 }, { "epoch": 0.027517325723603752, "grad_norm": 0.7491458654403687, "learning_rate": 0.0009965688272863163, "loss": 3.6034, "step": 405 }, { "epoch": 0.027857045794265525, "grad_norm": 0.6952882409095764, "learning_rate": 0.0009965263622774834, "loss": 3.8946, "step": 410 }, { "epoch": 0.0281967658649273, "grad_norm": 1.0218085050582886, "learning_rate": 0.0009964923902704172, "loss": 4.0107, "step": 415 }, { "epoch": 0.028536485935589075, "grad_norm": 0.8610690832138062, "learning_rate": 0.0009964499252615845, "loss": 3.7804, "step": 420 }, { "epoch": 0.02887620600625085, "grad_norm": 0.7023824453353882, "learning_rate": 0.0009964074602527516, "loss": 4.0759, "step": 425 }, { "epoch": 0.029215926076912625, "grad_norm": 1.0086021423339844, 
"learning_rate": 0.000996364995243919, "loss": 3.902, "step": 430 }, { "epoch": 0.0295556461475744, "grad_norm": 0.6787441968917847, "learning_rate": 0.0009963225302350863, "loss": 3.9347, "step": 435 }, { "epoch": 0.029895366218236172, "grad_norm": 0.6123949289321899, "learning_rate": 0.0009962800652262536, "loss": 3.7943, "step": 440 }, { "epoch": 0.03023508628889795, "grad_norm": 1.6120132207870483, "learning_rate": 0.0009962376002174208, "loss": 3.8031, "step": 445 }, { "epoch": 0.030574806359559722, "grad_norm": 1.0000611543655396, "learning_rate": 0.000996195135208588, "loss": 3.5905, "step": 450 }, { "epoch": 0.0309145264302215, "grad_norm": 0.6701758503913879, "learning_rate": 0.0009961526701997554, "loss": 3.8148, "step": 455 }, { "epoch": 0.03125424650088327, "grad_norm": 0.8623583316802979, "learning_rate": 0.0009961102051909226, "loss": 4.2202, "step": 460 }, { "epoch": 0.03159396657154505, "grad_norm": 2.920541286468506, "learning_rate": 0.00099606774018209, "loss": 3.7864, "step": 465 }, { "epoch": 0.03193368664220682, "grad_norm": 0.7114104628562927, "learning_rate": 0.0009960252751732572, "loss": 3.8087, "step": 470 }, { "epoch": 0.032273406712868595, "grad_norm": 0.7429534196853638, "learning_rate": 0.0009959828101644246, "loss": 3.9977, "step": 475 }, { "epoch": 0.03261312678353037, "grad_norm": 0.7831437587738037, "learning_rate": 0.0009959403451555919, "loss": 3.6709, "step": 480 }, { "epoch": 0.03295284685419215, "grad_norm": 5.341725826263428, "learning_rate": 0.000995897880146759, "loss": 3.6172, "step": 485 }, { "epoch": 0.03329256692485392, "grad_norm": 6.343393325805664, "learning_rate": 0.0009958554151379263, "loss": 3.7971, "step": 490 }, { "epoch": 0.033632286995515695, "grad_norm": 2.8381640911102295, "learning_rate": 0.0009958129501290937, "loss": 3.9297, "step": 495 }, { "epoch": 0.03397200706617747, "grad_norm": 2.0658438205718994, "learning_rate": 0.000995770485120261, "loss": 3.6864, "step": 500 }, { "epoch": 0.03431172713683924, 
"grad_norm": 0.7084658741950989, "learning_rate": 0.0009957280201114281, "loss": 3.7996, "step": 505 }, { "epoch": 0.03465144720750102, "grad_norm": 0.8822794556617737, "learning_rate": 0.000995694048104362, "loss": 3.7756, "step": 510 }, { "epoch": 0.034991167278162795, "grad_norm": 0.9149061441421509, "learning_rate": 0.0009956515830955293, "loss": 3.7757, "step": 515 }, { "epoch": 0.03533088734882457, "grad_norm": 2.1458308696746826, "learning_rate": 0.0009956091180866966, "loss": 3.9065, "step": 520 }, { "epoch": 0.03567060741948634, "grad_norm": 0.6019435524940491, "learning_rate": 0.000995566653077864, "loss": 3.7308, "step": 525 }, { "epoch": 0.03601032749014812, "grad_norm": 0.7739963531494141, "learning_rate": 0.000995524188069031, "loss": 3.5997, "step": 530 }, { "epoch": 0.036350047560809895, "grad_norm": 0.9704915285110474, "learning_rate": 0.0009954817230601984, "loss": 3.8345, "step": 535 }, { "epoch": 0.036689767631471665, "grad_norm": 0.708666980266571, "learning_rate": 0.0009954392580513657, "loss": 3.7339, "step": 540 }, { "epoch": 0.03702948770213344, "grad_norm": 3.0435092449188232, "learning_rate": 0.0009953967930425329, "loss": 3.7423, "step": 545 }, { "epoch": 0.03736920777279522, "grad_norm": 0.6545500159263611, "learning_rate": 0.0009953543280337002, "loss": 4.1361, "step": 550 }, { "epoch": 0.03770892784345699, "grad_norm": 0.7264711856842041, "learning_rate": 0.0009953118630248675, "loss": 3.8293, "step": 555 }, { "epoch": 0.038048647914118765, "grad_norm": 0.7517651915550232, "learning_rate": 0.0009952693980160349, "loss": 3.8654, "step": 560 }, { "epoch": 0.03838836798478054, "grad_norm": 0.8548659086227417, "learning_rate": 0.0009952269330072022, "loss": 3.9084, "step": 565 }, { "epoch": 0.03872808805544232, "grad_norm": 0.7396035194396973, "learning_rate": 0.0009951844679983693, "loss": 4.0147, "step": 570 }, { "epoch": 0.03906780812610409, "grad_norm": 0.6667044758796692, "learning_rate": 0.0009951420029895366, "loss": 3.957, "step": 
575 }, { "epoch": 0.039407528196765865, "grad_norm": 0.8517290949821472, "learning_rate": 0.000995099537980704, "loss": 3.7249, "step": 580 }, { "epoch": 0.03974724826742764, "grad_norm": 0.728651225566864, "learning_rate": 0.000995057072971871, "loss": 3.7024, "step": 585 }, { "epoch": 0.04008696833808941, "grad_norm": 5.29792594909668, "learning_rate": 0.0009950146079630384, "loss": 3.6549, "step": 590 }, { "epoch": 0.04042668840875119, "grad_norm": 0.7174215316772461, "learning_rate": 0.0009949721429542058, "loss": 3.6244, "step": 595 }, { "epoch": 0.040766408479412965, "grad_norm": 0.7276250123977661, "learning_rate": 0.000994929677945373, "loss": 3.8376, "step": 600 }, { "epoch": 0.04110612855007474, "grad_norm": 0.8475777506828308, "learning_rate": 0.0009948872129365402, "loss": 3.7318, "step": 605 }, { "epoch": 0.04144584862073651, "grad_norm": 1.211615800857544, "learning_rate": 0.0009948447479277076, "loss": 3.885, "step": 610 }, { "epoch": 0.04178556869139829, "grad_norm": 0.6988597512245178, "learning_rate": 0.0009948022829188749, "loss": 3.7525, "step": 615 }, { "epoch": 0.042125288762060065, "grad_norm": 0.7887908816337585, "learning_rate": 0.000994759817910042, "loss": 3.5916, "step": 620 }, { "epoch": 0.042465008832721834, "grad_norm": 0.5786921381950378, "learning_rate": 0.0009947173529012096, "loss": 3.3332, "step": 625 }, { "epoch": 0.04280472890338361, "grad_norm": 0.7741575837135315, "learning_rate": 0.0009946748878923767, "loss": 3.5711, "step": 630 }, { "epoch": 0.04314444897404539, "grad_norm": 0.8903182744979858, "learning_rate": 0.000994632422883544, "loss": 3.7417, "step": 635 }, { "epoch": 0.043484169044707165, "grad_norm": 0.6596760749816895, "learning_rate": 0.0009945899578747113, "loss": 3.6869, "step": 640 }, { "epoch": 0.043823889115368934, "grad_norm": 0.6631510257720947, "learning_rate": 0.0009945474928658785, "loss": 4.0542, "step": 645 }, { "epoch": 0.04416360918603071, "grad_norm": 0.6210035085678101, "learning_rate": 
0.0009945050278570458, "loss": 3.9088, "step": 650 }, { "epoch": 0.04450332925669249, "grad_norm": 0.8876652121543884, "learning_rate": 0.0009944625628482131, "loss": 3.7699, "step": 655 }, { "epoch": 0.04484304932735426, "grad_norm": 0.7899766564369202, "learning_rate": 0.0009944200978393805, "loss": 3.66, "step": 660 }, { "epoch": 0.045182769398016034, "grad_norm": 0.7255972027778625, "learning_rate": 0.0009943776328305476, "loss": 3.6946, "step": 665 }, { "epoch": 0.04552248946867781, "grad_norm": 0.7508257627487183, "learning_rate": 0.000994335167821715, "loss": 3.9068, "step": 670 }, { "epoch": 0.04586220953933958, "grad_norm": 0.7828338146209717, "learning_rate": 0.0009942927028128822, "loss": 3.6788, "step": 675 }, { "epoch": 0.04620192961000136, "grad_norm": 0.7975988984107971, "learning_rate": 0.0009942502378040494, "loss": 3.7859, "step": 680 }, { "epoch": 0.046541649680663134, "grad_norm": 1.98026704788208, "learning_rate": 0.0009942077727952167, "loss": 3.8918, "step": 685 }, { "epoch": 0.04688136975132491, "grad_norm": 0.7566472887992859, "learning_rate": 0.000994165307786384, "loss": 3.8669, "step": 690 }, { "epoch": 0.04722108982198668, "grad_norm": 0.7103679180145264, "learning_rate": 0.0009941228427775514, "loss": 3.7938, "step": 695 }, { "epoch": 0.04756080989264846, "grad_norm": 0.6346674561500549, "learning_rate": 0.0009940803777687187, "loss": 3.947, "step": 700 }, { "epoch": 0.047900529963310234, "grad_norm": 3.586427688598633, "learning_rate": 0.0009940379127598858, "loss": 3.8982, "step": 705 }, { "epoch": 0.048240250033972004, "grad_norm": 1.1526113748550415, "learning_rate": 0.0009939954477510532, "loss": 3.7963, "step": 710 }, { "epoch": 0.04857997010463378, "grad_norm": 0.6072191596031189, "learning_rate": 0.0009939529827422205, "loss": 3.6451, "step": 715 }, { "epoch": 0.04891969017529556, "grad_norm": 0.6346344351768494, "learning_rate": 0.0009939105177333876, "loss": 3.81, "step": 720 }, { "epoch": 0.049259410245957334, "grad_norm": 
0.7704496383666992, "learning_rate": 0.000993868052724555, "loss": 3.7004, "step": 725 }, { "epoch": 0.049599130316619104, "grad_norm": 0.7567043304443359, "learning_rate": 0.0009938255877157223, "loss": 3.7435, "step": 730 }, { "epoch": 0.04993885038728088, "grad_norm": 1.031459093093872, "learning_rate": 0.0009937831227068896, "loss": 3.64, "step": 735 }, { "epoch": 0.05027857045794266, "grad_norm": 0.878155529499054, "learning_rate": 0.0009937406576980567, "loss": 3.9853, "step": 740 }, { "epoch": 0.05061829052860443, "grad_norm": 0.7704818248748779, "learning_rate": 0.000993698192689224, "loss": 3.5825, "step": 745 }, { "epoch": 0.050958010599266204, "grad_norm": 0.8314939737319946, "learning_rate": 0.0009936557276803914, "loss": 3.8442, "step": 750 }, { "epoch": 0.05129773066992798, "grad_norm": 0.7763983607292175, "learning_rate": 0.0009936132626715585, "loss": 3.8417, "step": 755 }, { "epoch": 0.05163745074058975, "grad_norm": 0.933583676815033, "learning_rate": 0.000993570797662726, "loss": 3.8935, "step": 760 }, { "epoch": 0.05197717081125153, "grad_norm": 0.8582895994186401, "learning_rate": 0.0009935283326538932, "loss": 3.6834, "step": 765 }, { "epoch": 0.052316890881913304, "grad_norm": 0.9917904138565063, "learning_rate": 0.0009934858676450605, "loss": 3.7319, "step": 770 }, { "epoch": 0.05265661095257508, "grad_norm": 0.8738341927528381, "learning_rate": 0.0009934434026362278, "loss": 3.9219, "step": 775 }, { "epoch": 0.05299633102323685, "grad_norm": 0.7828960418701172, "learning_rate": 0.000993400937627395, "loss": 3.9288, "step": 780 }, { "epoch": 0.05333605109389863, "grad_norm": 0.985833466053009, "learning_rate": 0.0009933584726185623, "loss": 3.6827, "step": 785 }, { "epoch": 0.053675771164560404, "grad_norm": 0.7724753022193909, "learning_rate": 0.0009933160076097296, "loss": 3.8456, "step": 790 }, { "epoch": 0.054015491235222174, "grad_norm": 0.7660916447639465, "learning_rate": 0.000993273542600897, "loss": 3.6957, "step": 795 }, { "epoch": 
0.05435521130588395, "grad_norm": 0.9296520352363586, "learning_rate": 0.0009932310775920643, "loss": 3.9613, "step": 800 }, { "epoch": 0.05469493137654573, "grad_norm": 0.688552975654602, "learning_rate": 0.0009931886125832314, "loss": 4.0053, "step": 805 }, { "epoch": 0.055034651447207504, "grad_norm": 0.6575900912284851, "learning_rate": 0.0009931461475743988, "loss": 3.9485, "step": 810 }, { "epoch": 0.055374371517869274, "grad_norm": 0.9900052547454834, "learning_rate": 0.000993103682565566, "loss": 3.8569, "step": 815 }, { "epoch": 0.05571409158853105, "grad_norm": 0.8874855637550354, "learning_rate": 0.0009930612175567332, "loss": 3.7324, "step": 820 }, { "epoch": 0.05605381165919283, "grad_norm": 0.5782872438430786, "learning_rate": 0.0009930187525479005, "loss": 3.6455, "step": 825 }, { "epoch": 0.0563935317298546, "grad_norm": 0.7115147709846497, "learning_rate": 0.0009929762875390679, "loss": 3.672, "step": 830 }, { "epoch": 0.056733251800516374, "grad_norm": 0.9197357296943665, "learning_rate": 0.0009929338225302352, "loss": 3.8119, "step": 835 }, { "epoch": 0.05707297187117815, "grad_norm": 0.7522328495979309, "learning_rate": 0.0009928913575214023, "loss": 4.0775, "step": 840 }, { "epoch": 0.05741269194183993, "grad_norm": 0.9321434497833252, "learning_rate": 0.0009928488925125697, "loss": 3.9725, "step": 845 }, { "epoch": 0.0577524120125017, "grad_norm": 0.9094996452331543, "learning_rate": 0.000992806427503737, "loss": 3.8618, "step": 850 }, { "epoch": 0.058092132083163474, "grad_norm": 1.000096082687378, "learning_rate": 0.0009927639624949041, "loss": 3.7574, "step": 855 }, { "epoch": 0.05843185215382525, "grad_norm": 0.7474831342697144, "learning_rate": 0.0009927214974860714, "loss": 3.4815, "step": 860 }, { "epoch": 0.05877157222448702, "grad_norm": 0.807633638381958, "learning_rate": 0.0009926790324772388, "loss": 3.8109, "step": 865 }, { "epoch": 0.0591112922951488, "grad_norm": 0.8246572613716125, "learning_rate": 0.0009926365674684061, 
"loss": 3.8575, "step": 870 }, { "epoch": 0.059451012365810574, "grad_norm": 0.7414162755012512, "learning_rate": 0.0009925941024595734, "loss": 3.5456, "step": 875 }, { "epoch": 0.059790732436472344, "grad_norm": 0.8609886765480042, "learning_rate": 0.0009925516374507406, "loss": 3.7127, "step": 880 }, { "epoch": 0.06013045250713412, "grad_norm": 1.048481822013855, "learning_rate": 0.000992509172441908, "loss": 3.7542, "step": 885 }, { "epoch": 0.0604701725777959, "grad_norm": 1.1003047227859497, "learning_rate": 0.0009924667074330752, "loss": 3.9256, "step": 890 }, { "epoch": 0.060809892648457674, "grad_norm": 0.8072906732559204, "learning_rate": 0.0009924242424242424, "loss": 3.8702, "step": 895 }, { "epoch": 0.061149612719119444, "grad_norm": 0.8739524483680725, "learning_rate": 0.0009923817774154097, "loss": 3.8894, "step": 900 }, { "epoch": 0.06148933278978122, "grad_norm": 0.8673437237739563, "learning_rate": 0.000992339312406577, "loss": 3.7243, "step": 905 }, { "epoch": 0.061829052860443, "grad_norm": 1.1381993293762207, "learning_rate": 0.0009922968473977444, "loss": 3.6808, "step": 910 }, { "epoch": 0.06216877293110477, "grad_norm": 0.8593356609344482, "learning_rate": 0.0009922543823889115, "loss": 3.9482, "step": 915 }, { "epoch": 0.06250849300176654, "grad_norm": 0.8123809099197388, "learning_rate": 0.0009922119173800788, "loss": 3.7449, "step": 920 }, { "epoch": 0.06284821307242831, "grad_norm": 0.7861368060112, "learning_rate": 0.0009921694523712461, "loss": 3.8537, "step": 925 }, { "epoch": 0.0631879331430901, "grad_norm": 1.5643202066421509, "learning_rate": 0.0009921269873624133, "loss": 3.8334, "step": 930 }, { "epoch": 0.06352765321375187, "grad_norm": 0.7443520426750183, "learning_rate": 0.0009920845223535808, "loss": 3.9733, "step": 935 }, { "epoch": 0.06386737328441364, "grad_norm": 0.6338669061660767, "learning_rate": 0.000992042057344748, "loss": 3.5986, "step": 940 }, { "epoch": 0.06420709335507542, "grad_norm": 0.7218958139419556, 
"learning_rate": 0.0009919995923359153, "loss": 3.7054, "step": 945 }, { "epoch": 0.06454681342573719, "grad_norm": 0.7437672019004822, "learning_rate": 0.0009919571273270826, "loss": 3.8277, "step": 950 }, { "epoch": 0.06488653349639897, "grad_norm": 0.9110792279243469, "learning_rate": 0.0009919146623182497, "loss": 3.7324, "step": 955 }, { "epoch": 0.06522625356706074, "grad_norm": 0.9134807586669922, "learning_rate": 0.000991872197309417, "loss": 3.8943, "step": 960 }, { "epoch": 0.06556597363772251, "grad_norm": 0.7716910243034363, "learning_rate": 0.0009918297323005844, "loss": 3.7446, "step": 965 }, { "epoch": 0.0659056937083843, "grad_norm": 1.4088274240493774, "learning_rate": 0.0009917872672917517, "loss": 4.0563, "step": 970 }, { "epoch": 0.06624541377904607, "grad_norm": 0.7899318933486938, "learning_rate": 0.0009917448022829188, "loss": 3.966, "step": 975 }, { "epoch": 0.06658513384970784, "grad_norm": 0.7678764462471008, "learning_rate": 0.0009917023372740862, "loss": 3.8197, "step": 980 }, { "epoch": 0.06692485392036962, "grad_norm": 0.9906265735626221, "learning_rate": 0.0009916598722652535, "loss": 3.8889, "step": 985 }, { "epoch": 0.06726457399103139, "grad_norm": 0.819016695022583, "learning_rate": 0.0009916174072564206, "loss": 3.8358, "step": 990 }, { "epoch": 0.06760429406169316, "grad_norm": 0.6557233333587646, "learning_rate": 0.000991574942247588, "loss": 3.6639, "step": 995 }, { "epoch": 0.06794401413235494, "grad_norm": 0.9023678302764893, "learning_rate": 0.0009915324772387553, "loss": 3.7433, "step": 1000 }, { "epoch": 0.06828373420301671, "grad_norm": 0.9478949904441833, "learning_rate": 0.0009914900122299226, "loss": 3.6994, "step": 1005 }, { "epoch": 0.06862345427367848, "grad_norm": 0.8473591804504395, "learning_rate": 0.00099144754722109, "loss": 3.5828, "step": 1010 }, { "epoch": 0.06896317434434027, "grad_norm": 0.8457006812095642, "learning_rate": 0.000991405082212257, "loss": 3.837, "step": 1015 }, { "epoch": 
0.06930289441500204, "grad_norm": 0.6492158770561218, "learning_rate": 0.0009913626172034244, "loss": 3.6678, "step": 1020 }, { "epoch": 0.0696426144856638, "grad_norm": 0.7624703049659729, "learning_rate": 0.0009913201521945917, "loss": 3.4705, "step": 1025 }, { "epoch": 0.06998233455632559, "grad_norm": 0.9796258807182312, "learning_rate": 0.0009912776871857589, "loss": 3.9911, "step": 1030 }, { "epoch": 0.07032205462698736, "grad_norm": 1.2776588201522827, "learning_rate": 0.0009912352221769262, "loss": 3.6767, "step": 1035 }, { "epoch": 0.07066177469764914, "grad_norm": 12.404556274414062, "learning_rate": 0.0009911927571680935, "loss": 3.768, "step": 1040 }, { "epoch": 0.07100149476831091, "grad_norm": 1.8704136610031128, "learning_rate": 0.0009911502921592609, "loss": 3.7307, "step": 1045 }, { "epoch": 0.07134121483897268, "grad_norm": 0.856852114200592, "learning_rate": 0.000991107827150428, "loss": 4.0409, "step": 1050 }, { "epoch": 0.07168093490963447, "grad_norm": 4.296896457672119, "learning_rate": 0.0009910653621415953, "loss": 3.5281, "step": 1055 }, { "epoch": 0.07202065498029624, "grad_norm": 0.8374874591827393, "learning_rate": 0.0009910228971327626, "loss": 3.5951, "step": 1060 }, { "epoch": 0.072360375050958, "grad_norm": 0.7599070072174072, "learning_rate": 0.0009909804321239298, "loss": 3.5626, "step": 1065 }, { "epoch": 0.07270009512161979, "grad_norm": 0.8562057018280029, "learning_rate": 0.000990937967115097, "loss": 3.9585, "step": 1070 }, { "epoch": 0.07303981519228156, "grad_norm": 0.8893164396286011, "learning_rate": 0.0009908955021062644, "loss": 3.5901, "step": 1075 }, { "epoch": 0.07337953526294333, "grad_norm": 0.9515999555587769, "learning_rate": 0.0009908530370974318, "loss": 3.9606, "step": 1080 }, { "epoch": 0.07371925533360511, "grad_norm": 1.0364067554473877, "learning_rate": 0.000990810572088599, "loss": 3.675, "step": 1085 }, { "epoch": 0.07405897540426688, "grad_norm": 1.2571669816970825, "learning_rate": 
0.0009907681070797662, "loss": 3.7761, "step": 1090 }, { "epoch": 0.07439869547492865, "grad_norm": 0.8824610710144043, "learning_rate": 0.0009907256420709336, "loss": 3.9062, "step": 1095 }, { "epoch": 0.07473841554559044, "grad_norm": 0.890859067440033, "learning_rate": 0.0009906831770621009, "loss": 3.3897, "step": 1100 }, { "epoch": 0.0750781356162522, "grad_norm": 0.8173443675041199, "learning_rate": 0.000990640712053268, "loss": 3.993, "step": 1105 }, { "epoch": 0.07541785568691398, "grad_norm": 0.8903078436851501, "learning_rate": 0.0009905982470444353, "loss": 3.699, "step": 1110 }, { "epoch": 0.07575757575757576, "grad_norm": 0.738021194934845, "learning_rate": 0.0009905557820356027, "loss": 3.8033, "step": 1115 }, { "epoch": 0.07609729582823753, "grad_norm": 0.6888086795806885, "learning_rate": 0.00099051331702677, "loss": 3.7134, "step": 1120 }, { "epoch": 0.07643701589889931, "grad_norm": 0.7198616862297058, "learning_rate": 0.0009904708520179371, "loss": 3.8802, "step": 1125 }, { "epoch": 0.07677673596956108, "grad_norm": 0.9630284905433655, "learning_rate": 0.0009904283870091045, "loss": 3.6282, "step": 1130 }, { "epoch": 0.07711645604022285, "grad_norm": 1.0574638843536377, "learning_rate": 0.0009903859220002718, "loss": 3.8039, "step": 1135 }, { "epoch": 0.07745617611088464, "grad_norm": 0.9752181768417358, "learning_rate": 0.0009903434569914391, "loss": 3.4804, "step": 1140 }, { "epoch": 0.0777958961815464, "grad_norm": 1.334782600402832, "learning_rate": 0.0009903009919826065, "loss": 3.7378, "step": 1145 }, { "epoch": 0.07813561625220818, "grad_norm": 1.9561891555786133, "learning_rate": 0.0009902585269737736, "loss": 3.6782, "step": 1150 }, { "epoch": 0.07847533632286996, "grad_norm": 0.8632113933563232, "learning_rate": 0.000990216061964941, "loss": 3.8068, "step": 1155 }, { "epoch": 0.07881505639353173, "grad_norm": 0.9060015082359314, "learning_rate": 0.0009901735969561083, "loss": 3.6075, "step": 1160 }, { "epoch": 0.0791547764641935, 
"grad_norm": 1.7545231580734253, "learning_rate": 0.0009901311319472754, "loss": 3.8398, "step": 1165 }, { "epoch": 0.07949449653485528, "grad_norm": 1.700802206993103, "learning_rate": 0.0009900886669384427, "loss": 3.8608, "step": 1170 }, { "epoch": 0.07983421660551705, "grad_norm": 0.9124142527580261, "learning_rate": 0.00099004620192961, "loss": 3.6128, "step": 1175 }, { "epoch": 0.08017393667617882, "grad_norm": 0.8943872451782227, "learning_rate": 0.0009900037369207774, "loss": 4.1011, "step": 1180 }, { "epoch": 0.0805136567468406, "grad_norm": 2.9984893798828125, "learning_rate": 0.0009899612719119447, "loss": 3.8071, "step": 1185 }, { "epoch": 0.08085337681750238, "grad_norm": 0.7909166216850281, "learning_rate": 0.0009899188069031118, "loss": 3.9357, "step": 1190 }, { "epoch": 0.08119309688816416, "grad_norm": 0.772480845451355, "learning_rate": 0.0009898763418942792, "loss": 3.5053, "step": 1195 }, { "epoch": 0.08153281695882593, "grad_norm": 0.9136141538619995, "learning_rate": 0.0009898338768854465, "loss": 3.8633, "step": 1200 }, { "epoch": 0.0818725370294877, "grad_norm": 1.1152701377868652, "learning_rate": 0.0009897914118766136, "loss": 3.9823, "step": 1205 }, { "epoch": 0.08221225710014948, "grad_norm": 1.7145951986312866, "learning_rate": 0.000989748946867781, "loss": 3.5545, "step": 1210 }, { "epoch": 0.08255197717081125, "grad_norm": 0.9226241111755371, "learning_rate": 0.0009897064818589483, "loss": 3.6389, "step": 1215 }, { "epoch": 0.08289169724147302, "grad_norm": 0.9822276830673218, "learning_rate": 0.0009896640168501156, "loss": 3.6279, "step": 1220 }, { "epoch": 0.0832314173121348, "grad_norm": 0.7141802310943604, "learning_rate": 0.0009896215518412827, "loss": 3.6348, "step": 1225 }, { "epoch": 0.08357113738279658, "grad_norm": 0.8988625407218933, "learning_rate": 0.00098957908683245, "loss": 3.8623, "step": 1230 }, { "epoch": 0.08391085745345835, "grad_norm": 1.111480951309204, "learning_rate": 0.0009895366218236174, "loss": 3.6635, 
"step": 1235 }, { "epoch": 0.08425057752412013, "grad_norm": 2.281259298324585, "learning_rate": 0.0009894941568147845, "loss": 3.7415, "step": 1240 }, { "epoch": 0.0845902975947819, "grad_norm": 0.9362971186637878, "learning_rate": 0.000989451691805952, "loss": 3.4877, "step": 1245 }, { "epoch": 0.08493001766544367, "grad_norm": 1.362188458442688, "learning_rate": 0.0009894092267971192, "loss": 3.8064, "step": 1250 }, { "epoch": 0.08526973773610545, "grad_norm": 1.8082140684127808, "learning_rate": 0.0009893667617882865, "loss": 3.931, "step": 1255 }, { "epoch": 0.08560945780676722, "grad_norm": 1.323808193206787, "learning_rate": 0.0009893242967794539, "loss": 3.5872, "step": 1260 }, { "epoch": 0.08594917787742899, "grad_norm": 0.8788350224494934, "learning_rate": 0.000989281831770621, "loss": 3.6909, "step": 1265 }, { "epoch": 0.08628889794809078, "grad_norm": 0.8858904838562012, "learning_rate": 0.0009892393667617883, "loss": 3.7136, "step": 1270 }, { "epoch": 0.08662861801875255, "grad_norm": 0.9549212455749512, "learning_rate": 0.0009891969017529556, "loss": 3.7499, "step": 1275 }, { "epoch": 0.08696833808941433, "grad_norm": 0.740705132484436, "learning_rate": 0.000989154436744123, "loss": 3.8848, "step": 1280 }, { "epoch": 0.0873080581600761, "grad_norm": 0.862654983997345, "learning_rate": 0.00098911197173529, "loss": 4.2804, "step": 1285 }, { "epoch": 0.08764777823073787, "grad_norm": 0.7785455584526062, "learning_rate": 0.0009890695067264574, "loss": 3.7363, "step": 1290 }, { "epoch": 0.08798749830139965, "grad_norm": 0.8337371349334717, "learning_rate": 0.0009890270417176248, "loss": 3.7323, "step": 1295 }, { "epoch": 0.08832721837206142, "grad_norm": 0.8959690928459167, "learning_rate": 0.0009889845767087919, "loss": 3.9642, "step": 1300 }, { "epoch": 0.08866693844272319, "grad_norm": 0.8264482617378235, "learning_rate": 0.0009889421116999592, "loss": 3.7875, "step": 1305 }, { "epoch": 0.08900665851338498, "grad_norm": 0.8109356164932251, 
"learning_rate": 0.0009888996466911265, "loss": 3.8195, "step": 1310 }, { "epoch": 0.08934637858404675, "grad_norm": 1.2459375858306885, "learning_rate": 0.0009888571816822939, "loss": 3.6398, "step": 1315 }, { "epoch": 0.08968609865470852, "grad_norm": 0.7455396056175232, "learning_rate": 0.0009888147166734612, "loss": 3.4594, "step": 1320 }, { "epoch": 0.0900258187253703, "grad_norm": 0.9231400489807129, "learning_rate": 0.0009887722516646283, "loss": 3.4865, "step": 1325 }, { "epoch": 0.09036553879603207, "grad_norm": 1.488303780555725, "learning_rate": 0.0009887297866557957, "loss": 3.8084, "step": 1330 }, { "epoch": 0.09070525886669384, "grad_norm": 0.9516574740409851, "learning_rate": 0.000988687321646963, "loss": 3.755, "step": 1335 }, { "epoch": 0.09104497893735562, "grad_norm": 0.8424415588378906, "learning_rate": 0.0009886448566381301, "loss": 3.9466, "step": 1340 }, { "epoch": 0.09138469900801739, "grad_norm": 0.7749091982841492, "learning_rate": 0.0009886023916292975, "loss": 3.5022, "step": 1345 }, { "epoch": 0.09172441907867916, "grad_norm": 0.8888652324676514, "learning_rate": 0.0009885599266204648, "loss": 3.5713, "step": 1350 }, { "epoch": 0.09206413914934095, "grad_norm": 0.6865494847297668, "learning_rate": 0.0009885174616116321, "loss": 3.8195, "step": 1355 }, { "epoch": 0.09240385922000272, "grad_norm": 1.859156608581543, "learning_rate": 0.0009884749966027992, "loss": 3.7009, "step": 1360 }, { "epoch": 0.0927435792906645, "grad_norm": 1.3464438915252686, "learning_rate": 0.0009884325315939666, "loss": 3.7004, "step": 1365 }, { "epoch": 0.09308329936132627, "grad_norm": 0.7437602281570435, "learning_rate": 0.000988390066585134, "loss": 3.8187, "step": 1370 }, { "epoch": 0.09342301943198804, "grad_norm": 2.26497220993042, "learning_rate": 0.000988347601576301, "loss": 3.825, "step": 1375 }, { "epoch": 0.09376273950264982, "grad_norm": 0.7989596128463745, "learning_rate": 0.0009883051365674684, "loss": 3.6425, "step": 1380 }, { "epoch": 
0.09410245957331159, "grad_norm": 1.4595651626586914, "learning_rate": 0.0009882626715586357, "loss": 3.7493, "step": 1385 }, { "epoch": 0.09444217964397336, "grad_norm": 0.9590712785720825, "learning_rate": 0.000988220206549803, "loss": 3.7617, "step": 1390 }, { "epoch": 0.09478189971463515, "grad_norm": 0.8468039035797119, "learning_rate": 0.0009881777415409704, "loss": 3.7232, "step": 1395 }, { "epoch": 0.09512161978529692, "grad_norm": 1.4002819061279297, "learning_rate": 0.0009881352765321375, "loss": 3.7733, "step": 1400 }, { "epoch": 0.09546133985595869, "grad_norm": 0.7018969655036926, "learning_rate": 0.0009880928115233048, "loss": 3.7824, "step": 1405 }, { "epoch": 0.09580105992662047, "grad_norm": 0.7855573296546936, "learning_rate": 0.0009880503465144721, "loss": 3.8372, "step": 1410 }, { "epoch": 0.09614077999728224, "grad_norm": 0.859128475189209, "learning_rate": 0.0009880078815056393, "loss": 3.81, "step": 1415 }, { "epoch": 0.09648050006794401, "grad_norm": 0.8860381841659546, "learning_rate": 0.0009879654164968066, "loss": 3.9377, "step": 1420 }, { "epoch": 0.09682022013860579, "grad_norm": 0.9167178869247437, "learning_rate": 0.000987922951487974, "loss": 3.8068, "step": 1425 }, { "epoch": 0.09715994020926756, "grad_norm": 0.7468544840812683, "learning_rate": 0.0009878804864791413, "loss": 3.7223, "step": 1430 }, { "epoch": 0.09749966027992933, "grad_norm": 0.8998669981956482, "learning_rate": 0.0009878380214703084, "loss": 3.6592, "step": 1435 }, { "epoch": 0.09783938035059112, "grad_norm": 1.0629231929779053, "learning_rate": 0.0009877955564614757, "loss": 3.807, "step": 1440 }, { "epoch": 0.09817910042125289, "grad_norm": 0.7246888875961304, "learning_rate": 0.000987753091452643, "loss": 3.6433, "step": 1445 }, { "epoch": 0.09851882049191467, "grad_norm": 0.7344433069229126, "learning_rate": 0.0009877106264438102, "loss": 3.7726, "step": 1450 }, { "epoch": 0.09885854056257644, "grad_norm": 0.7685291171073914, "learning_rate": 
0.0009876681614349777, "loss": 3.8038, "step": 1455 }, { "epoch": 0.09919826063323821, "grad_norm": 0.9896209239959717, "learning_rate": 0.0009876256964261448, "loss": 3.8093, "step": 1460 }, { "epoch": 0.09953798070389999, "grad_norm": 1.2785459756851196, "learning_rate": 0.0009875832314173122, "loss": 3.7351, "step": 1465 }, { "epoch": 0.09987770077456176, "grad_norm": 0.9382307529449463, "learning_rate": 0.0009875407664084795, "loss": 3.9197, "step": 1470 }, { "epoch": 0.10021742084522353, "grad_norm": 0.9357280135154724, "learning_rate": 0.0009874983013996466, "loss": 3.4588, "step": 1475 }, { "epoch": 0.10055714091588532, "grad_norm": 1.1583462953567505, "learning_rate": 0.000987455836390814, "loss": 3.8352, "step": 1480 }, { "epoch": 0.10089686098654709, "grad_norm": 0.7879663705825806, "learning_rate": 0.0009874133713819813, "loss": 3.5996, "step": 1485 }, { "epoch": 0.10123658105720885, "grad_norm": 0.8481245040893555, "learning_rate": 0.0009873709063731486, "loss": 3.6632, "step": 1490 }, { "epoch": 0.10157630112787064, "grad_norm": 0.9569376707077026, "learning_rate": 0.000987328441364316, "loss": 3.6764, "step": 1495 }, { "epoch": 0.10191602119853241, "grad_norm": 0.8419762253761292, "learning_rate": 0.000987285976355483, "loss": 3.8042, "step": 1500 }, { "epoch": 0.10225574126919418, "grad_norm": 0.9063109755516052, "learning_rate": 0.0009872435113466504, "loss": 3.8043, "step": 1505 }, { "epoch": 0.10259546133985596, "grad_norm": 0.8225113153457642, "learning_rate": 0.0009872010463378177, "loss": 3.6731, "step": 1510 }, { "epoch": 0.10293518141051773, "grad_norm": 0.8258070945739746, "learning_rate": 0.0009871585813289849, "loss": 3.7656, "step": 1515 }, { "epoch": 0.1032749014811795, "grad_norm": 0.8679206371307373, "learning_rate": 0.0009871161163201522, "loss": 3.6329, "step": 1520 }, { "epoch": 0.10361462155184128, "grad_norm": 0.8350735306739807, "learning_rate": 0.0009870736513113195, "loss": 3.839, "step": 1525 }, { "epoch": 0.10395434162250305, 
"grad_norm": 0.8663796186447144, "learning_rate": 0.0009870311863024869, "loss": 3.7562, "step": 1530 }, { "epoch": 0.10429406169316484, "grad_norm": 0.9908185005187988, "learning_rate": 0.000986988721293654, "loss": 3.8996, "step": 1535 }, { "epoch": 0.10463378176382661, "grad_norm": 0.9591052532196045, "learning_rate": 0.0009869462562848213, "loss": 3.7704, "step": 1540 }, { "epoch": 0.10497350183448838, "grad_norm": 0.7848603129386902, "learning_rate": 0.0009869037912759887, "loss": 3.7675, "step": 1545 }, { "epoch": 0.10531322190515016, "grad_norm": 0.9244189858436584, "learning_rate": 0.0009868613262671558, "loss": 3.6957, "step": 1550 }, { "epoch": 0.10565294197581193, "grad_norm": 0.9196615815162659, "learning_rate": 0.000986818861258323, "loss": 3.6481, "step": 1555 }, { "epoch": 0.1059926620464737, "grad_norm": 1.0094982385635376, "learning_rate": 0.0009867763962494904, "loss": 3.7318, "step": 1560 }, { "epoch": 0.10633238211713548, "grad_norm": 1.0959396362304688, "learning_rate": 0.0009867339312406578, "loss": 3.8066, "step": 1565 }, { "epoch": 0.10667210218779725, "grad_norm": 0.9997076988220215, "learning_rate": 0.0009866914662318251, "loss": 3.8032, "step": 1570 }, { "epoch": 0.10701182225845902, "grad_norm": 0.8974732756614685, "learning_rate": 0.0009866490012229922, "loss": 3.9183, "step": 1575 }, { "epoch": 0.10735154232912081, "grad_norm": 0.846490204334259, "learning_rate": 0.0009866065362141596, "loss": 3.8647, "step": 1580 }, { "epoch": 0.10769126239978258, "grad_norm": 1.1947354078292847, "learning_rate": 0.000986564071205327, "loss": 3.7207, "step": 1585 }, { "epoch": 0.10803098247044435, "grad_norm": 1.358474612236023, "learning_rate": 0.000986521606196494, "loss": 3.6404, "step": 1590 }, { "epoch": 0.10837070254110613, "grad_norm": 2.6317670345306396, "learning_rate": 0.0009864791411876613, "loss": 3.9389, "step": 1595 }, { "epoch": 0.1087104226117679, "grad_norm": 0.6671351194381714, "learning_rate": 0.0009864366761788287, "loss": 3.778, 
"step": 1600 }, { "epoch": 0.10905014268242967, "grad_norm": 0.8585983514785767, "learning_rate": 0.000986394211169996, "loss": 3.7464, "step": 1605 }, { "epoch": 0.10938986275309145, "grad_norm": 1.1288174390792847, "learning_rate": 0.0009863517461611631, "loss": 3.8817, "step": 1610 }, { "epoch": 0.10972958282375322, "grad_norm": 0.8400837182998657, "learning_rate": 0.0009863092811523305, "loss": 3.6986, "step": 1615 }, { "epoch": 0.11006930289441501, "grad_norm": 0.9099103808403015, "learning_rate": 0.0009862668161434978, "loss": 3.6086, "step": 1620 }, { "epoch": 0.11040902296507678, "grad_norm": 0.8050627112388611, "learning_rate": 0.000986224351134665, "loss": 3.6629, "step": 1625 }, { "epoch": 0.11074874303573855, "grad_norm": 0.9488096833229065, "learning_rate": 0.0009861818861258325, "loss": 3.8388, "step": 1630 }, { "epoch": 0.11108846310640033, "grad_norm": 0.8927670121192932, "learning_rate": 0.0009861394211169996, "loss": 3.8022, "step": 1635 }, { "epoch": 0.1114281831770621, "grad_norm": 0.931071400642395, "learning_rate": 0.000986096956108167, "loss": 3.8763, "step": 1640 }, { "epoch": 0.11176790324772387, "grad_norm": 0.810113251209259, "learning_rate": 0.0009860544910993343, "loss": 3.8414, "step": 1645 }, { "epoch": 0.11210762331838565, "grad_norm": 0.7648155689239502, "learning_rate": 0.0009860120260905014, "loss": 3.6125, "step": 1650 }, { "epoch": 0.11244734338904742, "grad_norm": 0.9502131938934326, "learning_rate": 0.0009859695610816687, "loss": 3.8951, "step": 1655 }, { "epoch": 0.1127870634597092, "grad_norm": 0.9022631645202637, "learning_rate": 0.000985927096072836, "loss": 3.7585, "step": 1660 }, { "epoch": 0.11312678353037098, "grad_norm": 1.2253316640853882, "learning_rate": 0.0009858846310640034, "loss": 3.7148, "step": 1665 }, { "epoch": 0.11346650360103275, "grad_norm": 0.8762560486793518, "learning_rate": 0.0009858421660551705, "loss": 3.6876, "step": 1670 }, { "epoch": 0.11380622367169452, "grad_norm": 0.9013214707374573, 
"learning_rate": 0.0009857997010463378, "loss": 3.8321, "step": 1675 }, { "epoch": 0.1141459437423563, "grad_norm": 0.9819822907447815, "learning_rate": 0.0009857572360375052, "loss": 3.7541, "step": 1680 }, { "epoch": 0.11448566381301807, "grad_norm": 1.1562237739562988, "learning_rate": 0.0009857147710286723, "loss": 3.6825, "step": 1685 }, { "epoch": 0.11482538388367985, "grad_norm": 4.4117326736450195, "learning_rate": 0.0009856723060198396, "loss": 3.4404, "step": 1690 }, { "epoch": 0.11516510395434162, "grad_norm": 0.819749116897583, "learning_rate": 0.000985629841011007, "loss": 3.6752, "step": 1695 }, { "epoch": 0.1155048240250034, "grad_norm": 3.5368049144744873, "learning_rate": 0.0009855873760021743, "loss": 3.8157, "step": 1700 }, { "epoch": 0.11584454409566518, "grad_norm": 1.264100193977356, "learning_rate": 0.0009855449109933416, "loss": 3.489, "step": 1705 }, { "epoch": 0.11618426416632695, "grad_norm": 0.8171218037605286, "learning_rate": 0.0009855024459845087, "loss": 3.9336, "step": 1710 }, { "epoch": 0.11652398423698872, "grad_norm": 1.6298147439956665, "learning_rate": 0.000985459980975676, "loss": 3.4704, "step": 1715 }, { "epoch": 0.1168637043076505, "grad_norm": 1.0205110311508179, "learning_rate": 0.0009854175159668434, "loss": 3.8422, "step": 1720 }, { "epoch": 0.11720342437831227, "grad_norm": 1.4235666990280151, "learning_rate": 0.0009853750509580105, "loss": 3.5537, "step": 1725 }, { "epoch": 0.11754314444897404, "grad_norm": 0.9652959704399109, "learning_rate": 0.0009853325859491779, "loss": 3.6292, "step": 1730 }, { "epoch": 0.11788286451963582, "grad_norm": 1.3239145278930664, "learning_rate": 0.0009852901209403452, "loss": 3.6585, "step": 1735 }, { "epoch": 0.1182225845902976, "grad_norm": 1.226503849029541, "learning_rate": 0.0009852476559315125, "loss": 3.8414, "step": 1740 }, { "epoch": 0.11856230466095936, "grad_norm": 0.9430757164955139, "learning_rate": 0.0009852051909226796, "loss": 3.9673, "step": 1745 }, { "epoch": 
0.11890202473162115, "grad_norm": 0.8921589851379395, "learning_rate": 0.000985162725913847, "loss": 3.8353, "step": 1750 }, { "epoch": 0.11924174480228292, "grad_norm": 0.9406383037567139, "learning_rate": 0.0009851202609050143, "loss": 3.8043, "step": 1755 }, { "epoch": 0.11958146487294469, "grad_norm": 0.9224092960357666, "learning_rate": 0.0009850777958961814, "loss": 3.8546, "step": 1760 }, { "epoch": 0.11992118494360647, "grad_norm": 0.9947287440299988, "learning_rate": 0.0009850353308873488, "loss": 3.7377, "step": 1765 }, { "epoch": 0.12026090501426824, "grad_norm": 4.942892551422119, "learning_rate": 0.000984992865878516, "loss": 3.8323, "step": 1770 }, { "epoch": 0.12060062508493002, "grad_norm": 0.896720290184021, "learning_rate": 0.0009849504008696834, "loss": 3.7119, "step": 1775 }, { "epoch": 0.1209403451555918, "grad_norm": 0.8440753817558289, "learning_rate": 0.0009849079358608508, "loss": 3.817, "step": 1780 }, { "epoch": 0.12128006522625356, "grad_norm": 1.0429508686065674, "learning_rate": 0.0009848654708520179, "loss": 3.8501, "step": 1785 }, { "epoch": 0.12161978529691535, "grad_norm": 1.1498671770095825, "learning_rate": 0.0009848230058431852, "loss": 3.8335, "step": 1790 }, { "epoch": 0.12195950536757712, "grad_norm": 2.392529010772705, "learning_rate": 0.0009847805408343525, "loss": 3.5912, "step": 1795 }, { "epoch": 0.12229922543823889, "grad_norm": 0.9909056425094604, "learning_rate": 0.0009847380758255197, "loss": 3.6471, "step": 1800 }, { "epoch": 0.12263894550890067, "grad_norm": 1.0706111192703247, "learning_rate": 0.000984695610816687, "loss": 3.8209, "step": 1805 }, { "epoch": 0.12297866557956244, "grad_norm": 0.9448966979980469, "learning_rate": 0.0009846531458078543, "loss": 3.7984, "step": 1810 }, { "epoch": 0.12331838565022421, "grad_norm": 2.2566635608673096, "learning_rate": 0.0009846106807990217, "loss": 3.7272, "step": 1815 }, { "epoch": 0.123658105720886, "grad_norm": 6.327447414398193, "learning_rate": 0.000984568215790189, 
"loss": 3.7645, "step": 1820 }, { "epoch": 0.12399782579154776, "grad_norm": 1.339421033859253, "learning_rate": 0.0009845257507813561, "loss": 3.714, "step": 1825 }, { "epoch": 0.12433754586220953, "grad_norm": 0.7758365869522095, "learning_rate": 0.0009844832857725235, "loss": 3.8172, "step": 1830 }, { "epoch": 0.12467726593287132, "grad_norm": 0.9230892062187195, "learning_rate": 0.0009844408207636908, "loss": 3.8503, "step": 1835 }, { "epoch": 0.1250169860035331, "grad_norm": 1.4020636081695557, "learning_rate": 0.0009843983557548581, "loss": 3.8338, "step": 1840 }, { "epoch": 0.12535670607419486, "grad_norm": 1.2434756755828857, "learning_rate": 0.0009843558907460252, "loss": 3.647, "step": 1845 }, { "epoch": 0.12569642614485663, "grad_norm": 8.151143074035645, "learning_rate": 0.0009843134257371926, "loss": 3.8525, "step": 1850 }, { "epoch": 0.12603614621551842, "grad_norm": 0.9581377506256104, "learning_rate": 0.00098427096072836, "loss": 3.7861, "step": 1855 }, { "epoch": 0.1263758662861802, "grad_norm": 1.7839243412017822, "learning_rate": 0.000984228495719527, "loss": 3.7245, "step": 1860 }, { "epoch": 0.12671558635684196, "grad_norm": 0.9004167914390564, "learning_rate": 0.0009841860307106944, "loss": 3.5004, "step": 1865 }, { "epoch": 0.12705530642750373, "grad_norm": 0.8131628036499023, "learning_rate": 0.0009841435657018617, "loss": 3.8869, "step": 1870 }, { "epoch": 0.1273950264981655, "grad_norm": 0.8481993079185486, "learning_rate": 0.000984101100693029, "loss": 3.6441, "step": 1875 }, { "epoch": 0.12773474656882727, "grad_norm": 1.0214792490005493, "learning_rate": 0.0009840586356841964, "loss": 3.7145, "step": 1880 }, { "epoch": 0.12807446663948907, "grad_norm": 0.8244206309318542, "learning_rate": 0.0009840161706753635, "loss": 3.813, "step": 1885 }, { "epoch": 0.12841418671015084, "grad_norm": 2.0562472343444824, "learning_rate": 0.0009839737056665308, "loss": 3.3392, "step": 1890 }, { "epoch": 0.1287539067808126, "grad_norm": 
1.0052398443222046, "learning_rate": 0.0009839312406576982, "loss": 3.7567, "step": 1895 }, { "epoch": 0.12909362685147438, "grad_norm": 1.0360190868377686, "learning_rate": 0.0009838887756488653, "loss": 3.639, "step": 1900 }, { "epoch": 0.12943334692213615, "grad_norm": 0.8587487936019897, "learning_rate": 0.0009838463106400326, "loss": 3.5315, "step": 1905 }, { "epoch": 0.12977306699279795, "grad_norm": 0.9683775305747986, "learning_rate": 0.0009838038456312, "loss": 3.7487, "step": 1910 }, { "epoch": 0.13011278706345972, "grad_norm": 1.282637357711792, "learning_rate": 0.0009837613806223673, "loss": 3.8959, "step": 1915 }, { "epoch": 0.1304525071341215, "grad_norm": 0.8685353994369507, "learning_rate": 0.0009837189156135344, "loss": 3.4848, "step": 1920 }, { "epoch": 0.13079222720478326, "grad_norm": 0.9828671216964722, "learning_rate": 0.0009836764506047017, "loss": 3.8562, "step": 1925 }, { "epoch": 0.13113194727544503, "grad_norm": 1.0845454931259155, "learning_rate": 0.000983633985595869, "loss": 3.7217, "step": 1930 }, { "epoch": 0.1314716673461068, "grad_norm": 1.23124361038208, "learning_rate": 0.0009835915205870362, "loss": 3.7246, "step": 1935 }, { "epoch": 0.1318113874167686, "grad_norm": 0.9317628145217896, "learning_rate": 0.0009835490555782037, "loss": 3.3452, "step": 1940 }, { "epoch": 0.13215110748743036, "grad_norm": 1.4222614765167236, "learning_rate": 0.0009835065905693708, "loss": 3.6844, "step": 1945 }, { "epoch": 0.13249082755809213, "grad_norm": 0.9690610766410828, "learning_rate": 0.0009834641255605382, "loss": 3.931, "step": 1950 }, { "epoch": 0.1328305476287539, "grad_norm": 3.070981502532959, "learning_rate": 0.0009834216605517055, "loss": 3.7255, "step": 1955 }, { "epoch": 0.13317026769941567, "grad_norm": 3.982217788696289, "learning_rate": 0.0009833791955428726, "loss": 3.8027, "step": 1960 }, { "epoch": 0.13350998777007744, "grad_norm": 1.5346640348434448, "learning_rate": 0.00098333673053404, "loss": 3.4694, "step": 1965 }, { 
"epoch": 0.13384970784073924, "grad_norm": 1.0817947387695312, "learning_rate": 0.0009832942655252073, "loss": 3.6565, "step": 1970 }, { "epoch": 0.134189427911401, "grad_norm": 0.8839321136474609, "learning_rate": 0.0009832518005163746, "loss": 3.9581, "step": 1975 }, { "epoch": 0.13452914798206278, "grad_norm": 0.8975902199745178, "learning_rate": 0.0009832093355075418, "loss": 3.6309, "step": 1980 }, { "epoch": 0.13486886805272455, "grad_norm": 0.8173681497573853, "learning_rate": 0.000983166870498709, "loss": 3.8113, "step": 1985 }, { "epoch": 0.13520858812338632, "grad_norm": 7.649469375610352, "learning_rate": 0.0009831244054898764, "loss": 3.7194, "step": 1990 }, { "epoch": 0.13554830819404812, "grad_norm": 1.0095553398132324, "learning_rate": 0.0009830819404810435, "loss": 3.7517, "step": 1995 }, { "epoch": 0.1358880282647099, "grad_norm": 1.6297639608383179, "learning_rate": 0.0009830394754722109, "loss": 3.7012, "step": 2000 }, { "epoch": 0.13622774833537166, "grad_norm": 1.7483009099960327, "learning_rate": 0.0009829970104633782, "loss": 3.6334, "step": 2005 }, { "epoch": 0.13656746840603343, "grad_norm": 3.8151636123657227, "learning_rate": 0.0009829545454545455, "loss": 4.0326, "step": 2010 }, { "epoch": 0.1369071884766952, "grad_norm": 0.7680081129074097, "learning_rate": 0.0009829120804457129, "loss": 3.7788, "step": 2015 }, { "epoch": 0.13724690854735697, "grad_norm": 1.3243752717971802, "learning_rate": 0.00098286961543688, "loss": 3.8952, "step": 2020 }, { "epoch": 0.13758662861801876, "grad_norm": 1.0775601863861084, "learning_rate": 0.0009828271504280473, "loss": 3.5555, "step": 2025 }, { "epoch": 0.13792634868868053, "grad_norm": 1.292656421661377, "learning_rate": 0.0009827846854192147, "loss": 3.6526, "step": 2030 }, { "epoch": 0.1382660687593423, "grad_norm": 0.9375522136688232, "learning_rate": 0.0009827422204103818, "loss": 3.888, "step": 2035 }, { "epoch": 0.13860578883000407, "grad_norm": 3.986186981201172, "learning_rate": 
0.0009826997554015491, "loss": 3.8163, "step": 2040 }, { "epoch": 0.13894550890066584, "grad_norm": 1.5529648065567017, "learning_rate": 0.0009826572903927164, "loss": 3.9133, "step": 2045 }, { "epoch": 0.1392852289713276, "grad_norm": 0.9338223934173584, "learning_rate": 0.0009826148253838838, "loss": 3.7221, "step": 2050 }, { "epoch": 0.1396249490419894, "grad_norm": 1.8957241773605347, "learning_rate": 0.0009825808533768176, "loss": 3.8174, "step": 2055 }, { "epoch": 0.13996466911265118, "grad_norm": 0.8655341863632202, "learning_rate": 0.0009825383883679847, "loss": 3.893, "step": 2060 }, { "epoch": 0.14030438918331295, "grad_norm": 1.5160977840423584, "learning_rate": 0.000982495923359152, "loss": 3.9705, "step": 2065 }, { "epoch": 0.14064410925397472, "grad_norm": 1.0823280811309814, "learning_rate": 0.0009824534583503194, "loss": 3.6846, "step": 2070 }, { "epoch": 0.1409838293246365, "grad_norm": 0.9917528629302979, "learning_rate": 0.0009824109933414867, "loss": 3.6039, "step": 2075 }, { "epoch": 0.1413235493952983, "grad_norm": 1.1528750658035278, "learning_rate": 0.0009823685283326538, "loss": 3.9146, "step": 2080 }, { "epoch": 0.14166326946596006, "grad_norm": 0.9987397193908691, "learning_rate": 0.0009823260633238212, "loss": 3.9201, "step": 2085 }, { "epoch": 0.14200298953662183, "grad_norm": 3.388103723526001, "learning_rate": 0.0009822835983149885, "loss": 3.7847, "step": 2090 }, { "epoch": 0.1423427096072836, "grad_norm": 2.958061933517456, "learning_rate": 0.0009822411333061556, "loss": 3.8261, "step": 2095 }, { "epoch": 0.14268242967794537, "grad_norm": 1.0954190492630005, "learning_rate": 0.0009821986682973232, "loss": 3.7465, "step": 2100 }, { "epoch": 0.14302214974860714, "grad_norm": 1.0909405946731567, "learning_rate": 0.0009821562032884903, "loss": 3.841, "step": 2105 }, { "epoch": 0.14336186981926893, "grad_norm": 0.9117960929870605, "learning_rate": 0.0009821137382796576, "loss": 3.8822, "step": 2110 }, { "epoch": 0.1437015898899307, 
"grad_norm": 1.0005829334259033, "learning_rate": 0.000982071273270825, "loss": 3.388, "step": 2115 }, { "epoch": 0.14404130996059247, "grad_norm": 0.8273672461509705, "learning_rate": 0.000982028808261992, "loss": 3.8661, "step": 2120 }, { "epoch": 0.14438103003125424, "grad_norm": 0.9983308911323547, "learning_rate": 0.0009819863432531594, "loss": 3.725, "step": 2125 }, { "epoch": 0.144720750101916, "grad_norm": 3.3538196086883545, "learning_rate": 0.0009819523712460932, "loss": 3.6801, "step": 2130 }, { "epoch": 0.14506047017257778, "grad_norm": 1.2225327491760254, "learning_rate": 0.0009819099062372606, "loss": 3.7144, "step": 2135 }, { "epoch": 0.14540019024323958, "grad_norm": 0.8632490634918213, "learning_rate": 0.0009818674412284277, "loss": 3.6914, "step": 2140 }, { "epoch": 0.14573991031390135, "grad_norm": 1.2504016160964966, "learning_rate": 0.000981824976219595, "loss": 3.8282, "step": 2145 }, { "epoch": 0.14607963038456312, "grad_norm": 3.1403310298919678, "learning_rate": 0.0009817825112107624, "loss": 3.6254, "step": 2150 }, { "epoch": 0.1464193504552249, "grad_norm": 1.0072838068008423, "learning_rate": 0.0009817400462019295, "loss": 4.0933, "step": 2155 }, { "epoch": 0.14675907052588666, "grad_norm": 0.8260823488235474, "learning_rate": 0.000981697581193097, "loss": 3.7271, "step": 2160 }, { "epoch": 0.14709879059654846, "grad_norm": 1.485294222831726, "learning_rate": 0.0009816551161842641, "loss": 3.663, "step": 2165 }, { "epoch": 0.14743851066721023, "grad_norm": 1.23434579372406, "learning_rate": 0.0009816126511754315, "loss": 3.5008, "step": 2170 }, { "epoch": 0.147778230737872, "grad_norm": 0.926327109336853, "learning_rate": 0.0009815701861665988, "loss": 3.9196, "step": 2175 }, { "epoch": 0.14811795080853377, "grad_norm": 1.2125409841537476, "learning_rate": 0.000981527721157766, "loss": 3.6527, "step": 2180 }, { "epoch": 0.14845767087919554, "grad_norm": 1.115444540977478, "learning_rate": 0.0009814852561489333, "loss": 3.8389, "step": 
2185 }, { "epoch": 0.1487973909498573, "grad_norm": 0.9969165921211243, "learning_rate": 0.0009814427911401006, "loss": 3.7869, "step": 2190 }, { "epoch": 0.1491371110205191, "grad_norm": 5.858527183532715, "learning_rate": 0.000981400326131268, "loss": 3.7871, "step": 2195 }, { "epoch": 0.14947683109118087, "grad_norm": 0.9607635736465454, "learning_rate": 0.000981357861122435, "loss": 3.8173, "step": 2200 }, { "epoch": 0.14981655116184264, "grad_norm": 0.9475060105323792, "learning_rate": 0.0009813153961136024, "loss": 3.7473, "step": 2205 }, { "epoch": 0.1501562712325044, "grad_norm": 0.9208068251609802, "learning_rate": 0.0009812729311047697, "loss": 3.8942, "step": 2210 }, { "epoch": 0.15049599130316618, "grad_norm": 0.8789984583854675, "learning_rate": 0.0009812304660959368, "loss": 3.6955, "step": 2215 }, { "epoch": 0.15083571137382795, "grad_norm": 3.6499037742614746, "learning_rate": 0.0009811880010871042, "loss": 3.9092, "step": 2220 }, { "epoch": 0.15117543144448975, "grad_norm": 0.9104393720626831, "learning_rate": 0.0009811455360782715, "loss": 3.9388, "step": 2225 }, { "epoch": 0.15151515151515152, "grad_norm": 1.0234278440475464, "learning_rate": 0.0009811030710694388, "loss": 3.4865, "step": 2230 }, { "epoch": 0.1518548715858133, "grad_norm": 1.0703814029693604, "learning_rate": 0.0009810606060606062, "loss": 3.9071, "step": 2235 }, { "epoch": 0.15219459165647506, "grad_norm": 3.024217367172241, "learning_rate": 0.0009810181410517733, "loss": 3.9006, "step": 2240 }, { "epoch": 0.15253431172713683, "grad_norm": 0.8184770941734314, "learning_rate": 0.0009809756760429406, "loss": 3.8883, "step": 2245 }, { "epoch": 0.15287403179779863, "grad_norm": 1.2970143556594849, "learning_rate": 0.000980933211034108, "loss": 3.6743, "step": 2250 }, { "epoch": 0.1532137518684604, "grad_norm": 1.096877098083496, "learning_rate": 0.000980890746025275, "loss": 3.8647, "step": 2255 }, { "epoch": 0.15355347193912217, "grad_norm": 1.2687857151031494, "learning_rate": 
0.0009808482810164426, "loss": 3.9413, "step": 2260 }, { "epoch": 0.15389319200978394, "grad_norm": 0.8756415843963623, "learning_rate": 0.0009808058160076097, "loss": 3.8106, "step": 2265 }, { "epoch": 0.1542329120804457, "grad_norm": 0.8927282094955444, "learning_rate": 0.000980763350998777, "loss": 3.9466, "step": 2270 }, { "epoch": 0.15457263215110748, "grad_norm": 3.157271146774292, "learning_rate": 0.0009807208859899444, "loss": 3.8162, "step": 2275 }, { "epoch": 0.15491235222176927, "grad_norm": 2.2995424270629883, "learning_rate": 0.0009806784209811115, "loss": 3.8071, "step": 2280 }, { "epoch": 0.15525207229243104, "grad_norm": 1.3203309774398804, "learning_rate": 0.0009806359559722789, "loss": 3.7351, "step": 2285 }, { "epoch": 0.1555917923630928, "grad_norm": 1.072256326675415, "learning_rate": 0.0009805934909634462, "loss": 3.6425, "step": 2290 }, { "epoch": 0.15593151243375458, "grad_norm": 1.1517857313156128, "learning_rate": 0.0009805510259546135, "loss": 3.6289, "step": 2295 }, { "epoch": 0.15627123250441635, "grad_norm": 1.386529803276062, "learning_rate": 0.0009805085609457807, "loss": 3.8352, "step": 2300 }, { "epoch": 0.15661095257507815, "grad_norm": 0.9818822741508484, "learning_rate": 0.000980466095936948, "loss": 3.8734, "step": 2305 }, { "epoch": 0.15695067264573992, "grad_norm": 0.986213743686676, "learning_rate": 0.0009804236309281153, "loss": 3.5463, "step": 2310 }, { "epoch": 0.1572903927164017, "grad_norm": 1.1116042137145996, "learning_rate": 0.0009803811659192824, "loss": 3.7944, "step": 2315 }, { "epoch": 0.15763011278706346, "grad_norm": 1.3098955154418945, "learning_rate": 0.0009803387009104498, "loss": 3.7798, "step": 2320 }, { "epoch": 0.15796983285772523, "grad_norm": 3.1325738430023193, "learning_rate": 0.0009802962359016171, "loss": 3.9257, "step": 2325 }, { "epoch": 0.158309552928387, "grad_norm": 7.448719024658203, "learning_rate": 0.0009802537708927844, "loss": 3.6736, "step": 2330 }, { "epoch": 0.1586492729990488, 
"grad_norm": 0.8241439461708069, "learning_rate": 0.0009802113058839518, "loss": 3.6845, "step": 2335 }, { "epoch": 0.15898899306971057, "grad_norm": 0.9338255524635315, "learning_rate": 0.000980168840875119, "loss": 3.7762, "step": 2340 }, { "epoch": 0.15932871314037234, "grad_norm": 1.609569787979126, "learning_rate": 0.0009801263758662862, "loss": 3.7934, "step": 2345 }, { "epoch": 0.1596684332110341, "grad_norm": 1.4362223148345947, "learning_rate": 0.0009800839108574536, "loss": 3.9315, "step": 2350 }, { "epoch": 0.16000815328169588, "grad_norm": 0.9223277568817139, "learning_rate": 0.0009800414458486207, "loss": 3.7548, "step": 2355 }, { "epoch": 0.16034787335235764, "grad_norm": 1.8061391115188599, "learning_rate": 0.000979998980839788, "loss": 3.8439, "step": 2360 }, { "epoch": 0.16068759342301944, "grad_norm": 1.382835030555725, "learning_rate": 0.0009799565158309553, "loss": 3.5235, "step": 2365 }, { "epoch": 0.1610273134936812, "grad_norm": 7.1331682205200195, "learning_rate": 0.0009799140508221227, "loss": 3.6911, "step": 2370 }, { "epoch": 0.16136703356434298, "grad_norm": 0.9806751608848572, "learning_rate": 0.0009798715858132898, "loss": 3.7754, "step": 2375 }, { "epoch": 0.16170675363500475, "grad_norm": 1.4782449007034302, "learning_rate": 0.0009798291208044571, "loss": 3.6179, "step": 2380 }, { "epoch": 0.16204647370566652, "grad_norm": 2.394925594329834, "learning_rate": 0.0009797866557956245, "loss": 3.7103, "step": 2385 }, { "epoch": 0.16238619377632832, "grad_norm": 1.6098804473876953, "learning_rate": 0.0009797441907867916, "loss": 3.8824, "step": 2390 }, { "epoch": 0.1627259138469901, "grad_norm": 1.6841319799423218, "learning_rate": 0.000979701725777959, "loss": 3.6052, "step": 2395 }, { "epoch": 0.16306563391765186, "grad_norm": 0.9434406757354736, "learning_rate": 0.0009796592607691263, "loss": 3.8283, "step": 2400 }, { "epoch": 0.16340535398831363, "grad_norm": 1.4997507333755493, "learning_rate": 0.0009796167957602936, "loss": 3.6265, 
"step": 2405 }, { "epoch": 0.1637450740589754, "grad_norm": 1.0215040445327759, "learning_rate": 0.000979574330751461, "loss": 3.9202, "step": 2410 }, { "epoch": 0.16408479412963717, "grad_norm": 0.9753285646438599, "learning_rate": 0.000979531865742628, "loss": 3.9119, "step": 2415 }, { "epoch": 0.16442451420029897, "grad_norm": 1.2793364524841309, "learning_rate": 0.0009794894007337954, "loss": 3.7716, "step": 2420 }, { "epoch": 0.16476423427096074, "grad_norm": 1.156853437423706, "learning_rate": 0.0009794469357249627, "loss": 3.706, "step": 2425 }, { "epoch": 0.1651039543416225, "grad_norm": 1.2804151773452759, "learning_rate": 0.0009794044707161298, "loss": 3.7514, "step": 2430 }, { "epoch": 0.16544367441228428, "grad_norm": 0.9373441934585571, "learning_rate": 0.0009793620057072972, "loss": 3.9168, "step": 2435 }, { "epoch": 0.16578339448294604, "grad_norm": 1.5992696285247803, "learning_rate": 0.0009793195406984645, "loss": 3.748, "step": 2440 }, { "epoch": 0.16612311455360781, "grad_norm": 2.7159674167633057, "learning_rate": 0.0009792770756896318, "loss": 3.7834, "step": 2445 }, { "epoch": 0.1664628346242696, "grad_norm": 1.1414713859558105, "learning_rate": 0.000979234610680799, "loss": 3.6727, "step": 2450 }, { "epoch": 0.16680255469493138, "grad_norm": 1.5436903238296509, "learning_rate": 0.0009791921456719663, "loss": 3.6689, "step": 2455 }, { "epoch": 0.16714227476559315, "grad_norm": 1.2675707340240479, "learning_rate": 0.0009791496806631336, "loss": 3.7599, "step": 2460 }, { "epoch": 0.16748199483625492, "grad_norm": 1.2044775485992432, "learning_rate": 0.0009791072156543007, "loss": 3.6642, "step": 2465 }, { "epoch": 0.1678217149069167, "grad_norm": 1.3783915042877197, "learning_rate": 0.0009790647506454683, "loss": 3.8745, "step": 2470 }, { "epoch": 0.1681614349775785, "grad_norm": 1.3515933752059937, "learning_rate": 0.0009790222856366354, "loss": 3.8872, "step": 2475 }, { "epoch": 0.16850115504824026, "grad_norm": 1.0111433267593384, 
"learning_rate": 0.0009789798206278027, "loss": 3.741, "step": 2480 }, { "epoch": 0.16884087511890203, "grad_norm": 0.9573282599449158, "learning_rate": 0.00097893735561897, "loss": 3.8242, "step": 2485 }, { "epoch": 0.1691805951895638, "grad_norm": 0.7742440104484558, "learning_rate": 0.0009788948906101372, "loss": 3.8347, "step": 2490 }, { "epoch": 0.16952031526022557, "grad_norm": 0.9897531270980835, "learning_rate": 0.0009788524256013045, "loss": 3.7535, "step": 2495 }, { "epoch": 0.16986003533088734, "grad_norm": 0.9153067469596863, "learning_rate": 0.0009788099605924719, "loss": 3.9184, "step": 2500 }, { "epoch": 0.17019975540154914, "grad_norm": 0.9063498377799988, "learning_rate": 0.0009787674955836392, "loss": 3.8914, "step": 2505 }, { "epoch": 0.1705394754722109, "grad_norm": 0.9240171909332275, "learning_rate": 0.0009787250305748063, "loss": 3.9282, "step": 2510 }, { "epoch": 0.17087919554287267, "grad_norm": 0.834861159324646, "learning_rate": 0.0009786825655659736, "loss": 3.7448, "step": 2515 }, { "epoch": 0.17121891561353444, "grad_norm": 0.7987897992134094, "learning_rate": 0.000978640100557141, "loss": 3.9192, "step": 2520 }, { "epoch": 0.17155863568419621, "grad_norm": 0.894929826259613, "learning_rate": 0.000978597635548308, "loss": 3.9142, "step": 2525 }, { "epoch": 0.17189835575485798, "grad_norm": 1.107903003692627, "learning_rate": 0.0009785551705394754, "loss": 3.7509, "step": 2530 }, { "epoch": 0.17223807582551978, "grad_norm": 1.1137334108352661, "learning_rate": 0.0009785127055306428, "loss": 3.953, "step": 2535 }, { "epoch": 0.17257779589618155, "grad_norm": 0.9571302533149719, "learning_rate": 0.00097847024052181, "loss": 3.6519, "step": 2540 }, { "epoch": 0.17291751596684332, "grad_norm": 2.3584060668945312, "learning_rate": 0.0009784277755129774, "loss": 3.6474, "step": 2545 }, { "epoch": 0.1732572360375051, "grad_norm": 0.8544610142707825, "learning_rate": 0.0009783853105041446, "loss": 3.8408, "step": 2550 }, { "epoch": 
0.17359695610816686, "grad_norm": 0.9105238318443298, "learning_rate": 0.0009783428454953119, "loss": 3.6319, "step": 2555 }, { "epoch": 0.17393667617882866, "grad_norm": 2.2374250888824463, "learning_rate": 0.0009783003804864792, "loss": 3.5937, "step": 2560 }, { "epoch": 0.17427639624949043, "grad_norm": 1.2461915016174316, "learning_rate": 0.0009782579154776463, "loss": 3.7096, "step": 2565 }, { "epoch": 0.1746161163201522, "grad_norm": 1.4567878246307373, "learning_rate": 0.0009782154504688137, "loss": 3.8027, "step": 2570 }, { "epoch": 0.17495583639081397, "grad_norm": 1.6885274648666382, "learning_rate": 0.000978172985459981, "loss": 3.9262, "step": 2575 }, { "epoch": 0.17529555646147574, "grad_norm": 1.0545746088027954, "learning_rate": 0.0009781305204511483, "loss": 3.8887, "step": 2580 }, { "epoch": 0.1756352765321375, "grad_norm": 0.9892961978912354, "learning_rate": 0.0009780880554423157, "loss": 3.6468, "step": 2585 }, { "epoch": 0.1759749966027993, "grad_norm": 2.638918876647949, "learning_rate": 0.0009780455904334828, "loss": 3.5822, "step": 2590 }, { "epoch": 0.17631471667346107, "grad_norm": 1.1376413106918335, "learning_rate": 0.0009780031254246501, "loss": 3.6826, "step": 2595 }, { "epoch": 0.17665443674412284, "grad_norm": 1.1181039810180664, "learning_rate": 0.0009779606604158175, "loss": 3.5247, "step": 2600 }, { "epoch": 0.17699415681478461, "grad_norm": 2.557750940322876, "learning_rate": 0.0009779181954069846, "loss": 4.0601, "step": 2605 }, { "epoch": 0.17733387688544638, "grad_norm": 1.0081487894058228, "learning_rate": 0.000977875730398152, "loss": 3.7308, "step": 2610 }, { "epoch": 0.17767359695610815, "grad_norm": 1.083447813987732, "learning_rate": 0.0009778332653893192, "loss": 3.4752, "step": 2615 }, { "epoch": 0.17801331702676995, "grad_norm": 1.0089781284332275, "learning_rate": 0.0009777908003804866, "loss": 3.7016, "step": 2620 }, { "epoch": 0.17835303709743172, "grad_norm": 1.192327857017517, "learning_rate": 
0.0009777483353716537, "loss": 3.6756, "step": 2625 }, { "epoch": 0.1786927571680935, "grad_norm": 1.0910143852233887, "learning_rate": 0.000977705870362821, "loss": 3.6284, "step": 2630 }, { "epoch": 0.17903247723875526, "grad_norm": 0.9538121223449707, "learning_rate": 0.0009776634053539884, "loss": 3.7692, "step": 2635 }, { "epoch": 0.17937219730941703, "grad_norm": 1.1228162050247192, "learning_rate": 0.0009776209403451555, "loss": 3.8607, "step": 2640 }, { "epoch": 0.17971191738007883, "grad_norm": 0.779090940952301, "learning_rate": 0.000977578475336323, "loss": 3.7614, "step": 2645 }, { "epoch": 0.1800516374507406, "grad_norm": 1.5266573429107666, "learning_rate": 0.0009775360103274902, "loss": 3.9324, "step": 2650 }, { "epoch": 0.18039135752140237, "grad_norm": 6.9554443359375, "learning_rate": 0.0009774935453186575, "loss": 3.8629, "step": 2655 }, { "epoch": 0.18073107759206414, "grad_norm": 0.86854487657547, "learning_rate": 0.0009774510803098248, "loss": 3.5737, "step": 2660 }, { "epoch": 0.1810707976627259, "grad_norm": 1.1867231130599976, "learning_rate": 0.000977408615300992, "loss": 3.7427, "step": 2665 }, { "epoch": 0.18141051773338768, "grad_norm": 1.3124732971191406, "learning_rate": 0.0009773661502921593, "loss": 3.6951, "step": 2670 }, { "epoch": 0.18175023780404947, "grad_norm": 1.5641621351242065, "learning_rate": 0.0009773236852833266, "loss": 3.7546, "step": 2675 }, { "epoch": 0.18208995787471124, "grad_norm": 1.3718196153640747, "learning_rate": 0.000977281220274494, "loss": 3.8613, "step": 2680 }, { "epoch": 0.18242967794537301, "grad_norm": 0.7548995018005371, "learning_rate": 0.000977238755265661, "loss": 3.7759, "step": 2685 }, { "epoch": 0.18276939801603478, "grad_norm": 1.0932202339172363, "learning_rate": 0.0009771962902568284, "loss": 3.7571, "step": 2690 }, { "epoch": 0.18310911808669655, "grad_norm": 0.9052344560623169, "learning_rate": 0.0009771538252479957, "loss": 3.3339, "step": 2695 }, { "epoch": 0.18344883815735832, 
"grad_norm": 0.9928195476531982, "learning_rate": 0.0009771113602391628, "loss": 3.9271, "step": 2700 }, { "epoch": 0.18378855822802012, "grad_norm": 0.9526640176773071, "learning_rate": 0.0009770688952303302, "loss": 3.5935, "step": 2705 }, { "epoch": 0.1841282782986819, "grad_norm": 1.4885016679763794, "learning_rate": 0.0009770264302214975, "loss": 3.6338, "step": 2710 }, { "epoch": 0.18446799836934366, "grad_norm": 1.1409516334533691, "learning_rate": 0.0009769839652126648, "loss": 3.7831, "step": 2715 }, { "epoch": 0.18480771844000543, "grad_norm": 1.3963998556137085, "learning_rate": 0.0009769415002038322, "loss": 3.7944, "step": 2720 }, { "epoch": 0.1851474385106672, "grad_norm": 1.2904924154281616, "learning_rate": 0.0009768990351949993, "loss": 3.8231, "step": 2725 }, { "epoch": 0.185487158581329, "grad_norm": 1.1239421367645264, "learning_rate": 0.0009768565701861666, "loss": 3.9729, "step": 2730 }, { "epoch": 0.18582687865199077, "grad_norm": 1.2960641384124756, "learning_rate": 0.000976814105177334, "loss": 3.7999, "step": 2735 }, { "epoch": 0.18616659872265254, "grad_norm": 0.8526661992073059, "learning_rate": 0.000976771640168501, "loss": 3.5701, "step": 2740 }, { "epoch": 0.1865063187933143, "grad_norm": 1.0448435544967651, "learning_rate": 0.0009767291751596684, "loss": 3.6045, "step": 2745 }, { "epoch": 0.18684603886397608, "grad_norm": 1.1888383626937866, "learning_rate": 0.0009766867101508358, "loss": 3.8475, "step": 2750 }, { "epoch": 0.18718575893463785, "grad_norm": 1.1792293787002563, "learning_rate": 0.000976644245142003, "loss": 3.9914, "step": 2755 }, { "epoch": 0.18752547900529964, "grad_norm": 0.9176615476608276, "learning_rate": 0.0009766017801331702, "loss": 3.7687, "step": 2760 }, { "epoch": 0.18786519907596141, "grad_norm": 0.8101356625556946, "learning_rate": 0.0009765593151243376, "loss": 3.7697, "step": 2765 }, { "epoch": 0.18820491914662318, "grad_norm": 0.9384803175926208, "learning_rate": 0.0009765168501155049, "loss": 3.848, 
"step": 2770 }, { "epoch": 0.18854463921728495, "grad_norm": 1.1290396451950073, "learning_rate": 0.0009764743851066721, "loss": 4.0388, "step": 2775 }, { "epoch": 0.18888435928794672, "grad_norm": 2.2341105937957764, "learning_rate": 0.0009764319200978394, "loss": 3.7882, "step": 2780 }, { "epoch": 0.1892240793586085, "grad_norm": 1.0076885223388672, "learning_rate": 0.0009763894550890067, "loss": 3.4656, "step": 2785 }, { "epoch": 0.1895637994292703, "grad_norm": 1.120369791984558, "learning_rate": 0.0009763469900801739, "loss": 3.7514, "step": 2790 }, { "epoch": 0.18990351949993206, "grad_norm": 1.1124889850616455, "learning_rate": 0.0009763045250713412, "loss": 3.5524, "step": 2795 }, { "epoch": 0.19024323957059383, "grad_norm": 1.6071346998214722, "learning_rate": 0.0009762620600625086, "loss": 3.6405, "step": 2800 }, { "epoch": 0.1905829596412556, "grad_norm": 1.3395817279815674, "learning_rate": 0.0009762195950536758, "loss": 3.7095, "step": 2805 }, { "epoch": 0.19092267971191737, "grad_norm": 0.9652655720710754, "learning_rate": 0.0009761771300448431, "loss": 3.3256, "step": 2810 }, { "epoch": 0.19126239978257917, "grad_norm": 1.9510139226913452, "learning_rate": 0.0009761346650360103, "loss": 3.7513, "step": 2815 }, { "epoch": 0.19160211985324094, "grad_norm": 1.8108229637145996, "learning_rate": 0.0009760922000271776, "loss": 3.927, "step": 2820 }, { "epoch": 0.1919418399239027, "grad_norm": 1.8414236307144165, "learning_rate": 0.0009760497350183449, "loss": 3.6162, "step": 2825 }, { "epoch": 0.19228155999456448, "grad_norm": 1.0390222072601318, "learning_rate": 0.0009760072700095121, "loss": 3.9196, "step": 2830 }, { "epoch": 0.19262128006522625, "grad_norm": 1.0651882886886597, "learning_rate": 0.0009759648050006795, "loss": 3.7384, "step": 2835 }, { "epoch": 0.19296100013588802, "grad_norm": 1.1565048694610596, "learning_rate": 0.0009759223399918468, "loss": 3.6771, "step": 2840 }, { "epoch": 0.19330072020654981, "grad_norm": 1.0209771394729614, 
"learning_rate": 0.000975879874983014, "loss": 3.7915, "step": 2845 }, { "epoch": 0.19364044027721158, "grad_norm": 0.8734944462776184, "learning_rate": 0.0009758374099741812, "loss": 3.7533, "step": 2850 }, { "epoch": 0.19398016034787335, "grad_norm": 1.24446702003479, "learning_rate": 0.0009757949449653486, "loss": 3.8535, "step": 2855 }, { "epoch": 0.19431988041853512, "grad_norm": 1.0102795362472534, "learning_rate": 0.0009757524799565158, "loss": 3.7478, "step": 2860 }, { "epoch": 0.1946596004891969, "grad_norm": 1.4605876207351685, "learning_rate": 0.000975710014947683, "loss": 3.8051, "step": 2865 }, { "epoch": 0.19499932055985866, "grad_norm": 1.0731106996536255, "learning_rate": 0.0009756675499388505, "loss": 3.5559, "step": 2870 }, { "epoch": 0.19533904063052046, "grad_norm": 1.1022913455963135, "learning_rate": 0.0009756250849300177, "loss": 3.7159, "step": 2875 }, { "epoch": 0.19567876070118223, "grad_norm": 2.289674997329712, "learning_rate": 0.0009755826199211849, "loss": 3.8255, "step": 2880 }, { "epoch": 0.196018480771844, "grad_norm": 0.989314615726471, "learning_rate": 0.0009755401549123523, "loss": 3.6562, "step": 2885 }, { "epoch": 0.19635820084250577, "grad_norm": 0.9591164588928223, "learning_rate": 0.0009754976899035195, "loss": 3.353, "step": 2890 }, { "epoch": 0.19669792091316754, "grad_norm": 1.3540157079696655, "learning_rate": 0.0009754552248946867, "loss": 3.4217, "step": 2895 }, { "epoch": 0.19703764098382934, "grad_norm": 1.3590593338012695, "learning_rate": 0.000975412759885854, "loss": 3.8313, "step": 2900 }, { "epoch": 0.1973773610544911, "grad_norm": 1.5452762842178345, "learning_rate": 0.0009753702948770214, "loss": 3.5026, "step": 2905 }, { "epoch": 0.19771708112515288, "grad_norm": 1.65347421169281, "learning_rate": 0.0009753278298681887, "loss": 3.6087, "step": 2910 }, { "epoch": 0.19805680119581465, "grad_norm": 1.7748278379440308, "learning_rate": 0.0009752853648593559, "loss": 3.7453, "step": 2915 }, { "epoch": 
0.19839652126647642, "grad_norm": 1.1099189519882202, "learning_rate": 0.0009752428998505232, "loss": 3.6955, "step": 2920 }, { "epoch": 0.1987362413371382, "grad_norm": 0.9053229689598083, "learning_rate": 0.0009752004348416905, "loss": 3.6697, "step": 2925 }, { "epoch": 0.19907596140779998, "grad_norm": 2.4213836193084717, "learning_rate": 0.0009751579698328577, "loss": 3.5097, "step": 2930 }, { "epoch": 0.19941568147846175, "grad_norm": 0.9235332012176514, "learning_rate": 0.000975115504824025, "loss": 3.8964, "step": 2935 }, { "epoch": 0.19975540154912352, "grad_norm": 1.3368321657180786, "learning_rate": 0.0009750730398151924, "loss": 3.5361, "step": 2940 }, { "epoch": 0.2000951216197853, "grad_norm": 0.9944940805435181, "learning_rate": 0.0009750305748063596, "loss": 3.7339, "step": 2945 }, { "epoch": 0.20043484169044706, "grad_norm": 1.3750721216201782, "learning_rate": 0.0009749881097975268, "loss": 3.8428, "step": 2950 }, { "epoch": 0.20077456176110883, "grad_norm": 0.8513739705085754, "learning_rate": 0.0009749456447886942, "loss": 4.0376, "step": 2955 }, { "epoch": 0.20111428183177063, "grad_norm": 3.2792444229125977, "learning_rate": 0.0009749031797798614, "loss": 3.8573, "step": 2960 }, { "epoch": 0.2014540019024324, "grad_norm": 0.8513579368591309, "learning_rate": 0.0009748607147710286, "loss": 3.6531, "step": 2965 }, { "epoch": 0.20179372197309417, "grad_norm": 0.8898109793663025, "learning_rate": 0.000974818249762196, "loss": 3.6765, "step": 2970 }, { "epoch": 0.20213344204375594, "grad_norm": 1.1673920154571533, "learning_rate": 0.0009747757847533633, "loss": 3.6582, "step": 2975 }, { "epoch": 0.2024731621144177, "grad_norm": 0.9699649810791016, "learning_rate": 0.0009747333197445305, "loss": 3.7179, "step": 2980 }, { "epoch": 0.2028128821850795, "grad_norm": 1.229149341583252, "learning_rate": 0.0009746908547356979, "loss": 3.6074, "step": 2985 }, { "epoch": 0.20315260225574128, "grad_norm": 0.8139545917510986, "learning_rate": 
0.0009746483897268651, "loss": 3.7504, "step": 2990 }, { "epoch": 0.20349232232640305, "grad_norm": 1.3915001153945923, "learning_rate": 0.0009746059247180323, "loss": 3.8052, "step": 2995 }, { "epoch": 0.20383204239706482, "grad_norm": 0.7467848658561707, "learning_rate": 0.0009745634597091996, "loss": 3.6932, "step": 3000 }, { "epoch": 0.2041717624677266, "grad_norm": 2.1742453575134277, "learning_rate": 0.0009745209947003669, "loss": 3.6885, "step": 3005 }, { "epoch": 0.20451148253838836, "grad_norm": 0.9873047471046448, "learning_rate": 0.0009744785296915342, "loss": 3.8317, "step": 3010 }, { "epoch": 0.20485120260905015, "grad_norm": 1.5980221033096313, "learning_rate": 0.0009744360646827015, "loss": 3.8775, "step": 3015 }, { "epoch": 0.20519092267971192, "grad_norm": 1.013993501663208, "learning_rate": 0.0009743935996738688, "loss": 3.8972, "step": 3020 }, { "epoch": 0.2055306427503737, "grad_norm": 1.1979862451553345, "learning_rate": 0.000974351134665036, "loss": 3.822, "step": 3025 }, { "epoch": 0.20587036282103546, "grad_norm": 6.292854309082031, "learning_rate": 0.0009743086696562033, "loss": 3.8041, "step": 3030 }, { "epoch": 0.20621008289169723, "grad_norm": 0.9071376919746399, "learning_rate": 0.0009742662046473706, "loss": 3.841, "step": 3035 }, { "epoch": 0.206549802962359, "grad_norm": 1.1299073696136475, "learning_rate": 0.0009742237396385378, "loss": 3.7188, "step": 3040 }, { "epoch": 0.2068895230330208, "grad_norm": 2.8120391368865967, "learning_rate": 0.0009741812746297052, "loss": 3.6011, "step": 3045 }, { "epoch": 0.20722924310368257, "grad_norm": 5.167077541351318, "learning_rate": 0.0009741388096208724, "loss": 3.6947, "step": 3050 }, { "epoch": 0.20756896317434434, "grad_norm": 1.0063681602478027, "learning_rate": 0.0009740963446120397, "loss": 3.5803, "step": 3055 }, { "epoch": 0.2079086832450061, "grad_norm": 2.4568755626678467, "learning_rate": 0.000974053879603207, "loss": 3.779, "step": 3060 }, { "epoch": 0.20824840331566788, 
"grad_norm": 0.7745293378829956, "learning_rate": 0.0009740114145943742, "loss": 3.6158, "step": 3065 }, { "epoch": 0.20858812338632968, "grad_norm": 1.889550805091858, "learning_rate": 0.0009739689495855415, "loss": 3.8687, "step": 3070 }, { "epoch": 0.20892784345699145, "grad_norm": 0.9048097729682922, "learning_rate": 0.0009739264845767088, "loss": 3.6131, "step": 3075 }, { "epoch": 0.20926756352765322, "grad_norm": 1.323327660560608, "learning_rate": 0.0009738840195678761, "loss": 3.6296, "step": 3080 }, { "epoch": 0.209607283598315, "grad_norm": 1.1745115518569946, "learning_rate": 0.0009738415545590434, "loss": 3.9373, "step": 3085 }, { "epoch": 0.20994700366897676, "grad_norm": 1.3808313608169556, "learning_rate": 0.0009737990895502107, "loss": 3.7342, "step": 3090 }, { "epoch": 0.21028672373963853, "grad_norm": 3.4337244033813477, "learning_rate": 0.0009737566245413779, "loss": 3.7983, "step": 3095 }, { "epoch": 0.21062644381030032, "grad_norm": 1.2716083526611328, "learning_rate": 0.0009737141595325451, "loss": 3.982, "step": 3100 }, { "epoch": 0.2109661638809621, "grad_norm": 1.2337026596069336, "learning_rate": 0.0009736716945237125, "loss": 3.6968, "step": 3105 }, { "epoch": 0.21130588395162386, "grad_norm": 1.1251949071884155, "learning_rate": 0.0009736292295148797, "loss": 3.9145, "step": 3110 }, { "epoch": 0.21164560402228563, "grad_norm": 1.759033203125, "learning_rate": 0.000973586764506047, "loss": 3.7322, "step": 3115 }, { "epoch": 0.2119853240929474, "grad_norm": 2.0175018310546875, "learning_rate": 0.0009735442994972144, "loss": 3.7358, "step": 3120 }, { "epoch": 0.21232504416360917, "grad_norm": 2.0821621417999268, "learning_rate": 0.0009735018344883816, "loss": 3.508, "step": 3125 }, { "epoch": 0.21266476423427097, "grad_norm": 1.0507006645202637, "learning_rate": 0.0009734593694795488, "loss": 3.8879, "step": 3130 }, { "epoch": 0.21300448430493274, "grad_norm": 3.81201171875, "learning_rate": 0.0009734169044707162, "loss": 3.6689, "step": 
3135 }, { "epoch": 0.2133442043755945, "grad_norm": 1.0013079643249512, "learning_rate": 0.0009733744394618834, "loss": 3.6224, "step": 3140 }, { "epoch": 0.21368392444625628, "grad_norm": 1.2463504076004028, "learning_rate": 0.0009733319744530506, "loss": 3.7982, "step": 3145 }, { "epoch": 0.21402364451691805, "grad_norm": 1.0237860679626465, "learning_rate": 0.000973289509444218, "loss": 3.3468, "step": 3150 }, { "epoch": 0.21436336458757985, "grad_norm": 1.2770980596542358, "learning_rate": 0.0009732470444353853, "loss": 3.6819, "step": 3155 }, { "epoch": 0.21470308465824162, "grad_norm": 1.0352874994277954, "learning_rate": 0.0009732045794265525, "loss": 3.7567, "step": 3160 }, { "epoch": 0.2150428047289034, "grad_norm": 0.8460654020309448, "learning_rate": 0.0009731621144177198, "loss": 3.733, "step": 3165 }, { "epoch": 0.21538252479956516, "grad_norm": 1.786299705505371, "learning_rate": 0.0009731196494088871, "loss": 3.5113, "step": 3170 }, { "epoch": 0.21572224487022693, "grad_norm": 1.8967534303665161, "learning_rate": 0.0009730771844000543, "loss": 3.7083, "step": 3175 }, { "epoch": 0.2160619649408887, "grad_norm": 0.8761997818946838, "learning_rate": 0.0009730347193912217, "loss": 3.773, "step": 3180 }, { "epoch": 0.2164016850115505, "grad_norm": 1.4060221910476685, "learning_rate": 0.000972992254382389, "loss": 3.7749, "step": 3185 }, { "epoch": 0.21674140508221226, "grad_norm": 1.9329566955566406, "learning_rate": 0.0009729497893735562, "loss": 3.9475, "step": 3190 }, { "epoch": 0.21708112515287403, "grad_norm": 1.0064440965652466, "learning_rate": 0.0009729073243647235, "loss": 3.7091, "step": 3195 }, { "epoch": 0.2174208452235358, "grad_norm": 0.9790199398994446, "learning_rate": 0.0009728648593558907, "loss": 3.8766, "step": 3200 }, { "epoch": 0.21776056529419757, "grad_norm": 1.7721149921417236, "learning_rate": 0.000972822394347058, "loss": 3.9381, "step": 3205 }, { "epoch": 0.21810028536485934, "grad_norm": 1.106036901473999, "learning_rate": 
0.0009727799293382253, "loss": 3.8851, "step": 3210 }, { "epoch": 0.21844000543552114, "grad_norm": 1.3976128101348877, "learning_rate": 0.0009727374643293926, "loss": 3.7519, "step": 3215 }, { "epoch": 0.2187797255061829, "grad_norm": 1.051262617111206, "learning_rate": 0.0009726949993205599, "loss": 3.6319, "step": 3220 }, { "epoch": 0.21911944557684468, "grad_norm": 1.0471906661987305, "learning_rate": 0.0009726525343117272, "loss": 3.6213, "step": 3225 }, { "epoch": 0.21945916564750645, "grad_norm": 1.4823367595672607, "learning_rate": 0.0009726100693028944, "loss": 3.778, "step": 3230 }, { "epoch": 0.21979888571816822, "grad_norm": 1.1066375970840454, "learning_rate": 0.0009725676042940616, "loss": 3.7011, "step": 3235 }, { "epoch": 0.22013860578883002, "grad_norm": 0.9445425868034363, "learning_rate": 0.000972525139285229, "loss": 3.7023, "step": 3240 }, { "epoch": 0.22047832585949179, "grad_norm": 0.8970370888710022, "learning_rate": 0.0009724826742763962, "loss": 3.6152, "step": 3245 }, { "epoch": 0.22081804593015356, "grad_norm": 1.057731032371521, "learning_rate": 0.0009724402092675637, "loss": 3.7583, "step": 3250 }, { "epoch": 0.22115776600081533, "grad_norm": 1.1157954931259155, "learning_rate": 0.0009723977442587309, "loss": 3.6142, "step": 3255 }, { "epoch": 0.2214974860714771, "grad_norm": 0.9557936191558838, "learning_rate": 0.0009723552792498981, "loss": 3.8147, "step": 3260 }, { "epoch": 0.22183720614213887, "grad_norm": 2.1245615482330322, "learning_rate": 0.0009723128142410654, "loss": 3.7559, "step": 3265 }, { "epoch": 0.22217692621280066, "grad_norm": 1.3809075355529785, "learning_rate": 0.0009722703492322327, "loss": 3.9078, "step": 3270 }, { "epoch": 0.22251664628346243, "grad_norm": 1.2838555574417114, "learning_rate": 0.0009722278842233999, "loss": 3.925, "step": 3275 }, { "epoch": 0.2228563663541242, "grad_norm": 0.8813046216964722, "learning_rate": 0.0009721854192145672, "loss": 3.6598, "step": 3280 }, { "epoch": 0.22319608642478597, 
"grad_norm": 1.1964993476867676, "learning_rate": 0.0009721429542057346, "loss": 4.0182, "step": 3285 }, { "epoch": 0.22353580649544774, "grad_norm": 1.736003041267395, "learning_rate": 0.0009721004891969018, "loss": 3.8026, "step": 3290 }, { "epoch": 0.22387552656610954, "grad_norm": 1.2979940176010132, "learning_rate": 0.0009720580241880691, "loss": 3.5668, "step": 3295 }, { "epoch": 0.2242152466367713, "grad_norm": 1.1765081882476807, "learning_rate": 0.0009720155591792363, "loss": 3.6631, "step": 3300 }, { "epoch": 0.22455496670743308, "grad_norm": 2.473254919052124, "learning_rate": 0.0009719730941704036, "loss": 3.9051, "step": 3305 }, { "epoch": 0.22489468677809485, "grad_norm": 39.752403259277344, "learning_rate": 0.0009719306291615709, "loss": 3.5537, "step": 3310 }, { "epoch": 0.22523440684875662, "grad_norm": 1.1928939819335938, "learning_rate": 0.0009718881641527381, "loss": 3.5853, "step": 3315 }, { "epoch": 0.2255741269194184, "grad_norm": 1.0238375663757324, "learning_rate": 0.0009718456991439055, "loss": 3.8087, "step": 3320 }, { "epoch": 0.22591384699008019, "grad_norm": 1.106451392173767, "learning_rate": 0.0009718032341350728, "loss": 3.8969, "step": 3325 }, { "epoch": 0.22625356706074196, "grad_norm": 4.071500778198242, "learning_rate": 0.00097176076912624, "loss": 3.9763, "step": 3330 }, { "epoch": 0.22659328713140373, "grad_norm": 1.253422498703003, "learning_rate": 0.0009717183041174073, "loss": 3.6757, "step": 3335 }, { "epoch": 0.2269330072020655, "grad_norm": 3.183511734008789, "learning_rate": 0.0009716758391085746, "loss": 3.536, "step": 3340 }, { "epoch": 0.22727272727272727, "grad_norm": 10.874913215637207, "learning_rate": 0.0009716333740997418, "loss": 3.8695, "step": 3345 }, { "epoch": 0.22761244734338903, "grad_norm": 1.1523030996322632, "learning_rate": 0.000971590909090909, "loss": 3.8555, "step": 3350 }, { "epoch": 0.22795216741405083, "grad_norm": 1.980513095855713, "learning_rate": 0.0009715484440820765, "loss": 3.6541, 
"step": 3355 }, { "epoch": 0.2282918874847126, "grad_norm": 1.1078399419784546, "learning_rate": 0.0009715059790732437, "loss": 3.7272, "step": 3360 }, { "epoch": 0.22863160755537437, "grad_norm": 1.0352423191070557, "learning_rate": 0.0009714635140644109, "loss": 3.8742, "step": 3365 }, { "epoch": 0.22897132762603614, "grad_norm": 8.155926704406738, "learning_rate": 0.0009714210490555783, "loss": 3.7515, "step": 3370 }, { "epoch": 0.2293110476966979, "grad_norm": 1.3456337451934814, "learning_rate": 0.0009713785840467455, "loss": 3.7859, "step": 3375 }, { "epoch": 0.2296507677673597, "grad_norm": 1.2146720886230469, "learning_rate": 0.0009713361190379127, "loss": 3.6597, "step": 3380 }, { "epoch": 0.22999048783802148, "grad_norm": 1.703666090965271, "learning_rate": 0.00097129365402908, "loss": 3.7685, "step": 3385 }, { "epoch": 0.23033020790868325, "grad_norm": 0.9638687968254089, "learning_rate": 0.0009712511890202474, "loss": 3.6349, "step": 3390 }, { "epoch": 0.23066992797934502, "grad_norm": 0.9819139838218689, "learning_rate": 0.0009712087240114146, "loss": 3.8415, "step": 3395 }, { "epoch": 0.2310096480500068, "grad_norm": 1.432641625404358, "learning_rate": 0.000971166259002582, "loss": 3.602, "step": 3400 }, { "epoch": 0.23134936812066856, "grad_norm": 1.1047865152359009, "learning_rate": 0.0009711237939937492, "loss": 3.8402, "step": 3405 }, { "epoch": 0.23168908819133036, "grad_norm": 1.7767201662063599, "learning_rate": 0.0009710813289849164, "loss": 3.6782, "step": 3410 }, { "epoch": 0.23202880826199213, "grad_norm": 1.2640701532363892, "learning_rate": 0.0009710388639760837, "loss": 3.7956, "step": 3415 }, { "epoch": 0.2323685283326539, "grad_norm": 0.9783449769020081, "learning_rate": 0.000970996398967251, "loss": 3.8328, "step": 3420 }, { "epoch": 0.23270824840331567, "grad_norm": 1.1869221925735474, "learning_rate": 0.0009709539339584183, "loss": 3.8628, "step": 3425 }, { "epoch": 0.23304796847397743, "grad_norm": 1.0379430055618286, 
"learning_rate": 0.0009709114689495856, "loss": 3.8292, "step": 3430 }, { "epoch": 0.2333876885446392, "grad_norm": 0.798918604850769, "learning_rate": 0.0009708690039407529, "loss": 3.7983, "step": 3435 }, { "epoch": 0.233727408615301, "grad_norm": 1.0565685033798218, "learning_rate": 0.0009708265389319201, "loss": 3.519, "step": 3440 }, { "epoch": 0.23406712868596277, "grad_norm": 1.5685796737670898, "learning_rate": 0.0009707840739230874, "loss": 3.614, "step": 3445 }, { "epoch": 0.23440684875662454, "grad_norm": 1.1374493837356567, "learning_rate": 0.0009707416089142546, "loss": 3.7021, "step": 3450 }, { "epoch": 0.2347465688272863, "grad_norm": 1.204840898513794, "learning_rate": 0.0009706991439054219, "loss": 3.709, "step": 3455 }, { "epoch": 0.23508628889794808, "grad_norm": 1.7392574548721313, "learning_rate": 0.0009706566788965893, "loss": 3.633, "step": 3460 }, { "epoch": 0.23542600896860988, "grad_norm": 3.483042001724243, "learning_rate": 0.0009706142138877565, "loss": 3.8666, "step": 3465 }, { "epoch": 0.23576572903927165, "grad_norm": 2.7645256519317627, "learning_rate": 0.0009705717488789238, "loss": 3.6714, "step": 3470 }, { "epoch": 0.23610544910993342, "grad_norm": 1.6535698175430298, "learning_rate": 0.0009705292838700911, "loss": 3.6591, "step": 3475 }, { "epoch": 0.2364451691805952, "grad_norm": 1.4330202341079712, "learning_rate": 0.0009704868188612583, "loss": 3.8997, "step": 3480 }, { "epoch": 0.23678488925125696, "grad_norm": 1.0103590488433838, "learning_rate": 0.0009704443538524255, "loss": 3.8286, "step": 3485 }, { "epoch": 0.23712460932191873, "grad_norm": 0.9755469560623169, "learning_rate": 0.0009704018888435929, "loss": 3.4604, "step": 3490 }, { "epoch": 0.23746432939258053, "grad_norm": 1.1946384906768799, "learning_rate": 0.0009703594238347602, "loss": 3.6954, "step": 3495 }, { "epoch": 0.2378040494632423, "grad_norm": 1.2918400764465332, "learning_rate": 0.0009703169588259274, "loss": 3.8547, "step": 3500 }, { "epoch": 
0.23814376953390406, "grad_norm": 1.072406530380249, "learning_rate": 0.0009702744938170948, "loss": 3.8557, "step": 3505 }, { "epoch": 0.23848348960456583, "grad_norm": 1.3071180582046509, "learning_rate": 0.000970232028808262, "loss": 3.6447, "step": 3510 }, { "epoch": 0.2388232096752276, "grad_norm": 1.10308837890625, "learning_rate": 0.0009701895637994292, "loss": 3.7567, "step": 3515 }, { "epoch": 0.23916292974588937, "grad_norm": 1.0754598379135132, "learning_rate": 0.0009701470987905966, "loss": 3.6214, "step": 3520 }, { "epoch": 0.23950264981655117, "grad_norm": 1.659072756767273, "learning_rate": 0.0009701046337817638, "loss": 3.8394, "step": 3525 }, { "epoch": 0.23984236988721294, "grad_norm": 3.2296571731567383, "learning_rate": 0.0009700621687729311, "loss": 3.7565, "step": 3530 }, { "epoch": 0.2401820899578747, "grad_norm": 0.9745691418647766, "learning_rate": 0.0009700197037640985, "loss": 3.4102, "step": 3535 }, { "epoch": 0.24052181002853648, "grad_norm": 1.1702401638031006, "learning_rate": 0.0009699772387552657, "loss": 3.6733, "step": 3540 }, { "epoch": 0.24086153009919825, "grad_norm": 1.2394490242004395, "learning_rate": 0.0009699347737464329, "loss": 3.8177, "step": 3545 }, { "epoch": 0.24120125016986005, "grad_norm": 1.4692909717559814, "learning_rate": 0.0009698923087376002, "loss": 3.9611, "step": 3550 }, { "epoch": 0.24154097024052182, "grad_norm": 1.0239768028259277, "learning_rate": 0.0009698498437287675, "loss": 3.8783, "step": 3555 }, { "epoch": 0.2418806903111836, "grad_norm": 1.4669255018234253, "learning_rate": 0.0009698073787199347, "loss": 3.5217, "step": 3560 }, { "epoch": 0.24222041038184536, "grad_norm": 1.0156927108764648, "learning_rate": 0.0009697649137111021, "loss": 3.7387, "step": 3565 }, { "epoch": 0.24256013045250713, "grad_norm": 1.273494839668274, "learning_rate": 0.0009697224487022694, "loss": 3.4956, "step": 3570 }, { "epoch": 0.2428998505231689, "grad_norm": 2.413400888442993, "learning_rate": 
0.0009696799836934366, "loss": 3.8674, "step": 3575 }, { "epoch": 0.2432395705938307, "grad_norm": 2.876403331756592, "learning_rate": 0.0009696375186846039, "loss": 3.8919, "step": 3580 }, { "epoch": 0.24357929066449246, "grad_norm": 0.8977981805801392, "learning_rate": 0.0009695950536757711, "loss": 3.882, "step": 3585 }, { "epoch": 0.24391901073515423, "grad_norm": 1.9583923816680908, "learning_rate": 0.0009695525886669385, "loss": 3.5686, "step": 3590 }, { "epoch": 0.244258730805816, "grad_norm": 13.16466236114502, "learning_rate": 0.0009695101236581057, "loss": 3.7089, "step": 3595 }, { "epoch": 0.24459845087647777, "grad_norm": 1.0929129123687744, "learning_rate": 0.000969467658649273, "loss": 3.672, "step": 3600 }, { "epoch": 0.24493817094713954, "grad_norm": 1.2125701904296875, "learning_rate": 0.0009694251936404404, "loss": 3.6643, "step": 3605 }, { "epoch": 0.24527789101780134, "grad_norm": 1.9351205825805664, "learning_rate": 0.0009693827286316076, "loss": 3.7559, "step": 3610 }, { "epoch": 0.2456176110884631, "grad_norm": 1.8809832334518433, "learning_rate": 0.0009693402636227748, "loss": 3.8387, "step": 3615 }, { "epoch": 0.24595733115912488, "grad_norm": 0.8717310428619385, "learning_rate": 0.0009692977986139422, "loss": 3.5149, "step": 3620 }, { "epoch": 0.24629705122978665, "grad_norm": 1.3143110275268555, "learning_rate": 0.0009692553336051094, "loss": 3.7157, "step": 3625 }, { "epoch": 0.24663677130044842, "grad_norm": 1.0391416549682617, "learning_rate": 0.0009692128685962766, "loss": 3.6212, "step": 3630 }, { "epoch": 0.24697649137111022, "grad_norm": 3.5025224685668945, "learning_rate": 0.0009691704035874441, "loss": 3.8246, "step": 3635 }, { "epoch": 0.247316211441772, "grad_norm": 1.1201882362365723, "learning_rate": 0.0009691279385786113, "loss": 3.7905, "step": 3640 }, { "epoch": 0.24765593151243376, "grad_norm": 1.1135727167129517, "learning_rate": 0.0009690854735697785, "loss": 3.9557, "step": 3645 }, { "epoch": 0.24799565158309553, 
"grad_norm": 0.8443942070007324, "learning_rate": 0.0009690430085609458, "loss": 3.6952, "step": 3650 }, { "epoch": 0.2483353716537573, "grad_norm": 1.2699190378189087, "learning_rate": 0.0009690005435521131, "loss": 3.5383, "step": 3655 }, { "epoch": 0.24867509172441907, "grad_norm": 1.3860055208206177, "learning_rate": 0.0009689580785432803, "loss": 3.6215, "step": 3660 }, { "epoch": 0.24901481179508086, "grad_norm": 1.0114727020263672, "learning_rate": 0.0009689156135344476, "loss": 3.6462, "step": 3665 }, { "epoch": 0.24935453186574263, "grad_norm": 1.029746651649475, "learning_rate": 0.000968873148525615, "loss": 3.7336, "step": 3670 }, { "epoch": 0.2496942519364044, "grad_norm": 1.4423309564590454, "learning_rate": 0.0009688306835167822, "loss": 3.8197, "step": 3675 }, { "epoch": 0.2500339720070662, "grad_norm": 0.919982373714447, "learning_rate": 0.0009687882185079495, "loss": 3.7516, "step": 3680 }, { "epoch": 0.25037369207772797, "grad_norm": 0.9887851476669312, "learning_rate": 0.0009687457534991167, "loss": 3.6397, "step": 3685 }, { "epoch": 0.2507134121483897, "grad_norm": 1.1524382829666138, "learning_rate": 0.000968703288490284, "loss": 3.6041, "step": 3690 }, { "epoch": 0.2510531322190515, "grad_norm": 1.0924803018569946, "learning_rate": 0.0009686608234814513, "loss": 3.8356, "step": 3695 }, { "epoch": 0.25139285228971325, "grad_norm": 1.1735782623291016, "learning_rate": 0.0009686183584726185, "loss": 3.8643, "step": 3700 }, { "epoch": 0.25173257236037505, "grad_norm": 0.950107991695404, "learning_rate": 0.0009685758934637859, "loss": 3.8039, "step": 3705 }, { "epoch": 0.25207229243103685, "grad_norm": 1.3449469804763794, "learning_rate": 0.0009685334284549532, "loss": 3.7397, "step": 3710 }, { "epoch": 0.2524120125016986, "grad_norm": 8.063600540161133, "learning_rate": 0.0009684909634461204, "loss": 3.6495, "step": 3715 }, { "epoch": 0.2527517325723604, "grad_norm": 0.8109523057937622, "learning_rate": 0.0009684484984372877, "loss": 3.6199, 
"step": 3720 }, { "epoch": 0.25309145264302213, "grad_norm": 0.9069363474845886, "learning_rate": 0.000968406033428455, "loss": 3.7891, "step": 3725 }, { "epoch": 0.2534311727136839, "grad_norm": 1.1431958675384521, "learning_rate": 0.0009683635684196222, "loss": 3.8845, "step": 3730 }, { "epoch": 0.2537708927843457, "grad_norm": 1.1577270030975342, "learning_rate": 0.0009683211034107894, "loss": 3.6575, "step": 3735 }, { "epoch": 0.25411061285500747, "grad_norm": 1.0579068660736084, "learning_rate": 0.0009682786384019569, "loss": 3.7659, "step": 3740 }, { "epoch": 0.25445033292566926, "grad_norm": 1.3598712682724, "learning_rate": 0.0009682361733931241, "loss": 3.5419, "step": 3745 }, { "epoch": 0.254790052996331, "grad_norm": 0.894422173500061, "learning_rate": 0.0009681937083842913, "loss": 3.5342, "step": 3750 }, { "epoch": 0.2551297730669928, "grad_norm": 1.3733201026916504, "learning_rate": 0.0009681512433754587, "loss": 3.9068, "step": 3755 }, { "epoch": 0.25546949313765455, "grad_norm": 1.156059980392456, "learning_rate": 0.0009681087783666259, "loss": 3.6756, "step": 3760 }, { "epoch": 0.25580921320831634, "grad_norm": 0.8994821906089783, "learning_rate": 0.0009680663133577931, "loss": 3.7078, "step": 3765 }, { "epoch": 0.25614893327897814, "grad_norm": 1.1082830429077148, "learning_rate": 0.0009680238483489606, "loss": 3.7112, "step": 3770 }, { "epoch": 0.2564886533496399, "grad_norm": 0.8584597706794739, "learning_rate": 0.0009679813833401278, "loss": 3.6165, "step": 3775 }, { "epoch": 0.2568283734203017, "grad_norm": 1.1586264371871948, "learning_rate": 0.000967938918331295, "loss": 3.844, "step": 3780 }, { "epoch": 0.2571680934909634, "grad_norm": 2.05064058303833, "learning_rate": 0.0009678964533224623, "loss": 3.7438, "step": 3785 }, { "epoch": 0.2575078135616252, "grad_norm": 0.9126152396202087, "learning_rate": 0.0009678539883136296, "loss": 3.8158, "step": 3790 }, { "epoch": 0.257847533632287, "grad_norm": 3.099888563156128, "learning_rate": 
0.0009678115233047968, "loss": 3.7426, "step": 3795 }, { "epoch": 0.25818725370294876, "grad_norm": 0.997413694858551, "learning_rate": 0.0009677690582959641, "loss": 3.5938, "step": 3800 }, { "epoch": 0.25852697377361056, "grad_norm": 1.480401873588562, "learning_rate": 0.0009677265932871315, "loss": 3.7106, "step": 3805 }, { "epoch": 0.2588666938442723, "grad_norm": 1.2259539365768433, "learning_rate": 0.0009676841282782987, "loss": 3.7183, "step": 3810 }, { "epoch": 0.2592064139149341, "grad_norm": 1.1748878955841064, "learning_rate": 0.000967641663269466, "loss": 3.7591, "step": 3815 }, { "epoch": 0.2595461339855959, "grad_norm": 1.0843846797943115, "learning_rate": 0.0009675991982606333, "loss": 3.5921, "step": 3820 }, { "epoch": 0.25988585405625764, "grad_norm": 1.0269793272018433, "learning_rate": 0.0009675567332518005, "loss": 3.7605, "step": 3825 }, { "epoch": 0.26022557412691943, "grad_norm": 1.2714554071426392, "learning_rate": 0.0009675142682429678, "loss": 3.5445, "step": 3830 }, { "epoch": 0.2605652941975812, "grad_norm": 1.0141854286193848, "learning_rate": 0.000967471803234135, "loss": 3.6392, "step": 3835 }, { "epoch": 0.260905014268243, "grad_norm": 1.1693915128707886, "learning_rate": 0.0009674293382253024, "loss": 3.7287, "step": 3840 }, { "epoch": 0.2612447343389047, "grad_norm": 0.911729633808136, "learning_rate": 0.0009673868732164697, "loss": 3.9118, "step": 3845 }, { "epoch": 0.2615844544095665, "grad_norm": 1.2623109817504883, "learning_rate": 0.0009673444082076369, "loss": 3.8696, "step": 3850 }, { "epoch": 0.2619241744802283, "grad_norm": 1.069911241531372, "learning_rate": 0.0009673019431988042, "loss": 3.5416, "step": 3855 }, { "epoch": 0.26226389455089005, "grad_norm": 4.318561553955078, "learning_rate": 0.0009672594781899715, "loss": 3.8614, "step": 3860 }, { "epoch": 0.26260361462155185, "grad_norm": 3.3920795917510986, "learning_rate": 0.0009672170131811387, "loss": 3.7632, "step": 3865 }, { "epoch": 0.2629433346922136, 
"grad_norm": 2.275514841079712, "learning_rate": 0.000967174548172306, "loss": 3.6582, "step": 3870 }, { "epoch": 0.2632830547628754, "grad_norm": 3.5748398303985596, "learning_rate": 0.0009671320831634734, "loss": 3.8978, "step": 3875 }, { "epoch": 0.2636227748335372, "grad_norm": 1.5022661685943604, "learning_rate": 0.0009670896181546406, "loss": 3.9127, "step": 3880 }, { "epoch": 0.26396249490419893, "grad_norm": 1.6353659629821777, "learning_rate": 0.0009670471531458078, "loss": 3.7883, "step": 3885 }, { "epoch": 0.2643022149748607, "grad_norm": 4.395288944244385, "learning_rate": 0.0009670046881369752, "loss": 3.8781, "step": 3890 }, { "epoch": 0.26464193504552247, "grad_norm": 1.0897356271743774, "learning_rate": 0.0009669622231281424, "loss": 3.8832, "step": 3895 }, { "epoch": 0.26498165511618427, "grad_norm": 1.3724782466888428, "learning_rate": 0.0009669197581193096, "loss": 3.5523, "step": 3900 }, { "epoch": 0.26532137518684606, "grad_norm": 1.2643336057662964, "learning_rate": 0.000966877293110477, "loss": 3.9706, "step": 3905 }, { "epoch": 0.2656610952575078, "grad_norm": 1.261716365814209, "learning_rate": 0.0009668348281016443, "loss": 3.5962, "step": 3910 }, { "epoch": 0.2660008153281696, "grad_norm": 1.286710500717163, "learning_rate": 0.0009667923630928115, "loss": 3.9127, "step": 3915 }, { "epoch": 0.26634053539883135, "grad_norm": 1.6948519945144653, "learning_rate": 0.0009667498980839789, "loss": 3.9442, "step": 3920 }, { "epoch": 0.26668025546949314, "grad_norm": 4.210630416870117, "learning_rate": 0.0009667074330751461, "loss": 3.8056, "step": 3925 }, { "epoch": 0.2670199755401549, "grad_norm": 1.033447265625, "learning_rate": 0.0009666649680663134, "loss": 3.4903, "step": 3930 }, { "epoch": 0.2673596956108167, "grad_norm": 1.7164576053619385, "learning_rate": 0.0009666225030574806, "loss": 3.8919, "step": 3935 }, { "epoch": 0.2676994156814785, "grad_norm": 1.0079880952835083, "learning_rate": 0.0009665800380486479, "loss": 3.8132, "step": 
3940 }, { "epoch": 0.2680391357521402, "grad_norm": 1.1071363687515259, "learning_rate": 0.0009665375730398153, "loss": 3.5718, "step": 3945 }, { "epoch": 0.268378855822802, "grad_norm": 1.0285614728927612, "learning_rate": 0.0009664951080309825, "loss": 4.0007, "step": 3950 }, { "epoch": 0.26871857589346376, "grad_norm": 1.2174079418182373, "learning_rate": 0.0009664526430221498, "loss": 3.4874, "step": 3955 }, { "epoch": 0.26905829596412556, "grad_norm": 0.931437611579895, "learning_rate": 0.0009664101780133171, "loss": 3.8037, "step": 3960 }, { "epoch": 0.26939801603478736, "grad_norm": 1.3316792249679565, "learning_rate": 0.0009663677130044843, "loss": 3.9111, "step": 3965 }, { "epoch": 0.2697377361054491, "grad_norm": 1.6231976747512817, "learning_rate": 0.0009663252479956515, "loss": 3.5747, "step": 3970 }, { "epoch": 0.2700774561761109, "grad_norm": 1.1298778057098389, "learning_rate": 0.0009662827829868189, "loss": 4.0268, "step": 3975 }, { "epoch": 0.27041717624677264, "grad_norm": 1.2084243297576904, "learning_rate": 0.0009662403179779862, "loss": 3.8091, "step": 3980 }, { "epoch": 0.27075689631743444, "grad_norm": 1.0195139646530151, "learning_rate": 0.0009661978529691534, "loss": 3.9356, "step": 3985 }, { "epoch": 0.27109661638809623, "grad_norm": 0.9428665041923523, "learning_rate": 0.0009661553879603208, "loss": 3.7235, "step": 3990 }, { "epoch": 0.271436336458758, "grad_norm": 1.2279903888702393, "learning_rate": 0.000966112922951488, "loss": 3.6504, "step": 3995 }, { "epoch": 0.2717760565294198, "grad_norm": 1.7150359153747559, "learning_rate": 0.0009660704579426552, "loss": 3.9889, "step": 4000 }, { "epoch": 0.2721157766000815, "grad_norm": 1.0850931406021118, "learning_rate": 0.0009660279929338226, "loss": 3.7155, "step": 4005 }, { "epoch": 0.2724554966707433, "grad_norm": 1.1364189386367798, "learning_rate": 0.0009659855279249898, "loss": 3.6651, "step": 4010 }, { "epoch": 0.27279521674140506, "grad_norm": 1.3042093515396118, "learning_rate": 
0.0009659430629161571, "loss": 4.0118, "step": 4015 }, { "epoch": 0.27313493681206685, "grad_norm": 1.42545485496521, "learning_rate": 0.0009659005979073245, "loss": 3.6034, "step": 4020 }, { "epoch": 0.27347465688272865, "grad_norm": 1.1655789613723755, "learning_rate": 0.0009658581328984917, "loss": 3.6671, "step": 4025 }, { "epoch": 0.2738143769533904, "grad_norm": 1.2997692823410034, "learning_rate": 0.0009658156678896589, "loss": 3.484, "step": 4030 }, { "epoch": 0.2741540970240522, "grad_norm": 1.1916096210479736, "learning_rate": 0.0009657732028808262, "loss": 3.8345, "step": 4035 }, { "epoch": 0.27449381709471393, "grad_norm": 1.0867416858673096, "learning_rate": 0.0009657307378719935, "loss": 3.6404, "step": 4040 }, { "epoch": 0.27483353716537573, "grad_norm": 4.530776023864746, "learning_rate": 0.0009656882728631607, "loss": 3.9722, "step": 4045 }, { "epoch": 0.2751732572360375, "grad_norm": 5.92178201675415, "learning_rate": 0.0009656458078543281, "loss": 3.8644, "step": 4050 }, { "epoch": 0.27551297730669927, "grad_norm": 5.541032791137695, "learning_rate": 0.0009656033428454954, "loss": 3.8368, "step": 4055 }, { "epoch": 0.27585269737736107, "grad_norm": 1.6482176780700684, "learning_rate": 0.0009655608778366626, "loss": 3.687, "step": 4060 }, { "epoch": 0.2761924174480228, "grad_norm": 1.8368566036224365, "learning_rate": 0.0009655184128278299, "loss": 3.7202, "step": 4065 }, { "epoch": 0.2765321375186846, "grad_norm": 1.6306824684143066, "learning_rate": 0.0009654759478189972, "loss": 3.5416, "step": 4070 }, { "epoch": 0.2768718575893464, "grad_norm": 1.021378755569458, "learning_rate": 0.0009654334828101644, "loss": 3.844, "step": 4075 }, { "epoch": 0.27721157766000815, "grad_norm": 0.8540748357772827, "learning_rate": 0.0009653910178013317, "loss": 3.7475, "step": 4080 }, { "epoch": 0.27755129773066994, "grad_norm": 1.1259151697158813, "learning_rate": 0.000965348552792499, "loss": 3.4594, "step": 4085 }, { "epoch": 0.2778910178013317, "grad_norm": 
1.1310237646102905, "learning_rate": 0.0009653060877836663, "loss": 3.7153, "step": 4090 }, { "epoch": 0.2782307378719935, "grad_norm": 1.2896407842636108, "learning_rate": 0.0009652636227748336, "loss": 3.776, "step": 4095 }, { "epoch": 0.2785704579426552, "grad_norm": 1.051513433456421, "learning_rate": 0.0009652211577660008, "loss": 3.8321, "step": 4100 }, { "epoch": 0.278910178013317, "grad_norm": 0.9950342774391174, "learning_rate": 0.0009651786927571681, "loss": 3.61, "step": 4105 }, { "epoch": 0.2792498980839788, "grad_norm": 1.1275299787521362, "learning_rate": 0.0009651362277483354, "loss": 3.8291, "step": 4110 }, { "epoch": 0.27958961815464056, "grad_norm": 1.243725299835205, "learning_rate": 0.0009650937627395026, "loss": 3.6989, "step": 4115 }, { "epoch": 0.27992933822530236, "grad_norm": 1.0368530750274658, "learning_rate": 0.00096505129773067, "loss": 3.7575, "step": 4120 }, { "epoch": 0.2802690582959641, "grad_norm": 1.1992262601852417, "learning_rate": 0.0009650088327218373, "loss": 3.5623, "step": 4125 }, { "epoch": 0.2806087783666259, "grad_norm": 1.0593440532684326, "learning_rate": 0.0009649663677130045, "loss": 3.6839, "step": 4130 }, { "epoch": 0.2809484984372877, "grad_norm": 0.8463146090507507, "learning_rate": 0.0009649239027041717, "loss": 3.6032, "step": 4135 }, { "epoch": 0.28128821850794944, "grad_norm": 1.0624035596847534, "learning_rate": 0.0009648814376953391, "loss": 3.6275, "step": 4140 }, { "epoch": 0.28162793857861124, "grad_norm": 1.1140871047973633, "learning_rate": 0.0009648389726865063, "loss": 3.7193, "step": 4145 }, { "epoch": 0.281967658649273, "grad_norm": 1.4138745069503784, "learning_rate": 0.0009647965076776735, "loss": 3.9939, "step": 4150 }, { "epoch": 0.2823073787199348, "grad_norm": 1.0267720222473145, "learning_rate": 0.000964754042668841, "loss": 3.8837, "step": 4155 }, { "epoch": 0.2826470987905966, "grad_norm": 1.3282206058502197, "learning_rate": 0.0009647115776600082, "loss": 3.8476, "step": 4160 }, { 
"epoch": 0.2829868188612583, "grad_norm": 1.0653339624404907, "learning_rate": 0.0009646691126511754, "loss": 3.5499, "step": 4165 }, { "epoch": 0.2833265389319201, "grad_norm": 0.8647159934043884, "learning_rate": 0.0009646266476423428, "loss": 3.6593, "step": 4170 }, { "epoch": 0.28366625900258186, "grad_norm": 1.084784746170044, "learning_rate": 0.00096458418263351, "loss": 3.7622, "step": 4175 }, { "epoch": 0.28400597907324365, "grad_norm": 1.2169792652130127, "learning_rate": 0.0009645417176246772, "loss": 3.4668, "step": 4180 }, { "epoch": 0.2843456991439054, "grad_norm": 1.2356256246566772, "learning_rate": 0.0009644992526158445, "loss": 3.8281, "step": 4185 }, { "epoch": 0.2846854192145672, "grad_norm": 1.4746983051300049, "learning_rate": 0.0009644567876070119, "loss": 3.4014, "step": 4190 }, { "epoch": 0.285025139285229, "grad_norm": 1.1547867059707642, "learning_rate": 0.0009644143225981791, "loss": 3.7475, "step": 4195 }, { "epoch": 0.28536485935589073, "grad_norm": 1.0167630910873413, "learning_rate": 0.0009643718575893464, "loss": 3.7178, "step": 4200 }, { "epoch": 0.28570457942655253, "grad_norm": 0.9099068641662598, "learning_rate": 0.0009643293925805137, "loss": 3.7284, "step": 4205 }, { "epoch": 0.28604429949721427, "grad_norm": 1.3242944478988647, "learning_rate": 0.0009642869275716809, "loss": 3.7695, "step": 4210 }, { "epoch": 0.28638401956787607, "grad_norm": 1.4188430309295654, "learning_rate": 0.0009642444625628482, "loss": 3.7561, "step": 4215 }, { "epoch": 0.28672373963853787, "grad_norm": 1.3588393926620483, "learning_rate": 0.0009642019975540154, "loss": 3.9338, "step": 4220 }, { "epoch": 0.2870634597091996, "grad_norm": 1.0978115797042847, "learning_rate": 0.0009641595325451828, "loss": 3.9354, "step": 4225 }, { "epoch": 0.2874031797798614, "grad_norm": 1.0073235034942627, "learning_rate": 0.0009641170675363501, "loss": 3.8829, "step": 4230 }, { "epoch": 0.28774289985052315, "grad_norm": 0.8149319291114807, "learning_rate": 
0.0009640746025275173, "loss": 3.6001, "step": 4235 }, { "epoch": 0.28808261992118495, "grad_norm": 0.9418841600418091, "learning_rate": 0.0009640321375186846, "loss": 3.8564, "step": 4240 }, { "epoch": 0.28842233999184674, "grad_norm": 0.8430861234664917, "learning_rate": 0.0009639896725098519, "loss": 3.8383, "step": 4245 }, { "epoch": 0.2887620600625085, "grad_norm": 1.227128505706787, "learning_rate": 0.0009639472075010191, "loss": 3.6639, "step": 4250 }, { "epoch": 0.2891017801331703, "grad_norm": 0.9428737759590149, "learning_rate": 0.0009639047424921864, "loss": 3.5557, "step": 4255 }, { "epoch": 0.289441500203832, "grad_norm": 1.1423616409301758, "learning_rate": 0.0009638622774833538, "loss": 3.9375, "step": 4260 }, { "epoch": 0.2897812202744938, "grad_norm": 1.116066336631775, "learning_rate": 0.000963819812474521, "loss": 3.6469, "step": 4265 }, { "epoch": 0.29012094034515556, "grad_norm": 1.2520655393600464, "learning_rate": 0.0009637773474656884, "loss": 3.705, "step": 4270 }, { "epoch": 0.29046066041581736, "grad_norm": 1.570820927619934, "learning_rate": 0.0009637348824568556, "loss": 3.8273, "step": 4275 }, { "epoch": 0.29080038048647916, "grad_norm": 1.902830958366394, "learning_rate": 0.0009636924174480228, "loss": 3.6434, "step": 4280 }, { "epoch": 0.2911401005571409, "grad_norm": 0.9167801141738892, "learning_rate": 0.0009636499524391901, "loss": 3.8741, "step": 4285 }, { "epoch": 0.2914798206278027, "grad_norm": 1.1784716844558716, "learning_rate": 0.0009636074874303575, "loss": 3.5708, "step": 4290 }, { "epoch": 0.29181954069846444, "grad_norm": 1.0707528591156006, "learning_rate": 0.0009635650224215247, "loss": 3.9295, "step": 4295 }, { "epoch": 0.29215926076912624, "grad_norm": 1.0608491897583008, "learning_rate": 0.000963522557412692, "loss": 3.5993, "step": 4300 }, { "epoch": 0.29249898083978804, "grad_norm": 1.1349802017211914, "learning_rate": 0.0009634800924038593, "loss": 3.454, "step": 4305 }, { "epoch": 0.2928387009104498, 
"grad_norm": 1.1372907161712646, "learning_rate": 0.0009634376273950265, "loss": 3.855, "step": 4310 }, { "epoch": 0.2931784209811116, "grad_norm": 1.342961072921753, "learning_rate": 0.0009633951623861938, "loss": 3.5764, "step": 4315 }, { "epoch": 0.2935181410517733, "grad_norm": 1.2137556076049805, "learning_rate": 0.000963352697377361, "loss": 3.6993, "step": 4320 }, { "epoch": 0.2938578611224351, "grad_norm": 1.3890081644058228, "learning_rate": 0.0009633102323685284, "loss": 3.6504, "step": 4325 }, { "epoch": 0.2941975811930969, "grad_norm": 0.972489058971405, "learning_rate": 0.0009632677673596957, "loss": 3.7381, "step": 4330 }, { "epoch": 0.29453730126375866, "grad_norm": 1.0242931842803955, "learning_rate": 0.0009632253023508629, "loss": 3.8142, "step": 4335 }, { "epoch": 0.29487702133442045, "grad_norm": 1.589585542678833, "learning_rate": 0.0009631828373420302, "loss": 3.718, "step": 4340 }, { "epoch": 0.2952167414050822, "grad_norm": 1.1338411569595337, "learning_rate": 0.0009631403723331975, "loss": 3.2938, "step": 4345 }, { "epoch": 0.295556461475744, "grad_norm": 1.2631596326828003, "learning_rate": 0.0009630979073243647, "loss": 3.7381, "step": 4350 }, { "epoch": 0.29589618154640573, "grad_norm": 1.13748300075531, "learning_rate": 0.000963055442315532, "loss": 3.7659, "step": 4355 }, { "epoch": 0.29623590161706753, "grad_norm": 1.5526479482650757, "learning_rate": 0.0009630129773066994, "loss": 3.5605, "step": 4360 }, { "epoch": 0.29657562168772933, "grad_norm": 1.3494572639465332, "learning_rate": 0.0009629705122978666, "loss": 3.9211, "step": 4365 }, { "epoch": 0.29691534175839107, "grad_norm": 1.0175126791000366, "learning_rate": 0.0009629280472890338, "loss": 3.8879, "step": 4370 }, { "epoch": 0.29725506182905287, "grad_norm": 1.3201661109924316, "learning_rate": 0.0009628855822802012, "loss": 3.6082, "step": 4375 }, { "epoch": 0.2975947818997146, "grad_norm": 2.7748191356658936, "learning_rate": 0.0009628431172713684, "loss": 3.7161, "step": 
4380 }, { "epoch": 0.2979345019703764, "grad_norm": 0.9651588797569275, "learning_rate": 0.0009628006522625356, "loss": 3.7153, "step": 4385 }, { "epoch": 0.2982742220410382, "grad_norm": 1.4994429349899292, "learning_rate": 0.000962758187253703, "loss": 3.925, "step": 4390 }, { "epoch": 0.29861394211169995, "grad_norm": 5.641922950744629, "learning_rate": 0.0009627157222448703, "loss": 3.9285, "step": 4395 }, { "epoch": 0.29895366218236175, "grad_norm": 2.2435333728790283, "learning_rate": 0.0009626732572360375, "loss": 3.742, "step": 4400 }, { "epoch": 0.2992933822530235, "grad_norm": 3.884735345840454, "learning_rate": 0.0009626307922272049, "loss": 3.769, "step": 4405 }, { "epoch": 0.2996331023236853, "grad_norm": 2.183898448944092, "learning_rate": 0.0009625883272183721, "loss": 3.9379, "step": 4410 }, { "epoch": 0.2999728223943471, "grad_norm": 1.249224305152893, "learning_rate": 0.0009625458622095393, "loss": 3.921, "step": 4415 }, { "epoch": 0.3003125424650088, "grad_norm": 1.4108771085739136, "learning_rate": 0.0009625033972007066, "loss": 3.7551, "step": 4420 }, { "epoch": 0.3006522625356706, "grad_norm": 0.9130754470825195, "learning_rate": 0.0009624609321918739, "loss": 3.8247, "step": 4425 }, { "epoch": 0.30099198260633236, "grad_norm": 2.136962652206421, "learning_rate": 0.0009624184671830412, "loss": 4.0067, "step": 4430 }, { "epoch": 0.30133170267699416, "grad_norm": 1.456647276878357, "learning_rate": 0.0009623760021742085, "loss": 4.0309, "step": 4435 }, { "epoch": 0.3016714227476559, "grad_norm": 2.4046711921691895, "learning_rate": 0.0009623335371653758, "loss": 3.6098, "step": 4440 }, { "epoch": 0.3020111428183177, "grad_norm": 1.082567572593689, "learning_rate": 0.000962291072156543, "loss": 3.747, "step": 4445 }, { "epoch": 0.3023508628889795, "grad_norm": 1.1557730436325073, "learning_rate": 0.0009622486071477103, "loss": 3.7214, "step": 4450 }, { "epoch": 0.30269058295964124, "grad_norm": 0.8550136685371399, "learning_rate": 
0.0009622061421388776, "loss": 3.5156, "step": 4455 }, { "epoch": 0.30303030303030304, "grad_norm": 1.0919111967086792, "learning_rate": 0.0009621636771300448, "loss": 3.625, "step": 4460 }, { "epoch": 0.3033700231009648, "grad_norm": 2.4144346714019775, "learning_rate": 0.0009621212121212122, "loss": 3.6986, "step": 4465 }, { "epoch": 0.3037097431716266, "grad_norm": 1.0780550241470337, "learning_rate": 0.0009620787471123794, "loss": 3.5539, "step": 4470 }, { "epoch": 0.3040494632422884, "grad_norm": 0.9155409932136536, "learning_rate": 0.0009620362821035467, "loss": 3.7101, "step": 4475 }, { "epoch": 0.3043891833129501, "grad_norm": 1.1482208967208862, "learning_rate": 0.000961993817094714, "loss": 3.8898, "step": 4480 }, { "epoch": 0.3047289033836119, "grad_norm": 1.1876227855682373, "learning_rate": 0.0009619513520858812, "loss": 3.6311, "step": 4485 }, { "epoch": 0.30506862345427366, "grad_norm": 0.9206277132034302, "learning_rate": 0.0009619088870770485, "loss": 4.0481, "step": 4490 }, { "epoch": 0.30540834352493546, "grad_norm": 1.8055614233016968, "learning_rate": 0.0009618664220682158, "loss": 3.732, "step": 4495 }, { "epoch": 0.30574806359559725, "grad_norm": 1.351907730102539, "learning_rate": 0.0009618239570593831, "loss": 3.7717, "step": 4500 }, { "epoch": 0.306087783666259, "grad_norm": 1.2030396461486816, "learning_rate": 0.0009617814920505504, "loss": 3.5808, "step": 4505 }, { "epoch": 0.3064275037369208, "grad_norm": 1.085195779800415, "learning_rate": 0.0009617390270417177, "loss": 4.0801, "step": 4510 }, { "epoch": 0.30676722380758253, "grad_norm": 0.8475092649459839, "learning_rate": 0.0009616965620328849, "loss": 3.8221, "step": 4515 }, { "epoch": 0.30710694387824433, "grad_norm": 1.6519461870193481, "learning_rate": 0.0009616540970240521, "loss": 3.7648, "step": 4520 }, { "epoch": 0.3074466639489061, "grad_norm": 1.8256940841674805, "learning_rate": 0.0009616116320152195, "loss": 3.5993, "step": 4525 }, { "epoch": 0.30778638401956787, 
"grad_norm": 1.5678510665893555, "learning_rate": 0.0009615691670063867, "loss": 3.8737, "step": 4530 }, { "epoch": 0.30812610409022967, "grad_norm": 1.2203842401504517, "learning_rate": 0.000961526701997554, "loss": 3.5766, "step": 4535 }, { "epoch": 0.3084658241608914, "grad_norm": 1.3865641355514526, "learning_rate": 0.0009614842369887214, "loss": 3.7138, "step": 4540 }, { "epoch": 0.3088055442315532, "grad_norm": 1.9196029901504517, "learning_rate": 0.0009614417719798886, "loss": 3.4623, "step": 4545 }, { "epoch": 0.30914526430221495, "grad_norm": 4.257297992706299, "learning_rate": 0.0009613993069710558, "loss": 3.6533, "step": 4550 }, { "epoch": 0.30948498437287675, "grad_norm": 0.8628863096237183, "learning_rate": 0.0009613568419622232, "loss": 3.8449, "step": 4555 }, { "epoch": 0.30982470444353855, "grad_norm": 0.8914626836776733, "learning_rate": 0.0009613143769533904, "loss": 3.7277, "step": 4560 }, { "epoch": 0.3101644245142003, "grad_norm": 1.2480285167694092, "learning_rate": 0.0009612719119445576, "loss": 3.8026, "step": 4565 }, { "epoch": 0.3105041445848621, "grad_norm": 5.2876129150390625, "learning_rate": 0.000961229446935725, "loss": 3.7952, "step": 4570 }, { "epoch": 0.3108438646555238, "grad_norm": 2.8995797634124756, "learning_rate": 0.0009611869819268923, "loss": 3.5644, "step": 4575 }, { "epoch": 0.3111835847261856, "grad_norm": 2.3347842693328857, "learning_rate": 0.0009611445169180595, "loss": 3.9901, "step": 4580 }, { "epoch": 0.3115233047968474, "grad_norm": 1.1441822052001953, "learning_rate": 0.0009611020519092268, "loss": 3.8186, "step": 4585 }, { "epoch": 0.31186302486750916, "grad_norm": 1.3066918849945068, "learning_rate": 0.0009610595869003941, "loss": 3.7356, "step": 4590 }, { "epoch": 0.31220274493817096, "grad_norm": 1.018831729888916, "learning_rate": 0.0009610171218915613, "loss": 3.7369, "step": 4595 }, { "epoch": 0.3125424650088327, "grad_norm": 1.1097151041030884, "learning_rate": 0.0009609746568827286, "loss": 3.8008, 
"step": 4600 }, { "epoch": 0.3128821850794945, "grad_norm": 1.062623381614685, "learning_rate": 0.000960932191873896, "loss": 3.8152, "step": 4605 }, { "epoch": 0.3132219051501563, "grad_norm": 1.2783746719360352, "learning_rate": 0.0009608897268650633, "loss": 3.6653, "step": 4610 }, { "epoch": 0.31356162522081804, "grad_norm": 0.90913987159729, "learning_rate": 0.0009608472618562305, "loss": 3.667, "step": 4615 }, { "epoch": 0.31390134529147984, "grad_norm": 4.707248210906982, "learning_rate": 0.0009608047968473977, "loss": 3.9547, "step": 4620 }, { "epoch": 0.3142410653621416, "grad_norm": 1.1647425889968872, "learning_rate": 0.0009607623318385651, "loss": 3.8036, "step": 4625 }, { "epoch": 0.3145807854328034, "grad_norm": 1.1200703382492065, "learning_rate": 0.0009607198668297323, "loss": 3.6124, "step": 4630 }, { "epoch": 0.3149205055034651, "grad_norm": 2.2258846759796143, "learning_rate": 0.0009606774018208995, "loss": 3.6701, "step": 4635 }, { "epoch": 0.3152602255741269, "grad_norm": 1.1642742156982422, "learning_rate": 0.000960634936812067, "loss": 3.9269, "step": 4640 }, { "epoch": 0.3155999456447887, "grad_norm": 1.0213059186935425, "learning_rate": 0.0009605924718032342, "loss": 3.7275, "step": 4645 }, { "epoch": 0.31593966571545046, "grad_norm": 0.9682114124298096, "learning_rate": 0.0009605500067944014, "loss": 3.6884, "step": 4650 }, { "epoch": 0.31627938578611225, "grad_norm": 1.4206169843673706, "learning_rate": 0.0009605075417855688, "loss": 3.6552, "step": 4655 }, { "epoch": 0.316619105856774, "grad_norm": 3.184673309326172, "learning_rate": 0.000960465076776736, "loss": 3.9729, "step": 4660 }, { "epoch": 0.3169588259274358, "grad_norm": 1.410278081893921, "learning_rate": 0.0009604226117679032, "loss": 3.7051, "step": 4665 }, { "epoch": 0.3172985459980976, "grad_norm": 1.7664132118225098, "learning_rate": 0.0009603801467590705, "loss": 3.9423, "step": 4670 }, { "epoch": 0.31763826606875933, "grad_norm": 1.0384076833724976, "learning_rate": 
0.0009603376817502379, "loss": 3.7861, "step": 4675 }, { "epoch": 0.31797798613942113, "grad_norm": 7.299210071563721, "learning_rate": 0.0009602952167414051, "loss": 3.6493, "step": 4680 }, { "epoch": 0.3183177062100829, "grad_norm": 3.458264112472534, "learning_rate": 0.0009602527517325724, "loss": 3.8685, "step": 4685 }, { "epoch": 0.31865742628074467, "grad_norm": 1.0091203451156616, "learning_rate": 0.0009602102867237397, "loss": 3.9601, "step": 4690 }, { "epoch": 0.31899714635140647, "grad_norm": 0.9379859566688538, "learning_rate": 0.0009601678217149069, "loss": 3.8619, "step": 4695 }, { "epoch": 0.3193368664220682, "grad_norm": 2.4536662101745605, "learning_rate": 0.0009601253567060742, "loss": 3.7479, "step": 4700 }, { "epoch": 0.31967658649273, "grad_norm": 0.958350419998169, "learning_rate": 0.0009600828916972415, "loss": 3.6261, "step": 4705 }, { "epoch": 0.32001630656339175, "grad_norm": 1.5080496072769165, "learning_rate": 0.0009600404266884088, "loss": 3.6299, "step": 4710 }, { "epoch": 0.32035602663405355, "grad_norm": 0.9307295083999634, "learning_rate": 0.0009599979616795761, "loss": 3.7789, "step": 4715 }, { "epoch": 0.3206957467047153, "grad_norm": 1.1181360483169556, "learning_rate": 0.0009599554966707433, "loss": 3.7916, "step": 4720 }, { "epoch": 0.3210354667753771, "grad_norm": 1.5595407485961914, "learning_rate": 0.0009599130316619106, "loss": 3.8776, "step": 4725 }, { "epoch": 0.3213751868460389, "grad_norm": 2.1008949279785156, "learning_rate": 0.0009598705666530779, "loss": 3.6076, "step": 4730 }, { "epoch": 0.3217149069167006, "grad_norm": 1.0758533477783203, "learning_rate": 0.0009598281016442451, "loss": 3.5094, "step": 4735 }, { "epoch": 0.3220546269873624, "grad_norm": 1.51201331615448, "learning_rate": 0.0009597856366354124, "loss": 3.7053, "step": 4740 }, { "epoch": 0.32239434705802417, "grad_norm": 1.0790780782699585, "learning_rate": 0.0009597431716265798, "loss": 3.6244, "step": 4745 }, { "epoch": 0.32273406712868596, 
"grad_norm": 1.182840347290039, "learning_rate": 0.000959700706617747, "loss": 3.6385, "step": 4750 }, { "epoch": 0.32307378719934776, "grad_norm": 1.4682059288024902, "learning_rate": 0.0009596582416089143, "loss": 3.8952, "step": 4755 }, { "epoch": 0.3234135072700095, "grad_norm": 2.05829119682312, "learning_rate": 0.0009596157766000816, "loss": 3.5066, "step": 4760 }, { "epoch": 0.3237532273406713, "grad_norm": 1.345306158065796, "learning_rate": 0.0009595733115912488, "loss": 3.8082, "step": 4765 }, { "epoch": 0.32409294741133304, "grad_norm": 1.3484514951705933, "learning_rate": 0.000959530846582416, "loss": 3.7725, "step": 4770 }, { "epoch": 0.32443266748199484, "grad_norm": 0.940561056137085, "learning_rate": 0.0009594883815735834, "loss": 3.559, "step": 4775 }, { "epoch": 0.32477238755265664, "grad_norm": 1.0889874696731567, "learning_rate": 0.0009594459165647507, "loss": 3.8779, "step": 4780 }, { "epoch": 0.3251121076233184, "grad_norm": 1.7974698543548584, "learning_rate": 0.0009594034515559179, "loss": 3.7622, "step": 4785 }, { "epoch": 0.3254518276939802, "grad_norm": 1.4531970024108887, "learning_rate": 0.0009593609865470853, "loss": 3.5627, "step": 4790 }, { "epoch": 0.3257915477646419, "grad_norm": 1.1882195472717285, "learning_rate": 0.0009593185215382525, "loss": 3.7631, "step": 4795 }, { "epoch": 0.3261312678353037, "grad_norm": 1.3279176950454712, "learning_rate": 0.0009592760565294197, "loss": 3.5192, "step": 4800 }, { "epoch": 0.32647098790596546, "grad_norm": 1.7268539667129517, "learning_rate": 0.000959233591520587, "loss": 3.6413, "step": 4805 }, { "epoch": 0.32681070797662726, "grad_norm": 1.469990611076355, "learning_rate": 0.0009591911265117543, "loss": 3.9157, "step": 4810 }, { "epoch": 0.32715042804728905, "grad_norm": 1.2988018989562988, "learning_rate": 0.0009591486615029216, "loss": 3.5623, "step": 4815 }, { "epoch": 0.3274901481179508, "grad_norm": 1.7490649223327637, "learning_rate": 0.000959106196494089, "loss": 3.781, "step": 
4820 }, { "epoch": 0.3278298681886126, "grad_norm": 0.9296604990959167, "learning_rate": 0.0009590637314852562, "loss": 3.836, "step": 4825 }, { "epoch": 0.32816958825927434, "grad_norm": 1.5864883661270142, "learning_rate": 0.0009590212664764234, "loss": 3.7702, "step": 4830 }, { "epoch": 0.32850930832993613, "grad_norm": 1.1811796426773071, "learning_rate": 0.0009589788014675907, "loss": 3.7335, "step": 4835 }, { "epoch": 0.32884902840059793, "grad_norm": 1.4311074018478394, "learning_rate": 0.000958936336458758, "loss": 3.9564, "step": 4840 }, { "epoch": 0.3291887484712597, "grad_norm": 1.1369911432266235, "learning_rate": 0.0009588938714499252, "loss": 3.8214, "step": 4845 }, { "epoch": 0.32952846854192147, "grad_norm": 1.9329270124435425, "learning_rate": 0.0009588514064410926, "loss": 3.77, "step": 4850 }, { "epoch": 0.3298681886125832, "grad_norm": 13.86181640625, "learning_rate": 0.0009588089414322599, "loss": 3.8484, "step": 4855 }, { "epoch": 0.330207908683245, "grad_norm": 1.9929890632629395, "learning_rate": 0.0009587664764234271, "loss": 3.4698, "step": 4860 }, { "epoch": 0.3305476287539068, "grad_norm": 1.2384124994277954, "learning_rate": 0.0009587240114145944, "loss": 3.8542, "step": 4865 }, { "epoch": 0.33088734882456855, "grad_norm": 1.2119916677474976, "learning_rate": 0.0009586815464057616, "loss": 4.0049, "step": 4870 }, { "epoch": 0.33122706889523035, "grad_norm": 0.9872625470161438, "learning_rate": 0.0009586390813969289, "loss": 3.4618, "step": 4875 }, { "epoch": 0.3315667889658921, "grad_norm": 1.1805238723754883, "learning_rate": 0.0009585966163880963, "loss": 3.3942, "step": 4880 }, { "epoch": 0.3319065090365539, "grad_norm": 0.9241127371788025, "learning_rate": 0.0009585541513792635, "loss": 3.8897, "step": 4885 }, { "epoch": 0.33224622910721563, "grad_norm": 2.562610626220703, "learning_rate": 0.0009585116863704308, "loss": 3.716, "step": 4890 }, { "epoch": 0.3325859491778774, "grad_norm": 1.8110535144805908, "learning_rate": 
0.0009584692213615981, "loss": 3.984, "step": 4895 }, { "epoch": 0.3329256692485392, "grad_norm": 1.4076822996139526, "learning_rate": 0.0009584267563527653, "loss": 3.718, "step": 4900 }, { "epoch": 0.33326538931920097, "grad_norm": 1.5568163394927979, "learning_rate": 0.0009583842913439325, "loss": 3.6733, "step": 4905 }, { "epoch": 0.33360510938986276, "grad_norm": 1.3953509330749512, "learning_rate": 0.0009583418263350999, "loss": 3.8545, "step": 4910 }, { "epoch": 0.3339448294605245, "grad_norm": 1.1893253326416016, "learning_rate": 0.0009582993613262672, "loss": 3.8148, "step": 4915 }, { "epoch": 0.3342845495311863, "grad_norm": 4.40472936630249, "learning_rate": 0.0009582568963174344, "loss": 3.5566, "step": 4920 }, { "epoch": 0.3346242696018481, "grad_norm": 1.0402964353561401, "learning_rate": 0.0009582144313086018, "loss": 3.8437, "step": 4925 }, { "epoch": 0.33496398967250984, "grad_norm": 1.942793607711792, "learning_rate": 0.000958171966299769, "loss": 4.2193, "step": 4930 }, { "epoch": 0.33530370974317164, "grad_norm": 1.9661610126495361, "learning_rate": 0.0009581295012909362, "loss": 3.7354, "step": 4935 }, { "epoch": 0.3356434298138334, "grad_norm": 0.9551913142204285, "learning_rate": 0.0009580870362821036, "loss": 3.6451, "step": 4940 }, { "epoch": 0.3359831498844952, "grad_norm": 0.8537495136260986, "learning_rate": 0.0009580445712732708, "loss": 3.8634, "step": 4945 }, { "epoch": 0.336322869955157, "grad_norm": 1.1686935424804688, "learning_rate": 0.0009580021062644382, "loss": 3.7495, "step": 4950 }, { "epoch": 0.3366625900258187, "grad_norm": 0.7999626398086548, "learning_rate": 0.0009579596412556055, "loss": 3.8142, "step": 4955 }, { "epoch": 0.3370023100964805, "grad_norm": 0.9165166616439819, "learning_rate": 0.0009579171762467727, "loss": 3.5877, "step": 4960 }, { "epoch": 0.33734203016714226, "grad_norm": 0.8132341504096985, "learning_rate": 0.00095787471123794, "loss": 3.7833, "step": 4965 }, { "epoch": 0.33768175023780406, "grad_norm": 
1.8504841327667236, "learning_rate": 0.0009578322462291072, "loss": 3.5799, "step": 4970 }, { "epoch": 0.3380214703084658, "grad_norm": 1.4949086904525757, "learning_rate": 0.0009577897812202745, "loss": 3.6808, "step": 4975 }, { "epoch": 0.3383611903791276, "grad_norm": 1.1684014797210693, "learning_rate": 0.0009577473162114418, "loss": 3.7741, "step": 4980 }, { "epoch": 0.3387009104497894, "grad_norm": 1.0179752111434937, "learning_rate": 0.0009577048512026091, "loss": 3.6925, "step": 4985 }, { "epoch": 0.33904063052045114, "grad_norm": 1.5498842000961304, "learning_rate": 0.0009576623861937764, "loss": 3.8278, "step": 4990 }, { "epoch": 0.33938035059111293, "grad_norm": 1.1606643199920654, "learning_rate": 0.0009576199211849437, "loss": 3.9071, "step": 4995 }, { "epoch": 0.3397200706617747, "grad_norm": 1.477744221687317, "learning_rate": 0.0009575774561761109, "loss": 3.6933, "step": 5000 }, { "epoch": 0.3400597907324365, "grad_norm": 1.2226893901824951, "learning_rate": 0.0009575349911672781, "loss": 3.9065, "step": 5005 }, { "epoch": 0.34039951080309827, "grad_norm": 1.2254085540771484, "learning_rate": 0.0009574925261584455, "loss": 3.6321, "step": 5010 }, { "epoch": 0.34073923087376, "grad_norm": 1.9400677680969238, "learning_rate": 0.0009574500611496127, "loss": 3.5512, "step": 5015 }, { "epoch": 0.3410789509444218, "grad_norm": 1.079958200454712, "learning_rate": 0.00095740759614078, "loss": 3.5091, "step": 5020 }, { "epoch": 0.34141867101508355, "grad_norm": 1.4161641597747803, "learning_rate": 0.0009573651311319474, "loss": 3.5902, "step": 5025 }, { "epoch": 0.34175839108574535, "grad_norm": 1.3935139179229736, "learning_rate": 0.0009573226661231146, "loss": 3.6097, "step": 5030 }, { "epoch": 0.34209811115640715, "grad_norm": 1.2177248001098633, "learning_rate": 0.0009572802011142818, "loss": 3.5747, "step": 5035 }, { "epoch": 0.3424378312270689, "grad_norm": 1.4001250267028809, "learning_rate": 0.0009572377361054492, "loss": 3.5645, "step": 5040 }, { 
"epoch": 0.3427775512977307, "grad_norm": 1.1264708042144775, "learning_rate": 0.0009571952710966164, "loss": 3.8645, "step": 5045 }, { "epoch": 0.34311727136839243, "grad_norm": 1.547636866569519, "learning_rate": 0.0009571528060877836, "loss": 3.9034, "step": 5050 }, { "epoch": 0.3434569914390542, "grad_norm": 1.2393828630447388, "learning_rate": 0.0009571103410789511, "loss": 4.1156, "step": 5055 }, { "epoch": 0.34379671150971597, "grad_norm": 2.219208002090454, "learning_rate": 0.0009570678760701183, "loss": 3.8553, "step": 5060 }, { "epoch": 0.34413643158037777, "grad_norm": 3.25079607963562, "learning_rate": 0.0009570254110612855, "loss": 3.7355, "step": 5065 }, { "epoch": 0.34447615165103956, "grad_norm": 1.1041288375854492, "learning_rate": 0.0009569829460524528, "loss": 3.6834, "step": 5070 }, { "epoch": 0.3448158717217013, "grad_norm": 1.3428406715393066, "learning_rate": 0.0009569489740453866, "loss": 3.6874, "step": 5075 }, { "epoch": 0.3451555917923631, "grad_norm": 1.5162826776504517, "learning_rate": 0.0009569065090365539, "loss": 3.772, "step": 5080 }, { "epoch": 0.34549531186302485, "grad_norm": 1.9768587350845337, "learning_rate": 0.0009568640440277212, "loss": 3.6376, "step": 5085 }, { "epoch": 0.34583503193368664, "grad_norm": 1.9925334453582764, "learning_rate": 0.0009568215790188885, "loss": 3.5891, "step": 5090 }, { "epoch": 0.34617475200434844, "grad_norm": 2.7873759269714355, "learning_rate": 0.0009567791140100557, "loss": 3.8428, "step": 5095 }, { "epoch": 0.3465144720750102, "grad_norm": 1.193323016166687, "learning_rate": 0.000956736649001223, "loss": 3.5272, "step": 5100 }, { "epoch": 0.346854192145672, "grad_norm": 1.4499406814575195, "learning_rate": 0.0009566941839923902, "loss": 3.6985, "step": 5105 }, { "epoch": 0.3471939122163337, "grad_norm": 1.4175437688827515, "learning_rate": 0.0009566517189835575, "loss": 3.7916, "step": 5110 }, { "epoch": 0.3475336322869955, "grad_norm": 1.9915251731872559, "learning_rate": 
0.0009566092539747249, "loss": 3.8164, "step": 5115 }, { "epoch": 0.3478733523576573, "grad_norm": 1.0645300149917603, "learning_rate": 0.0009565667889658921, "loss": 3.7411, "step": 5120 }, { "epoch": 0.34821307242831906, "grad_norm": 1.616195797920227, "learning_rate": 0.0009565243239570594, "loss": 3.8258, "step": 5125 }, { "epoch": 0.34855279249898086, "grad_norm": 1.4745944738388062, "learning_rate": 0.0009564818589482267, "loss": 3.5433, "step": 5130 }, { "epoch": 0.3488925125696426, "grad_norm": 1.3611572980880737, "learning_rate": 0.0009564393939393939, "loss": 3.5972, "step": 5135 }, { "epoch": 0.3492322326403044, "grad_norm": 2.5851516723632812, "learning_rate": 0.0009563969289305611, "loss": 3.7215, "step": 5140 }, { "epoch": 0.34957195271096614, "grad_norm": 1.194017767906189, "learning_rate": 0.0009563544639217286, "loss": 3.4261, "step": 5145 }, { "epoch": 0.34991167278162794, "grad_norm": 1.002134919166565, "learning_rate": 0.0009563119989128958, "loss": 3.8155, "step": 5150 }, { "epoch": 0.35025139285228973, "grad_norm": 1.0008842945098877, "learning_rate": 0.0009562695339040631, "loss": 3.588, "step": 5155 }, { "epoch": 0.3505911129229515, "grad_norm": 1.0924912691116333, "learning_rate": 0.0009562270688952304, "loss": 3.8505, "step": 5160 }, { "epoch": 0.3509308329936133, "grad_norm": 1.7533397674560547, "learning_rate": 0.0009561846038863976, "loss": 3.7063, "step": 5165 }, { "epoch": 0.351270553064275, "grad_norm": 0.9251696467399597, "learning_rate": 0.0009561421388775649, "loss": 3.601, "step": 5170 }, { "epoch": 0.3516102731349368, "grad_norm": 1.1320195198059082, "learning_rate": 0.0009560996738687322, "loss": 3.5101, "step": 5175 }, { "epoch": 0.3519499932055986, "grad_norm": 4.2274346351623535, "learning_rate": 0.0009560572088598995, "loss": 3.8409, "step": 5180 }, { "epoch": 0.35228971327626035, "grad_norm": 1.1874873638153076, "learning_rate": 0.0009560147438510668, "loss": 3.6948, "step": 5185 }, { "epoch": 0.35262943334692215, 
"grad_norm": 0.8868839740753174, "learning_rate": 0.000955972278842234, "loss": 3.8422, "step": 5190 }, { "epoch": 0.3529691534175839, "grad_norm": 1.0911035537719727, "learning_rate": 0.0009559298138334013, "loss": 3.6875, "step": 5195 }, { "epoch": 0.3533088734882457, "grad_norm": 1.2100552320480347, "learning_rate": 0.0009558873488245686, "loss": 3.7857, "step": 5200 }, { "epoch": 0.3536485935589075, "grad_norm": 1.153662919998169, "learning_rate": 0.0009558448838157358, "loss": 3.5674, "step": 5205 }, { "epoch": 0.35398831362956923, "grad_norm": 1.4947353601455688, "learning_rate": 0.0009558024188069031, "loss": 3.7687, "step": 5210 }, { "epoch": 0.354328033700231, "grad_norm": 0.9409990310668945, "learning_rate": 0.0009557599537980705, "loss": 3.8648, "step": 5215 }, { "epoch": 0.35466775377089277, "grad_norm": 1.1996432542800903, "learning_rate": 0.0009557174887892377, "loss": 3.8324, "step": 5220 }, { "epoch": 0.35500747384155457, "grad_norm": 1.9511903524398804, "learning_rate": 0.000955675023780405, "loss": 3.6027, "step": 5225 }, { "epoch": 0.3553471939122163, "grad_norm": 6.737617015838623, "learning_rate": 0.0009556325587715723, "loss": 3.6982, "step": 5230 }, { "epoch": 0.3556869139828781, "grad_norm": 2.9973106384277344, "learning_rate": 0.0009555900937627395, "loss": 3.4942, "step": 5235 }, { "epoch": 0.3560266340535399, "grad_norm": 0.8402284383773804, "learning_rate": 0.0009555476287539067, "loss": 3.7677, "step": 5240 }, { "epoch": 0.35636635412420165, "grad_norm": 1.113147258758545, "learning_rate": 0.0009555051637450741, "loss": 3.593, "step": 5245 }, { "epoch": 0.35670607419486344, "grad_norm": 1.174051284790039, "learning_rate": 0.0009554626987362414, "loss": 3.9097, "step": 5250 }, { "epoch": 0.3570457942655252, "grad_norm": 1.0213407278060913, "learning_rate": 0.0009554202337274086, "loss": 3.5559, "step": 5255 }, { "epoch": 0.357385514336187, "grad_norm": 1.6173577308654785, "learning_rate": 0.000955377768718576, "loss": 3.6182, "step": 
5260 }, { "epoch": 0.3577252344068488, "grad_norm": 1.0219593048095703, "learning_rate": 0.0009553353037097432, "loss": 3.3884, "step": 5265 }, { "epoch": 0.3580649544775105, "grad_norm": 1.0309182405471802, "learning_rate": 0.0009552928387009104, "loss": 3.6006, "step": 5270 }, { "epoch": 0.3584046745481723, "grad_norm": 1.0483531951904297, "learning_rate": 0.0009552503736920778, "loss": 3.8511, "step": 5275 }, { "epoch": 0.35874439461883406, "grad_norm": 0.9873725175857544, "learning_rate": 0.000955207908683245, "loss": 3.6234, "step": 5280 }, { "epoch": 0.35908411468949586, "grad_norm": 1.4338494539260864, "learning_rate": 0.0009551654436744123, "loss": 3.8215, "step": 5285 }, { "epoch": 0.35942383476015766, "grad_norm": 1.2328026294708252, "learning_rate": 0.0009551229786655797, "loss": 3.7851, "step": 5290 }, { "epoch": 0.3597635548308194, "grad_norm": 1.24882972240448, "learning_rate": 0.0009550805136567469, "loss": 3.9378, "step": 5295 }, { "epoch": 0.3601032749014812, "grad_norm": 0.8902396559715271, "learning_rate": 0.0009550380486479141, "loss": 3.8848, "step": 5300 }, { "epoch": 0.36044299497214294, "grad_norm": 0.8952227234840393, "learning_rate": 0.0009549955836390814, "loss": 3.9013, "step": 5305 }, { "epoch": 0.36078271504280474, "grad_norm": 5.062201976776123, "learning_rate": 0.0009549531186302487, "loss": 3.8318, "step": 5310 }, { "epoch": 0.3611224351134665, "grad_norm": 0.9878135323524475, "learning_rate": 0.0009549106536214159, "loss": 4.0119, "step": 5315 }, { "epoch": 0.3614621551841283, "grad_norm": 1.094847559928894, "learning_rate": 0.0009548681886125833, "loss": 3.6094, "step": 5320 }, { "epoch": 0.3618018752547901, "grad_norm": 1.0124294757843018, "learning_rate": 0.0009548257236037506, "loss": 3.9489, "step": 5325 }, { "epoch": 0.3621415953254518, "grad_norm": 0.9026657938957214, "learning_rate": 0.0009547832585949178, "loss": 3.7404, "step": 5330 }, { "epoch": 0.3624813153961136, "grad_norm": 3.5481836795806885, "learning_rate": 
0.0009547407935860851, "loss": 4.0219, "step": 5335 }, { "epoch": 0.36282103546677535, "grad_norm": 1.3254965543746948, "learning_rate": 0.0009546983285772523, "loss": 3.637, "step": 5340 }, { "epoch": 0.36316075553743715, "grad_norm": 1.2330217361450195, "learning_rate": 0.0009546558635684196, "loss": 3.5714, "step": 5345 }, { "epoch": 0.36350047560809895, "grad_norm": 1.2653712034225464, "learning_rate": 0.0009546133985595869, "loss": 3.7474, "step": 5350 }, { "epoch": 0.3638401956787607, "grad_norm": 1.0522609949111938, "learning_rate": 0.0009545709335507542, "loss": 3.4171, "step": 5355 }, { "epoch": 0.3641799157494225, "grad_norm": 1.2289862632751465, "learning_rate": 0.0009545284685419215, "loss": 3.567, "step": 5360 }, { "epoch": 0.36451963582008423, "grad_norm": 2.2240090370178223, "learning_rate": 0.0009544860035330888, "loss": 3.8492, "step": 5365 }, { "epoch": 0.36485935589074603, "grad_norm": 1.6121822595596313, "learning_rate": 0.000954443538524256, "loss": 3.5091, "step": 5370 }, { "epoch": 0.3651990759614078, "grad_norm": 1.504547119140625, "learning_rate": 0.0009544010735154233, "loss": 3.7453, "step": 5375 }, { "epoch": 0.36553879603206957, "grad_norm": 1.1721127033233643, "learning_rate": 0.0009543586085065906, "loss": 3.6277, "step": 5380 }, { "epoch": 0.36587851610273137, "grad_norm": 0.9358933568000793, "learning_rate": 0.0009543161434977578, "loss": 3.8166, "step": 5385 }, { "epoch": 0.3662182361733931, "grad_norm": 6.367455959320068, "learning_rate": 0.0009542736784889251, "loss": 3.6245, "step": 5390 }, { "epoch": 0.3665579562440549, "grad_norm": 1.2850887775421143, "learning_rate": 0.0009542312134800925, "loss": 3.5691, "step": 5395 }, { "epoch": 0.36689767631471665, "grad_norm": 1.6504201889038086, "learning_rate": 0.0009541887484712597, "loss": 3.7624, "step": 5400 }, { "epoch": 0.36723739638537845, "grad_norm": 5.81767463684082, "learning_rate": 0.0009541462834624269, "loss": 3.6069, "step": 5405 }, { "epoch": 0.36757711645604024, 
"grad_norm": 1.1315971612930298, "learning_rate": 0.0009541038184535943, "loss": 3.6246, "step": 5410 }, { "epoch": 0.367916836526702, "grad_norm": 1.118908166885376, "learning_rate": 0.0009540613534447615, "loss": 3.7847, "step": 5415 }, { "epoch": 0.3682565565973638, "grad_norm": 0.9292604923248291, "learning_rate": 0.0009540188884359287, "loss": 3.6641, "step": 5420 }, { "epoch": 0.3685962766680255, "grad_norm": 1.7065891027450562, "learning_rate": 0.0009539764234270962, "loss": 3.5163, "step": 5425 }, { "epoch": 0.3689359967386873, "grad_norm": 1.223719596862793, "learning_rate": 0.0009539339584182634, "loss": 3.7918, "step": 5430 }, { "epoch": 0.3692757168093491, "grad_norm": 1.9273377656936646, "learning_rate": 0.0009538914934094306, "loss": 3.6335, "step": 5435 }, { "epoch": 0.36961543688001086, "grad_norm": 3.0467746257781982, "learning_rate": 0.000953849028400598, "loss": 3.6261, "step": 5440 }, { "epoch": 0.36995515695067266, "grad_norm": 1.1062440872192383, "learning_rate": 0.0009538065633917652, "loss": 3.7659, "step": 5445 }, { "epoch": 0.3702948770213344, "grad_norm": 1.4020024538040161, "learning_rate": 0.0009537640983829324, "loss": 3.5376, "step": 5450 }, { "epoch": 0.3706345970919962, "grad_norm": 1.067507028579712, "learning_rate": 0.0009537216333740997, "loss": 3.8924, "step": 5455 }, { "epoch": 0.370974317162658, "grad_norm": 1.194716215133667, "learning_rate": 0.0009536791683652671, "loss": 3.6304, "step": 5460 }, { "epoch": 0.37131403723331974, "grad_norm": 1.374372124671936, "learning_rate": 0.0009536367033564343, "loss": 3.6069, "step": 5465 }, { "epoch": 0.37165375730398154, "grad_norm": 1.3556677103042603, "learning_rate": 0.0009535942383476016, "loss": 3.8234, "step": 5470 }, { "epoch": 0.3719934773746433, "grad_norm": 3.0137810707092285, "learning_rate": 0.0009535517733387689, "loss": 3.8855, "step": 5475 }, { "epoch": 0.3723331974453051, "grad_norm": 1.0279306173324585, "learning_rate": 0.0009535093083299361, "loss": 3.8023, "step": 
5480 }, { "epoch": 0.3726729175159668, "grad_norm": 1.2137823104858398, "learning_rate": 0.0009534668433211034, "loss": 3.9979, "step": 5485 }, { "epoch": 0.3730126375866286, "grad_norm": 1.340428352355957, "learning_rate": 0.0009534243783122706, "loss": 3.6659, "step": 5490 }, { "epoch": 0.3733523576572904, "grad_norm": 1.103487253189087, "learning_rate": 0.0009533819133034381, "loss": 3.7088, "step": 5495 }, { "epoch": 0.37369207772795215, "grad_norm": 1.0343180894851685, "learning_rate": 0.0009533394482946053, "loss": 3.7829, "step": 5500 }, { "epoch": 0.37403179779861395, "grad_norm": 1.935268759727478, "learning_rate": 0.0009532969832857725, "loss": 3.8892, "step": 5505 }, { "epoch": 0.3743715178692757, "grad_norm": 1.081479549407959, "learning_rate": 0.0009532545182769399, "loss": 3.5737, "step": 5510 }, { "epoch": 0.3747112379399375, "grad_norm": 1.2317755222320557, "learning_rate": 0.0009532120532681071, "loss": 3.8176, "step": 5515 }, { "epoch": 0.3750509580105993, "grad_norm": 1.0253571271896362, "learning_rate": 0.0009531695882592743, "loss": 3.4613, "step": 5520 }, { "epoch": 0.37539067808126103, "grad_norm": 1.1692956686019897, "learning_rate": 0.0009531271232504417, "loss": 3.5683, "step": 5525 }, { "epoch": 0.37573039815192283, "grad_norm": 4.029736518859863, "learning_rate": 0.000953084658241609, "loss": 3.6935, "step": 5530 }, { "epoch": 0.37607011822258457, "grad_norm": 1.3538397550582886, "learning_rate": 0.0009530421932327762, "loss": 4.085, "step": 5535 }, { "epoch": 0.37640983829324637, "grad_norm": 1.2644790410995483, "learning_rate": 0.0009529997282239436, "loss": 3.7186, "step": 5540 }, { "epoch": 0.37674955836390817, "grad_norm": 1.3055534362792969, "learning_rate": 0.0009529572632151108, "loss": 4.0079, "step": 5545 }, { "epoch": 0.3770892784345699, "grad_norm": 0.9528002738952637, "learning_rate": 0.000952914798206278, "loss": 3.9095, "step": 5550 }, { "epoch": 0.3774289985052317, "grad_norm": 1.6377143859863281, "learning_rate": 
0.0009528723331974453, "loss": 3.9234, "step": 5555 }, { "epoch": 0.37776871857589345, "grad_norm": 1.3917982578277588, "learning_rate": 0.0009528298681886126, "loss": 3.885, "step": 5560 }, { "epoch": 0.37810843864655524, "grad_norm": 1.1703463792800903, "learning_rate": 0.0009527874031797799, "loss": 3.5973, "step": 5565 }, { "epoch": 0.378448158717217, "grad_norm": 1.9888688325881958, "learning_rate": 0.0009527449381709472, "loss": 3.8142, "step": 5570 }, { "epoch": 0.3787878787878788, "grad_norm": 1.1203135251998901, "learning_rate": 0.0009527024731621145, "loss": 3.4735, "step": 5575 }, { "epoch": 0.3791275988585406, "grad_norm": 4.498457908630371, "learning_rate": 0.0009526600081532817, "loss": 3.6015, "step": 5580 }, { "epoch": 0.3794673189292023, "grad_norm": 1.3430852890014648, "learning_rate": 0.000952617543144449, "loss": 3.6387, "step": 5585 }, { "epoch": 0.3798070389998641, "grad_norm": 1.1951220035552979, "learning_rate": 0.0009525750781356162, "loss": 3.4751, "step": 5590 }, { "epoch": 0.38014675907052586, "grad_norm": 1.1512469053268433, "learning_rate": 0.0009525326131267835, "loss": 3.7064, "step": 5595 }, { "epoch": 0.38048647914118766, "grad_norm": 1.02385413646698, "learning_rate": 0.0009524901481179509, "loss": 3.8822, "step": 5600 }, { "epoch": 0.38082619921184946, "grad_norm": 2.266383647918701, "learning_rate": 0.0009524476831091181, "loss": 3.7652, "step": 5605 }, { "epoch": 0.3811659192825112, "grad_norm": 1.2888633012771606, "learning_rate": 0.0009524052181002854, "loss": 3.6037, "step": 5610 }, { "epoch": 0.381505639353173, "grad_norm": 1.2476814985275269, "learning_rate": 0.0009523627530914527, "loss": 3.6331, "step": 5615 }, { "epoch": 0.38184535942383474, "grad_norm": 0.9476566314697266, "learning_rate": 0.0009523202880826199, "loss": 3.7645, "step": 5620 }, { "epoch": 0.38218507949449654, "grad_norm": 1.1279027462005615, "learning_rate": 0.0009522778230737871, "loss": 3.5917, "step": 5625 }, { "epoch": 0.38252479956515834, 
"grad_norm": 1.205605387687683, "learning_rate": 0.0009522353580649546, "loss": 3.7947, "step": 5630 }, { "epoch": 0.3828645196358201, "grad_norm": 1.091375470161438, "learning_rate": 0.0009521928930561218, "loss": 3.5473, "step": 5635 }, { "epoch": 0.3832042397064819, "grad_norm": 1.2799270153045654, "learning_rate": 0.000952150428047289, "loss": 3.7655, "step": 5640 }, { "epoch": 0.3835439597771436, "grad_norm": 1.3126327991485596, "learning_rate": 0.0009521079630384564, "loss": 3.7705, "step": 5645 }, { "epoch": 0.3838836798478054, "grad_norm": 2.500420093536377, "learning_rate": 0.0009520654980296236, "loss": 3.633, "step": 5650 }, { "epoch": 0.38422339991846716, "grad_norm": 1.160470724105835, "learning_rate": 0.0009520230330207908, "loss": 3.8583, "step": 5655 }, { "epoch": 0.38456311998912895, "grad_norm": 1.1482396125793457, "learning_rate": 0.0009519805680119582, "loss": 3.6716, "step": 5660 }, { "epoch": 0.38490284005979075, "grad_norm": 1.1528693437576294, "learning_rate": 0.0009519381030031255, "loss": 3.6538, "step": 5665 }, { "epoch": 0.3852425601304525, "grad_norm": 1.1429359912872314, "learning_rate": 0.0009518956379942927, "loss": 3.5855, "step": 5670 }, { "epoch": 0.3855822802011143, "grad_norm": 2.6612730026245117, "learning_rate": 0.0009518531729854601, "loss": 3.6073, "step": 5675 }, { "epoch": 0.38592200027177603, "grad_norm": 2.4097864627838135, "learning_rate": 0.0009518107079766273, "loss": 3.6385, "step": 5680 }, { "epoch": 0.38626172034243783, "grad_norm": 0.9922126531600952, "learning_rate": 0.0009517682429677945, "loss": 3.6102, "step": 5685 }, { "epoch": 0.38660144041309963, "grad_norm": 1.1976373195648193, "learning_rate": 0.0009517257779589618, "loss": 3.8261, "step": 5690 }, { "epoch": 0.38694116048376137, "grad_norm": 0.974834680557251, "learning_rate": 0.0009516833129501291, "loss": 3.6081, "step": 5695 }, { "epoch": 0.38728088055442317, "grad_norm": 1.296669840812683, "learning_rate": 0.0009516408479412964, "loss": 3.7101, 
"step": 5700 }, { "epoch": 0.3876206006250849, "grad_norm": 1.0671838521957397, "learning_rate": 0.0009515983829324637, "loss": 3.6258, "step": 5705 }, { "epoch": 0.3879603206957467, "grad_norm": 1.0688583850860596, "learning_rate": 0.000951555917923631, "loss": 3.7222, "step": 5710 }, { "epoch": 0.3883000407664085, "grad_norm": 1.0784046649932861, "learning_rate": 0.0009515134529147982, "loss": 3.7649, "step": 5715 }, { "epoch": 0.38863976083707025, "grad_norm": 1.1260690689086914, "learning_rate": 0.0009514709879059655, "loss": 3.7519, "step": 5720 }, { "epoch": 0.38897948090773204, "grad_norm": 1.0358492136001587, "learning_rate": 0.0009514285228971328, "loss": 3.8095, "step": 5725 }, { "epoch": 0.3893192009783938, "grad_norm": 1.073951244354248, "learning_rate": 0.0009513860578883, "loss": 3.7285, "step": 5730 }, { "epoch": 0.3896589210490556, "grad_norm": 1.1902964115142822, "learning_rate": 0.0009513435928794674, "loss": 3.6962, "step": 5735 }, { "epoch": 0.3899986411197173, "grad_norm": 1.2849375009536743, "learning_rate": 0.0009513011278706346, "loss": 3.9486, "step": 5740 }, { "epoch": 0.3903383611903791, "grad_norm": 0.8682353496551514, "learning_rate": 0.0009512586628618019, "loss": 3.6897, "step": 5745 }, { "epoch": 0.3906780812610409, "grad_norm": 1.3415600061416626, "learning_rate": 0.0009512161978529692, "loss": 3.7475, "step": 5750 }, { "epoch": 0.39101780133170266, "grad_norm": 1.095535159111023, "learning_rate": 0.0009511737328441364, "loss": 3.7945, "step": 5755 }, { "epoch": 0.39135752140236446, "grad_norm": 1.156894326210022, "learning_rate": 0.0009511312678353037, "loss": 3.8429, "step": 5760 }, { "epoch": 0.3916972414730262, "grad_norm": 1.3410859107971191, "learning_rate": 0.000951088802826471, "loss": 3.4894, "step": 5765 }, { "epoch": 0.392036961543688, "grad_norm": 3.099006175994873, "learning_rate": 0.0009510463378176383, "loss": 3.8844, "step": 5770 }, { "epoch": 0.3923766816143498, "grad_norm": 1.443575143814087, "learning_rate": 
0.0009510038728088056, "loss": 3.888, "step": 5775 }, { "epoch": 0.39271640168501154, "grad_norm": 1.1571073532104492, "learning_rate": 0.0009509614077999729, "loss": 3.5171, "step": 5780 }, { "epoch": 0.39305612175567334, "grad_norm": 1.1101527214050293, "learning_rate": 0.0009509189427911401, "loss": 3.8468, "step": 5785 }, { "epoch": 0.3933958418263351, "grad_norm": 1.5306934118270874, "learning_rate": 0.0009508764777823073, "loss": 3.8877, "step": 5790 }, { "epoch": 0.3937355618969969, "grad_norm": 1.1900215148925781, "learning_rate": 0.0009508340127734747, "loss": 3.7025, "step": 5795 }, { "epoch": 0.3940752819676587, "grad_norm": 1.4084842205047607, "learning_rate": 0.0009507915477646419, "loss": 3.8555, "step": 5800 }, { "epoch": 0.3944150020383204, "grad_norm": 6.075224876403809, "learning_rate": 0.0009507490827558092, "loss": 3.8502, "step": 5805 }, { "epoch": 0.3947547221089822, "grad_norm": 1.107698678970337, "learning_rate": 0.0009507066177469766, "loss": 3.6819, "step": 5810 }, { "epoch": 0.39509444217964396, "grad_norm": 1.3059264421463013, "learning_rate": 0.0009506641527381438, "loss": 3.3492, "step": 5815 }, { "epoch": 0.39543416225030575, "grad_norm": 1.0872838497161865, "learning_rate": 0.000950621687729311, "loss": 3.8966, "step": 5820 }, { "epoch": 0.3957738823209675, "grad_norm": 0.9930705428123474, "learning_rate": 0.0009505792227204784, "loss": 3.8112, "step": 5825 }, { "epoch": 0.3961136023916293, "grad_norm": 3.4324820041656494, "learning_rate": 0.0009505367577116456, "loss": 3.5077, "step": 5830 }, { "epoch": 0.3964533224622911, "grad_norm": 1.0019121170043945, "learning_rate": 0.0009504942927028129, "loss": 3.8734, "step": 5835 }, { "epoch": 0.39679304253295283, "grad_norm": 1.0993496179580688, "learning_rate": 0.0009504518276939802, "loss": 3.6494, "step": 5840 }, { "epoch": 0.39713276260361463, "grad_norm": 1.4946439266204834, "learning_rate": 0.0009504093626851475, "loss": 3.6711, "step": 5845 }, { "epoch": 0.3974724826742764, 
"grad_norm": 1.1829396486282349, "learning_rate": 0.0009503668976763148, "loss": 3.5251, "step": 5850 }, { "epoch": 0.39781220274493817, "grad_norm": 1.072950839996338, "learning_rate": 0.000950324432667482, "loss": 3.7528, "step": 5855 }, { "epoch": 0.39815192281559997, "grad_norm": 0.9970988035202026, "learning_rate": 0.0009502819676586493, "loss": 3.732, "step": 5860 }, { "epoch": 0.3984916428862617, "grad_norm": 1.0353868007659912, "learning_rate": 0.0009502395026498166, "loss": 3.926, "step": 5865 }, { "epoch": 0.3988313629569235, "grad_norm": 3.478158712387085, "learning_rate": 0.0009501970376409838, "loss": 3.6483, "step": 5870 }, { "epoch": 0.39917108302758525, "grad_norm": 0.888249397277832, "learning_rate": 0.0009501545726321512, "loss": 3.4292, "step": 5875 }, { "epoch": 0.39951080309824705, "grad_norm": 1.3389489650726318, "learning_rate": 0.0009501121076233185, "loss": 3.7357, "step": 5880 }, { "epoch": 0.39985052316890884, "grad_norm": 4.101470470428467, "learning_rate": 0.0009500696426144857, "loss": 3.946, "step": 5885 }, { "epoch": 0.4001902432395706, "grad_norm": 0.9313814043998718, "learning_rate": 0.0009500271776056529, "loss": 3.7328, "step": 5890 }, { "epoch": 0.4005299633102324, "grad_norm": 1.2180026769638062, "learning_rate": 0.0009499847125968203, "loss": 3.5572, "step": 5895 }, { "epoch": 0.4008696833808941, "grad_norm": 1.2215102910995483, "learning_rate": 0.0009499422475879875, "loss": 3.6838, "step": 5900 }, { "epoch": 0.4012094034515559, "grad_norm": 0.936327338218689, "learning_rate": 0.0009498997825791547, "loss": 3.7664, "step": 5905 }, { "epoch": 0.40154912352221767, "grad_norm": 1.2562206983566284, "learning_rate": 0.0009498573175703222, "loss": 3.774, "step": 5910 }, { "epoch": 0.40188884359287946, "grad_norm": 1.073562502861023, "learning_rate": 0.0009498148525614894, "loss": 3.8829, "step": 5915 }, { "epoch": 0.40222856366354126, "grad_norm": 1.5196235179901123, "learning_rate": 0.0009497723875526566, "loss": 3.8426, "step": 
5920 }, { "epoch": 0.402568283734203, "grad_norm": 1.3453785181045532, "learning_rate": 0.000949729922543824, "loss": 3.7102, "step": 5925 }, { "epoch": 0.4029080038048648, "grad_norm": 1.2013074159622192, "learning_rate": 0.0009496874575349912, "loss": 3.6381, "step": 5930 }, { "epoch": 0.40324772387552654, "grad_norm": 0.9586192965507507, "learning_rate": 0.0009496449925261584, "loss": 3.9005, "step": 5935 }, { "epoch": 0.40358744394618834, "grad_norm": 1.1437575817108154, "learning_rate": 0.0009496025275173257, "loss": 3.6924, "step": 5940 }, { "epoch": 0.40392716401685014, "grad_norm": 1.2208669185638428, "learning_rate": 0.0009495600625084931, "loss": 3.6433, "step": 5945 }, { "epoch": 0.4042668840875119, "grad_norm": 1.1657934188842773, "learning_rate": 0.0009495175974996603, "loss": 3.959, "step": 5950 }, { "epoch": 0.4046066041581737, "grad_norm": 1.018587350845337, "learning_rate": 0.0009494751324908276, "loss": 3.8262, "step": 5955 }, { "epoch": 0.4049463242288354, "grad_norm": 1.0834336280822754, "learning_rate": 0.0009494326674819949, "loss": 3.7183, "step": 5960 }, { "epoch": 0.4052860442994972, "grad_norm": 1.0993506908416748, "learning_rate": 0.0009493902024731621, "loss": 3.8289, "step": 5965 }, { "epoch": 0.405625764370159, "grad_norm": 1.3152086734771729, "learning_rate": 0.0009493477374643294, "loss": 4.011, "step": 5970 }, { "epoch": 0.40596548444082076, "grad_norm": 1.2822860479354858, "learning_rate": 0.0009493052724554966, "loss": 3.5298, "step": 5975 }, { "epoch": 0.40630520451148255, "grad_norm": 1.6147123575210571, "learning_rate": 0.000949262807446664, "loss": 3.6971, "step": 5980 }, { "epoch": 0.4066449245821443, "grad_norm": 1.5828018188476562, "learning_rate": 0.0009492203424378313, "loss": 3.6263, "step": 5985 }, { "epoch": 0.4069846446528061, "grad_norm": 1.3114936351776123, "learning_rate": 0.0009491778774289985, "loss": 4.0223, "step": 5990 }, { "epoch": 0.40732436472346784, "grad_norm": 4.493700981140137, "learning_rate": 
0.0009491354124201658, "loss": 3.851, "step": 5995 }, { "epoch": 0.40766408479412963, "grad_norm": 1.0380257368087769, "learning_rate": 0.0009490929474113331, "loss": 3.6132, "step": 6000 }, { "epoch": 0.40800380486479143, "grad_norm": 1.2151621580123901, "learning_rate": 0.0009490504824025003, "loss": 3.7499, "step": 6005 }, { "epoch": 0.4083435249354532, "grad_norm": 1.2467670440673828, "learning_rate": 0.0009490080173936676, "loss": 3.7192, "step": 6010 }, { "epoch": 0.40868324500611497, "grad_norm": 1.1538589000701904, "learning_rate": 0.000948965552384835, "loss": 3.7104, "step": 6015 }, { "epoch": 0.4090229650767767, "grad_norm": 1.1752573251724243, "learning_rate": 0.0009489230873760022, "loss": 3.7732, "step": 6020 }, { "epoch": 0.4093626851474385, "grad_norm": 1.241243600845337, "learning_rate": 0.0009488806223671694, "loss": 3.5896, "step": 6025 }, { "epoch": 0.4097024052181003, "grad_norm": 1.165532112121582, "learning_rate": 0.0009488381573583368, "loss": 3.5807, "step": 6030 }, { "epoch": 0.41004212528876205, "grad_norm": 1.164171576499939, "learning_rate": 0.000948795692349504, "loss": 3.6479, "step": 6035 }, { "epoch": 0.41038184535942385, "grad_norm": 2.6586148738861084, "learning_rate": 0.0009487532273406712, "loss": 3.6523, "step": 6040 }, { "epoch": 0.4107215654300856, "grad_norm": 2.2949278354644775, "learning_rate": 0.0009487107623318386, "loss": 3.683, "step": 6045 }, { "epoch": 0.4110612855007474, "grad_norm": 1.2675297260284424, "learning_rate": 0.0009486682973230059, "loss": 3.7645, "step": 6050 }, { "epoch": 0.4114010055714092, "grad_norm": 0.9878287315368652, "learning_rate": 0.0009486258323141731, "loss": 3.4998, "step": 6055 }, { "epoch": 0.4117407256420709, "grad_norm": 1.1512476205825806, "learning_rate": 0.0009485833673053405, "loss": 3.5999, "step": 6060 }, { "epoch": 0.4120804457127327, "grad_norm": 1.3821051120758057, "learning_rate": 0.0009485409022965077, "loss": 3.8708, "step": 6065 }, { "epoch": 0.41242016578339447, 
"grad_norm": 1.2752748727798462, "learning_rate": 0.0009484984372876749, "loss": 4.0288, "step": 6070 }, { "epoch": 0.41275988585405626, "grad_norm": 1.1951062679290771, "learning_rate": 0.0009484559722788422, "loss": 3.2878, "step": 6075 }, { "epoch": 0.413099605924718, "grad_norm": 1.5378471612930298, "learning_rate": 0.0009484135072700095, "loss": 3.8522, "step": 6080 }, { "epoch": 0.4134393259953798, "grad_norm": 1.7370601892471313, "learning_rate": 0.0009483710422611768, "loss": 3.693, "step": 6085 }, { "epoch": 0.4137790460660416, "grad_norm": 1.1362842321395874, "learning_rate": 0.0009483285772523441, "loss": 3.9106, "step": 6090 }, { "epoch": 0.41411876613670334, "grad_norm": 1.5647481679916382, "learning_rate": 0.0009482861122435114, "loss": 3.7076, "step": 6095 }, { "epoch": 0.41445848620736514, "grad_norm": 1.0030677318572998, "learning_rate": 0.0009482436472346786, "loss": 3.6099, "step": 6100 }, { "epoch": 0.4147982062780269, "grad_norm": 1.0165951251983643, "learning_rate": 0.0009482011822258459, "loss": 3.5804, "step": 6105 }, { "epoch": 0.4151379263486887, "grad_norm": 1.1157257556915283, "learning_rate": 0.0009481587172170132, "loss": 3.6585, "step": 6110 }, { "epoch": 0.4154776464193505, "grad_norm": 1.4630250930786133, "learning_rate": 0.0009481162522081804, "loss": 3.6373, "step": 6115 }, { "epoch": 0.4158173664900122, "grad_norm": 1.0570592880249023, "learning_rate": 0.0009480737871993478, "loss": 3.9197, "step": 6120 }, { "epoch": 0.416157086560674, "grad_norm": 0.9899817109107971, "learning_rate": 0.000948031322190515, "loss": 3.7524, "step": 6125 }, { "epoch": 0.41649680663133576, "grad_norm": 2.664741277694702, "learning_rate": 0.0009479888571816823, "loss": 3.5579, "step": 6130 }, { "epoch": 0.41683652670199756, "grad_norm": 1.2770137786865234, "learning_rate": 0.0009479463921728496, "loss": 3.62, "step": 6135 }, { "epoch": 0.41717624677265935, "grad_norm": 6.187646865844727, "learning_rate": 0.0009479039271640168, "loss": 3.7727, "step": 
6140 }, { "epoch": 0.4175159668433211, "grad_norm": 1.1661713123321533, "learning_rate": 0.0009478614621551841, "loss": 3.8306, "step": 6145 }, { "epoch": 0.4178556869139829, "grad_norm": 2.6171793937683105, "learning_rate": 0.0009478189971463514, "loss": 3.842, "step": 6150 }, { "epoch": 0.41819540698464464, "grad_norm": 3.458650827407837, "learning_rate": 0.0009477765321375187, "loss": 3.5683, "step": 6155 }, { "epoch": 0.41853512705530643, "grad_norm": 1.1584410667419434, "learning_rate": 0.000947734067128686, "loss": 3.7374, "step": 6160 }, { "epoch": 0.4188748471259682, "grad_norm": 1.2718228101730347, "learning_rate": 0.0009476916021198533, "loss": 3.9218, "step": 6165 }, { "epoch": 0.41921456719663, "grad_norm": 1.2231667041778564, "learning_rate": 0.0009476491371110205, "loss": 3.8017, "step": 6170 }, { "epoch": 0.41955428726729177, "grad_norm": 1.3204630613327026, "learning_rate": 0.0009476066721021878, "loss": 3.7824, "step": 6175 }, { "epoch": 0.4198940073379535, "grad_norm": 1.01287043094635, "learning_rate": 0.0009475642070933551, "loss": 3.7023, "step": 6180 }, { "epoch": 0.4202337274086153, "grad_norm": 0.9673585891723633, "learning_rate": 0.0009475217420845223, "loss": 3.7006, "step": 6185 }, { "epoch": 0.42057344747927705, "grad_norm": 1.1784453392028809, "learning_rate": 0.0009474792770756897, "loss": 3.5723, "step": 6190 }, { "epoch": 0.42091316754993885, "grad_norm": 0.9351099729537964, "learning_rate": 0.000947436812066857, "loss": 3.8126, "step": 6195 }, { "epoch": 0.42125288762060065, "grad_norm": 1.0416945219039917, "learning_rate": 0.0009473943470580242, "loss": 3.6757, "step": 6200 }, { "epoch": 0.4215926076912624, "grad_norm": 1.2345649003982544, "learning_rate": 0.0009473518820491915, "loss": 3.6747, "step": 6205 }, { "epoch": 0.4219323277619242, "grad_norm": 1.0960874557495117, "learning_rate": 0.0009473094170403588, "loss": 3.6959, "step": 6210 }, { "epoch": 0.42227204783258593, "grad_norm": 1.2891786098480225, "learning_rate": 
0.000947266952031526, "loss": 3.516, "step": 6215 }, { "epoch": 0.4226117679032477, "grad_norm": 1.1018257141113281, "learning_rate": 0.0009472244870226934, "loss": 3.7597, "step": 6220 }, { "epoch": 0.4229514879739095, "grad_norm": 1.0776675939559937, "learning_rate": 0.0009471820220138606, "loss": 3.5723, "step": 6225 }, { "epoch": 0.42329120804457127, "grad_norm": 1.0148835182189941, "learning_rate": 0.0009471395570050279, "loss": 3.6593, "step": 6230 }, { "epoch": 0.42363092811523306, "grad_norm": 0.9103671312332153, "learning_rate": 0.0009470970919961952, "loss": 3.7263, "step": 6235 }, { "epoch": 0.4239706481858948, "grad_norm": 1.3977333307266235, "learning_rate": 0.0009470546269873624, "loss": 3.6404, "step": 6240 }, { "epoch": 0.4243103682565566, "grad_norm": 1.3441193103790283, "learning_rate": 0.0009470121619785297, "loss": 3.7087, "step": 6245 }, { "epoch": 0.42465008832721834, "grad_norm": 1.7664027214050293, "learning_rate": 0.000946969696969697, "loss": 3.5677, "step": 6250 }, { "epoch": 0.42498980839788014, "grad_norm": 1.499841332435608, "learning_rate": 0.0009469272319608643, "loss": 3.6879, "step": 6255 }, { "epoch": 0.42532952846854194, "grad_norm": 1.3653477430343628, "learning_rate": 0.0009468847669520316, "loss": 3.3687, "step": 6260 }, { "epoch": 0.4256692485392037, "grad_norm": 1.561939001083374, "learning_rate": 0.0009468423019431989, "loss": 3.5195, "step": 6265 }, { "epoch": 0.4260089686098655, "grad_norm": 1.2736279964447021, "learning_rate": 0.0009467998369343661, "loss": 3.834, "step": 6270 }, { "epoch": 0.4263486886805272, "grad_norm": 1.3535723686218262, "learning_rate": 0.0009467573719255333, "loss": 3.5923, "step": 6275 }, { "epoch": 0.426688408751189, "grad_norm": 1.4644920825958252, "learning_rate": 0.0009467149069167007, "loss": 3.6382, "step": 6280 }, { "epoch": 0.4270281288218508, "grad_norm": 1.2731937170028687, "learning_rate": 0.0009466724419078679, "loss": 3.8715, "step": 6285 }, { "epoch": 0.42736784889251256, 
"grad_norm": 1.2855011224746704, "learning_rate": 0.0009466299768990352, "loss": 3.6907, "step": 6290 }, { "epoch": 0.42770756896317436, "grad_norm": 1.0074275732040405, "learning_rate": 0.0009465875118902026, "loss": 3.8788, "step": 6295 }, { "epoch": 0.4280472890338361, "grad_norm": 1.0491783618927002, "learning_rate": 0.0009465450468813698, "loss": 3.7404, "step": 6300 }, { "epoch": 0.4283870091044979, "grad_norm": 1.1039282083511353, "learning_rate": 0.000946502581872537, "loss": 3.36, "step": 6305 }, { "epoch": 0.4287267291751597, "grad_norm": 1.4295217990875244, "learning_rate": 0.0009464601168637044, "loss": 4.084, "step": 6310 }, { "epoch": 0.42906644924582144, "grad_norm": 2.3130288124084473, "learning_rate": 0.0009464176518548716, "loss": 3.7675, "step": 6315 }, { "epoch": 0.42940616931648323, "grad_norm": 1.1467266082763672, "learning_rate": 0.0009463751868460388, "loss": 3.7389, "step": 6320 }, { "epoch": 0.429745889387145, "grad_norm": 1.4909875392913818, "learning_rate": 0.0009463327218372063, "loss": 3.5983, "step": 6325 }, { "epoch": 0.4300856094578068, "grad_norm": 55.99380111694336, "learning_rate": 0.0009462902568283735, "loss": 3.7456, "step": 6330 }, { "epoch": 0.4304253295284685, "grad_norm": 3.4502925872802734, "learning_rate": 0.0009462477918195407, "loss": 3.7502, "step": 6335 }, { "epoch": 0.4307650495991303, "grad_norm": 1.1081171035766602, "learning_rate": 0.000946205326810708, "loss": 3.7078, "step": 6340 }, { "epoch": 0.4311047696697921, "grad_norm": 1.1747417449951172, "learning_rate": 0.0009461628618018753, "loss": 3.5984, "step": 6345 }, { "epoch": 0.43144448974045385, "grad_norm": 1.32005774974823, "learning_rate": 0.0009461203967930425, "loss": 3.5909, "step": 6350 }, { "epoch": 0.43178420981111565, "grad_norm": 1.4474611282348633, "learning_rate": 0.0009460779317842098, "loss": 3.6718, "step": 6355 }, { "epoch": 0.4321239298817774, "grad_norm": 1.0202085971832275, "learning_rate": 0.0009460354667753772, "loss": 3.4368, "step": 
6360 }, { "epoch": 0.4324636499524392, "grad_norm": 1.0721896886825562, "learning_rate": 0.0009459930017665444, "loss": 3.7588, "step": 6365 }, { "epoch": 0.432803370023101, "grad_norm": 1.424838662147522, "learning_rate": 0.0009459505367577117, "loss": 3.4132, "step": 6370 }, { "epoch": 0.43314309009376273, "grad_norm": 1.3854074478149414, "learning_rate": 0.0009459080717488789, "loss": 3.6547, "step": 6375 }, { "epoch": 0.4334828101644245, "grad_norm": 1.27243173122406, "learning_rate": 0.0009458656067400462, "loss": 3.6831, "step": 6380 }, { "epoch": 0.43382253023508627, "grad_norm": 1.3859477043151855, "learning_rate": 0.0009458231417312135, "loss": 3.7148, "step": 6385 }, { "epoch": 0.43416225030574807, "grad_norm": 1.331432580947876, "learning_rate": 0.0009457806767223807, "loss": 3.5887, "step": 6390 }, { "epoch": 0.43450197037640986, "grad_norm": 1.6575798988342285, "learning_rate": 0.0009457382117135481, "loss": 3.8523, "step": 6395 }, { "epoch": 0.4348416904470716, "grad_norm": 1.4394904375076294, "learning_rate": 0.0009456957467047154, "loss": 3.6325, "step": 6400 }, { "epoch": 0.4351814105177334, "grad_norm": 1.1630573272705078, "learning_rate": 0.0009456532816958826, "loss": 3.7098, "step": 6405 }, { "epoch": 0.43552113058839514, "grad_norm": 1.2830561399459839, "learning_rate": 0.0009456108166870499, "loss": 3.6061, "step": 6410 }, { "epoch": 0.43586085065905694, "grad_norm": 1.385108470916748, "learning_rate": 0.0009455683516782172, "loss": 3.6621, "step": 6415 }, { "epoch": 0.4362005707297187, "grad_norm": 1.0649902820587158, "learning_rate": 0.0009455258866693844, "loss": 3.6298, "step": 6420 }, { "epoch": 0.4365402908003805, "grad_norm": 1.182368516921997, "learning_rate": 0.0009454834216605516, "loss": 3.5758, "step": 6425 }, { "epoch": 0.4368800108710423, "grad_norm": 1.0729496479034424, "learning_rate": 0.0009454409566517191, "loss": 3.9255, "step": 6430 }, { "epoch": 0.437219730941704, "grad_norm": 1.1408847570419312, "learning_rate": 
0.0009453984916428863, "loss": 3.7626, "step": 6435 }, { "epoch": 0.4375594510123658, "grad_norm": 1.200448989868164, "learning_rate": 0.0009453560266340535, "loss": 3.5733, "step": 6440 }, { "epoch": 0.43789917108302756, "grad_norm": 1.251009225845337, "learning_rate": 0.0009453135616252209, "loss": 3.702, "step": 6445 }, { "epoch": 0.43823889115368936, "grad_norm": 1.1641876697540283, "learning_rate": 0.0009452710966163881, "loss": 3.7579, "step": 6450 }, { "epoch": 0.43857861122435116, "grad_norm": 1.1435065269470215, "learning_rate": 0.0009452286316075553, "loss": 3.9502, "step": 6455 }, { "epoch": 0.4389183312950129, "grad_norm": 1.1139477491378784, "learning_rate": 0.0009451861665987227, "loss": 3.4819, "step": 6460 }, { "epoch": 0.4392580513656747, "grad_norm": 1.1908572912216187, "learning_rate": 0.00094514370158989, "loss": 3.6516, "step": 6465 }, { "epoch": 0.43959777143633644, "grad_norm": 1.4063154458999634, "learning_rate": 0.0009451012365810572, "loss": 3.456, "step": 6470 }, { "epoch": 0.43993749150699824, "grad_norm": 1.0899357795715332, "learning_rate": 0.0009450587715722245, "loss": 3.9109, "step": 6475 }, { "epoch": 0.44027721157766003, "grad_norm": 1.250020146369934, "learning_rate": 0.0009450163065633918, "loss": 3.8961, "step": 6480 }, { "epoch": 0.4406169316483218, "grad_norm": 1.6173255443572998, "learning_rate": 0.000944973841554559, "loss": 3.5555, "step": 6485 }, { "epoch": 0.44095665171898357, "grad_norm": 1.0814850330352783, "learning_rate": 0.0009449313765457263, "loss": 3.9189, "step": 6490 }, { "epoch": 0.4412963717896453, "grad_norm": 1.4209396839141846, "learning_rate": 0.0009448889115368936, "loss": 3.688, "step": 6495 }, { "epoch": 0.4416360918603071, "grad_norm": 1.5768418312072754, "learning_rate": 0.0009448464465280609, "loss": 3.8792, "step": 6500 }, { "epoch": 0.4419758119309689, "grad_norm": 1.6675361394882202, "learning_rate": 0.0009448039815192282, "loss": 3.4961, "step": 6505 }, { "epoch": 0.44231553200163065, 
"grad_norm": 1.4268646240234375, "learning_rate": 0.0009447615165103955, "loss": 3.8669, "step": 6510 }, { "epoch": 0.44265525207229245, "grad_norm": 1.1074867248535156, "learning_rate": 0.0009447190515015628, "loss": 3.6236, "step": 6515 }, { "epoch": 0.4429949721429542, "grad_norm": 5.6159892082214355, "learning_rate": 0.00094467658649273, "loss": 3.5135, "step": 6520 }, { "epoch": 0.443334692213616, "grad_norm": 1.4120656251907349, "learning_rate": 0.0009446341214838972, "loss": 3.8049, "step": 6525 }, { "epoch": 0.44367441228427773, "grad_norm": 1.035157322883606, "learning_rate": 0.0009445916564750646, "loss": 3.6535, "step": 6530 }, { "epoch": 0.44401413235493953, "grad_norm": 1.6971696615219116, "learning_rate": 0.0009445491914662319, "loss": 3.5341, "step": 6535 }, { "epoch": 0.4443538524256013, "grad_norm": 1.1930265426635742, "learning_rate": 0.0009445067264573991, "loss": 3.6104, "step": 6540 }, { "epoch": 0.44469357249626307, "grad_norm": 1.0448393821716309, "learning_rate": 0.0009444642614485665, "loss": 3.8153, "step": 6545 }, { "epoch": 0.44503329256692487, "grad_norm": 1.1613003015518188, "learning_rate": 0.0009444217964397337, "loss": 3.4671, "step": 6550 }, { "epoch": 0.4453730126375866, "grad_norm": 1.5508346557617188, "learning_rate": 0.0009443793314309009, "loss": 3.8162, "step": 6555 }, { "epoch": 0.4457127327082484, "grad_norm": 1.0248148441314697, "learning_rate": 0.0009443368664220683, "loss": 3.9931, "step": 6560 }, { "epoch": 0.4460524527789102, "grad_norm": 1.9922316074371338, "learning_rate": 0.0009442944014132355, "loss": 3.376, "step": 6565 }, { "epoch": 0.44639217284957194, "grad_norm": 1.036615252494812, "learning_rate": 0.0009442519364044028, "loss": 3.4476, "step": 6570 }, { "epoch": 0.44673189292023374, "grad_norm": 1.2380717992782593, "learning_rate": 0.0009442094713955701, "loss": 3.8536, "step": 6575 }, { "epoch": 0.4470716129908955, "grad_norm": 1.275560736656189, "learning_rate": 0.0009441670063867374, "loss": 3.5858, 
"step": 6580 }, { "epoch": 0.4474113330615573, "grad_norm": 1.2600946426391602, "learning_rate": 0.0009441245413779046, "loss": 3.6155, "step": 6585 }, { "epoch": 0.4477510531322191, "grad_norm": 1.0523712635040283, "learning_rate": 0.0009440820763690719, "loss": 3.6443, "step": 6590 }, { "epoch": 0.4480907732028808, "grad_norm": 1.0980836153030396, "learning_rate": 0.0009440396113602392, "loss": 3.606, "step": 6595 }, { "epoch": 0.4484304932735426, "grad_norm": 1.1133266687393188, "learning_rate": 0.0009439971463514064, "loss": 3.9213, "step": 6600 }, { "epoch": 0.44877021334420436, "grad_norm": 0.840295672416687, "learning_rate": 0.0009439546813425738, "loss": 3.626, "step": 6605 }, { "epoch": 0.44910993341486616, "grad_norm": 1.7207982540130615, "learning_rate": 0.000943912216333741, "loss": 3.6249, "step": 6610 }, { "epoch": 0.4494496534855279, "grad_norm": 1.0424376726150513, "learning_rate": 0.0009438697513249083, "loss": 3.8834, "step": 6615 }, { "epoch": 0.4497893735561897, "grad_norm": 0.9753870964050293, "learning_rate": 0.0009438272863160756, "loss": 3.6858, "step": 6620 }, { "epoch": 0.4501290936268515, "grad_norm": 1.4401915073394775, "learning_rate": 0.0009437848213072428, "loss": 3.7697, "step": 6625 }, { "epoch": 0.45046881369751324, "grad_norm": 1.2434371709823608, "learning_rate": 0.0009437423562984101, "loss": 3.8716, "step": 6630 }, { "epoch": 0.45080853376817503, "grad_norm": 1.365737795829773, "learning_rate": 0.0009436998912895774, "loss": 3.6888, "step": 6635 }, { "epoch": 0.4511482538388368, "grad_norm": 1.2234160900115967, "learning_rate": 0.0009436574262807447, "loss": 3.8344, "step": 6640 }, { "epoch": 0.4514879739094986, "grad_norm": 1.1441856622695923, "learning_rate": 0.000943614961271912, "loss": 3.4657, "step": 6645 }, { "epoch": 0.45182769398016037, "grad_norm": 1.1656910181045532, "learning_rate": 0.0009435724962630793, "loss": 3.8402, "step": 6650 }, { "epoch": 0.4521674140508221, "grad_norm": 1.2320067882537842, "learning_rate": 
0.0009435300312542465, "loss": 3.6074, "step": 6655 }, { "epoch": 0.4525071341214839, "grad_norm": 1.30469810962677, "learning_rate": 0.0009434875662454137, "loss": 3.5521, "step": 6660 }, { "epoch": 0.45284685419214565, "grad_norm": 0.9607745409011841, "learning_rate": 0.0009434451012365811, "loss": 3.5081, "step": 6665 }, { "epoch": 0.45318657426280745, "grad_norm": 2.173412322998047, "learning_rate": 0.0009434026362277483, "loss": 3.8509, "step": 6670 }, { "epoch": 0.45352629433346925, "grad_norm": 1.395026683807373, "learning_rate": 0.0009433601712189156, "loss": 4.0101, "step": 6675 }, { "epoch": 0.453866014404131, "grad_norm": 1.1709812879562378, "learning_rate": 0.000943317706210083, "loss": 3.7747, "step": 6680 }, { "epoch": 0.4542057344747928, "grad_norm": 1.579063057899475, "learning_rate": 0.0009432752412012502, "loss": 3.5502, "step": 6685 }, { "epoch": 0.45454545454545453, "grad_norm": 1.0587866306304932, "learning_rate": 0.0009432327761924174, "loss": 3.5084, "step": 6690 }, { "epoch": 0.45488517461611633, "grad_norm": 1.4730803966522217, "learning_rate": 0.0009431903111835848, "loss": 3.7981, "step": 6695 }, { "epoch": 0.45522489468677807, "grad_norm": 1.3846031427383423, "learning_rate": 0.000943147846174752, "loss": 3.4403, "step": 6700 }, { "epoch": 0.45556461475743987, "grad_norm": 1.9179236888885498, "learning_rate": 0.0009431053811659192, "loss": 3.5949, "step": 6705 }, { "epoch": 0.45590433482810166, "grad_norm": 1.304921269416809, "learning_rate": 0.0009430629161570867, "loss": 3.7481, "step": 6710 }, { "epoch": 0.4562440548987634, "grad_norm": 1.1491141319274902, "learning_rate": 0.0009430204511482539, "loss": 3.724, "step": 6715 }, { "epoch": 0.4565837749694252, "grad_norm": 1.0532443523406982, "learning_rate": 0.0009429779861394211, "loss": 3.6218, "step": 6720 }, { "epoch": 0.45692349504008695, "grad_norm": 1.4490158557891846, "learning_rate": 0.0009429355211305884, "loss": 3.6048, "step": 6725 }, { "epoch": 0.45726321511074874, 
"grad_norm": 0.929561197757721, "learning_rate": 0.0009428930561217557, "loss": 3.6961, "step": 6730 }, { "epoch": 0.45760293518141054, "grad_norm": 1.6406091451644897, "learning_rate": 0.0009428505911129229, "loss": 3.7471, "step": 6735 }, { "epoch": 0.4579426552520723, "grad_norm": 1.2588796615600586, "learning_rate": 0.0009428081261040902, "loss": 3.7945, "step": 6740 }, { "epoch": 0.4582823753227341, "grad_norm": 1.3335473537445068, "learning_rate": 0.0009427656610952576, "loss": 3.7334, "step": 6745 }, { "epoch": 0.4586220953933958, "grad_norm": 1.1830120086669922, "learning_rate": 0.0009427231960864248, "loss": 3.732, "step": 6750 }, { "epoch": 0.4589618154640576, "grad_norm": 1.109380841255188, "learning_rate": 0.0009426807310775921, "loss": 3.8011, "step": 6755 }, { "epoch": 0.4593015355347194, "grad_norm": 1.565940022468567, "learning_rate": 0.0009426382660687593, "loss": 3.5487, "step": 6760 }, { "epoch": 0.45964125560538116, "grad_norm": 1.409398078918457, "learning_rate": 0.0009425958010599266, "loss": 3.7448, "step": 6765 }, { "epoch": 0.45998097567604296, "grad_norm": 1.737565040588379, "learning_rate": 0.0009425533360510939, "loss": 3.7679, "step": 6770 }, { "epoch": 0.4603206957467047, "grad_norm": 1.0678552389144897, "learning_rate": 0.0009425108710422611, "loss": 3.846, "step": 6775 }, { "epoch": 0.4606604158173665, "grad_norm": 1.3739103078842163, "learning_rate": 0.0009424684060334285, "loss": 3.8537, "step": 6780 }, { "epoch": 0.46100013588802824, "grad_norm": 1.0254541635513306, "learning_rate": 0.0009424259410245958, "loss": 3.7295, "step": 6785 }, { "epoch": 0.46133985595869004, "grad_norm": 1.4005464315414429, "learning_rate": 0.000942383476015763, "loss": 3.6054, "step": 6790 }, { "epoch": 0.46167957602935183, "grad_norm": 1.4013137817382812, "learning_rate": 0.0009423410110069303, "loss": 3.4997, "step": 6795 }, { "epoch": 0.4620192961000136, "grad_norm": 1.136405348777771, "learning_rate": 0.0009422985459980976, "loss": 3.7525, "step": 
6800 }, { "epoch": 0.4623590161706754, "grad_norm": 1.3474076986312866, "learning_rate": 0.0009422560809892648, "loss": 3.6099, "step": 6805 }, { "epoch": 0.4626987362413371, "grad_norm": 1.5540437698364258, "learning_rate": 0.000942213615980432, "loss": 3.5484, "step": 6810 }, { "epoch": 0.4630384563119989, "grad_norm": 1.0455472469329834, "learning_rate": 0.0009421711509715995, "loss": 3.4614, "step": 6815 }, { "epoch": 0.4633781763826607, "grad_norm": 1.30460786819458, "learning_rate": 0.0009421286859627667, "loss": 3.9921, "step": 6820 }, { "epoch": 0.46371789645332245, "grad_norm": 1.1884065866470337, "learning_rate": 0.0009420862209539339, "loss": 3.7832, "step": 6825 }, { "epoch": 0.46405761652398425, "grad_norm": 1.2336610555648804, "learning_rate": 0.0009420437559451013, "loss": 3.6212, "step": 6830 }, { "epoch": 0.464397336594646, "grad_norm": 1.2572101354599, "learning_rate": 0.0009420012909362685, "loss": 3.5509, "step": 6835 }, { "epoch": 0.4647370566653078, "grad_norm": 0.9942195415496826, "learning_rate": 0.0009419588259274357, "loss": 3.8374, "step": 6840 }, { "epoch": 0.4650767767359696, "grad_norm": 16.05023765563965, "learning_rate": 0.0009419163609186032, "loss": 3.6243, "step": 6845 }, { "epoch": 0.46541649680663133, "grad_norm": 1.235830307006836, "learning_rate": 0.0009418738959097704, "loss": 3.8152, "step": 6850 }, { "epoch": 0.4657562168772931, "grad_norm": 1.1541485786437988, "learning_rate": 0.0009418314309009377, "loss": 3.7282, "step": 6855 }, { "epoch": 0.46609593694795487, "grad_norm": 1.1021071672439575, "learning_rate": 0.000941788965892105, "loss": 3.769, "step": 6860 }, { "epoch": 0.46643565701861667, "grad_norm": 1.662878155708313, "learning_rate": 0.0009417465008832722, "loss": 3.5555, "step": 6865 }, { "epoch": 0.4667753770892784, "grad_norm": 1.1599427461624146, "learning_rate": 0.0009417040358744395, "loss": 3.5826, "step": 6870 }, { "epoch": 0.4671150971599402, "grad_norm": 1.2510055303573608, "learning_rate": 
0.0009416615708656067, "loss": 3.3914, "step": 6875 }, { "epoch": 0.467454817230602, "grad_norm": 1.2327708005905151, "learning_rate": 0.0009416191058567741, "loss": 3.4065, "step": 6880 }, { "epoch": 0.46779453730126375, "grad_norm": 1.1488326787948608, "learning_rate": 0.0009415766408479414, "loss": 3.6624, "step": 6885 }, { "epoch": 0.46813425737192554, "grad_norm": 0.9506558775901794, "learning_rate": 0.0009415341758391086, "loss": 3.7784, "step": 6890 }, { "epoch": 0.4684739774425873, "grad_norm": 1.1363532543182373, "learning_rate": 0.0009414917108302759, "loss": 3.741, "step": 6895 }, { "epoch": 0.4688136975132491, "grad_norm": 1.121234655380249, "learning_rate": 0.0009414492458214432, "loss": 3.8673, "step": 6900 }, { "epoch": 0.4691534175839109, "grad_norm": 1.2178614139556885, "learning_rate": 0.0009414067808126104, "loss": 3.8568, "step": 6905 }, { "epoch": 0.4694931376545726, "grad_norm": 1.2187033891677856, "learning_rate": 0.0009413643158037776, "loss": 3.742, "step": 6910 }, { "epoch": 0.4698328577252344, "grad_norm": 1.2367980480194092, "learning_rate": 0.0009413218507949451, "loss": 3.7404, "step": 6915 }, { "epoch": 0.47017257779589616, "grad_norm": 1.153576135635376, "learning_rate": 0.0009412793857861123, "loss": 3.5208, "step": 6920 }, { "epoch": 0.47051229786655796, "grad_norm": 0.9599360823631287, "learning_rate": 0.0009412369207772795, "loss": 3.6421, "step": 6925 }, { "epoch": 0.47085201793721976, "grad_norm": 2.719930648803711, "learning_rate": 0.0009411944557684469, "loss": 3.7323, "step": 6930 }, { "epoch": 0.4711917380078815, "grad_norm": 1.2099874019622803, "learning_rate": 0.0009411519907596141, "loss": 3.6461, "step": 6935 }, { "epoch": 0.4715314580785433, "grad_norm": 1.4781163930892944, "learning_rate": 0.0009411095257507813, "loss": 3.6668, "step": 6940 }, { "epoch": 0.47187117814920504, "grad_norm": 1.2328463792800903, "learning_rate": 0.0009410670607419487, "loss": 3.9484, "step": 6945 }, { "epoch": 0.47221089821986684, 
"grad_norm": 1.0890402793884277, "learning_rate": 0.000941024595733116, "loss": 3.6228, "step": 6950 }, { "epoch": 0.4725506182905286, "grad_norm": 1.2135679721832275, "learning_rate": 0.0009409821307242832, "loss": 3.9311, "step": 6955 }, { "epoch": 0.4728903383611904, "grad_norm": 1.5227183103561401, "learning_rate": 0.0009409396657154506, "loss": 3.8795, "step": 6960 }, { "epoch": 0.4732300584318522, "grad_norm": 1.5254743099212646, "learning_rate": 0.0009408972007066178, "loss": 3.7746, "step": 6965 }, { "epoch": 0.4735697785025139, "grad_norm": 0.9065627455711365, "learning_rate": 0.000940854735697785, "loss": 3.5008, "step": 6970 }, { "epoch": 0.4739094985731757, "grad_norm": 1.059850811958313, "learning_rate": 0.0009408122706889523, "loss": 3.7182, "step": 6975 }, { "epoch": 0.47424921864383746, "grad_norm": 1.0083694458007812, "learning_rate": 0.0009407698056801196, "loss": 3.8813, "step": 6980 }, { "epoch": 0.47458893871449925, "grad_norm": 1.0603982210159302, "learning_rate": 0.0009407273406712869, "loss": 3.6809, "step": 6985 }, { "epoch": 0.47492865878516105, "grad_norm": 1.185856580734253, "learning_rate": 0.0009406848756624542, "loss": 3.7431, "step": 6990 }, { "epoch": 0.4752683788558228, "grad_norm": 1.0761592388153076, "learning_rate": 0.0009406424106536215, "loss": 3.3726, "step": 6995 }, { "epoch": 0.4756080989264846, "grad_norm": 1.2383710145950317, "learning_rate": 0.0009405999456447887, "loss": 3.2869, "step": 7000 }, { "epoch": 0.47594781899714633, "grad_norm": 1.5200833082199097, "learning_rate": 0.000940557480635956, "loss": 4.0705, "step": 7005 }, { "epoch": 0.47628753906780813, "grad_norm": 1.3238149881362915, "learning_rate": 0.0009405150156271232, "loss": 3.7957, "step": 7010 }, { "epoch": 0.4766272591384699, "grad_norm": 1.1879098415374756, "learning_rate": 0.0009404725506182905, "loss": 3.6929, "step": 7015 }, { "epoch": 0.47696697920913167, "grad_norm": 1.102543592453003, "learning_rate": 0.0009404300856094579, "loss": 3.3201, 
"step": 7020 }, { "epoch": 0.47730669927979347, "grad_norm": 1.0969135761260986, "learning_rate": 0.0009403876206006251, "loss": 3.719, "step": 7025 }, { "epoch": 0.4776464193504552, "grad_norm": 1.2527323961257935, "learning_rate": 0.0009403451555917924, "loss": 3.79, "step": 7030 }, { "epoch": 0.477986139421117, "grad_norm": 1.0693672895431519, "learning_rate": 0.0009403026905829597, "loss": 3.5069, "step": 7035 }, { "epoch": 0.47832585949177875, "grad_norm": 1.1052601337432861, "learning_rate": 0.0009402602255741269, "loss": 3.5906, "step": 7040 }, { "epoch": 0.47866557956244055, "grad_norm": 1.415627360343933, "learning_rate": 0.0009402177605652941, "loss": 3.7382, "step": 7045 }, { "epoch": 0.47900529963310234, "grad_norm": 1.0481630563735962, "learning_rate": 0.0009401752955564615, "loss": 3.6782, "step": 7050 }, { "epoch": 0.4793450197037641, "grad_norm": 1.1521446704864502, "learning_rate": 0.0009401328305476288, "loss": 3.8552, "step": 7055 }, { "epoch": 0.4796847397744259, "grad_norm": 1.0002691745758057, "learning_rate": 0.000940090365538796, "loss": 3.7927, "step": 7060 }, { "epoch": 0.4800244598450876, "grad_norm": 1.0738654136657715, "learning_rate": 0.0009400479005299634, "loss": 3.6342, "step": 7065 }, { "epoch": 0.4803641799157494, "grad_norm": 1.3510044813156128, "learning_rate": 0.0009400054355211306, "loss": 3.459, "step": 7070 }, { "epoch": 0.4807038999864112, "grad_norm": 0.8757941722869873, "learning_rate": 0.0009399629705122978, "loss": 3.7937, "step": 7075 }, { "epoch": 0.48104362005707296, "grad_norm": 0.9115470051765442, "learning_rate": 0.0009399205055034652, "loss": 3.8358, "step": 7080 }, { "epoch": 0.48138334012773476, "grad_norm": 1.2008947134017944, "learning_rate": 0.0009398780404946324, "loss": 3.8685, "step": 7085 }, { "epoch": 0.4817230601983965, "grad_norm": 1.2236970663070679, "learning_rate": 0.0009398355754857997, "loss": 3.7091, "step": 7090 }, { "epoch": 0.4820627802690583, "grad_norm": 1.089529037475586, "learning_rate": 
0.0009397931104769671, "loss": 3.7862, "step": 7095 }, { "epoch": 0.4824025003397201, "grad_norm": 0.863137423992157, "learning_rate": 0.0009397506454681343, "loss": 3.6583, "step": 7100 }, { "epoch": 0.48274222041038184, "grad_norm": 3.1047635078430176, "learning_rate": 0.0009397081804593015, "loss": 3.7125, "step": 7105 }, { "epoch": 0.48308194048104364, "grad_norm": 1.18228280544281, "learning_rate": 0.0009396657154504688, "loss": 3.8608, "step": 7110 }, { "epoch": 0.4834216605517054, "grad_norm": 1.1549732685089111, "learning_rate": 0.0009396232504416361, "loss": 3.5973, "step": 7115 }, { "epoch": 0.4837613806223672, "grad_norm": 1.0201400518417358, "learning_rate": 0.0009395807854328033, "loss": 3.6623, "step": 7120 }, { "epoch": 0.4841011006930289, "grad_norm": 1.1722661256790161, "learning_rate": 0.0009395383204239707, "loss": 3.6693, "step": 7125 }, { "epoch": 0.4844408207636907, "grad_norm": 1.2059043645858765, "learning_rate": 0.000939495855415138, "loss": 3.6402, "step": 7130 }, { "epoch": 0.4847805408343525, "grad_norm": 1.4328112602233887, "learning_rate": 0.0009394533904063052, "loss": 3.5933, "step": 7135 }, { "epoch": 0.48512026090501426, "grad_norm": 1.1484456062316895, "learning_rate": 0.0009394109253974725, "loss": 3.7593, "step": 7140 }, { "epoch": 0.48545998097567605, "grad_norm": 1.3974989652633667, "learning_rate": 0.0009393684603886398, "loss": 3.8078, "step": 7145 }, { "epoch": 0.4857997010463378, "grad_norm": 1.0863779783248901, "learning_rate": 0.000939325995379807, "loss": 3.73, "step": 7150 }, { "epoch": 0.4861394211169996, "grad_norm": 1.1503394842147827, "learning_rate": 0.0009392835303709743, "loss": 3.7966, "step": 7155 }, { "epoch": 0.4864791411876614, "grad_norm": 1.392094612121582, "learning_rate": 0.0009392410653621416, "loss": 3.7414, "step": 7160 }, { "epoch": 0.48681886125832313, "grad_norm": 1.1935057640075684, "learning_rate": 0.0009391986003533089, "loss": 3.6289, "step": 7165 }, { "epoch": 0.48715858132898493, 
"grad_norm": 1.2388349771499634, "learning_rate": 0.0009391561353444762, "loss": 3.3398, "step": 7170 }, { "epoch": 0.48749830139964667, "grad_norm": 1.2457733154296875, "learning_rate": 0.0009391136703356434, "loss": 3.7675, "step": 7175 }, { "epoch": 0.48783802147030847, "grad_norm": 1.0429373979568481, "learning_rate": 0.0009390712053268107, "loss": 3.7443, "step": 7180 }, { "epoch": 0.48817774154097027, "grad_norm": 1.1760070323944092, "learning_rate": 0.000939028740317978, "loss": 3.5977, "step": 7185 }, { "epoch": 0.488517461611632, "grad_norm": 0.9982202053070068, "learning_rate": 0.0009389862753091452, "loss": 3.7664, "step": 7190 }, { "epoch": 0.4888571816822938, "grad_norm": 1.2277858257293701, "learning_rate": 0.0009389438103003127, "loss": 3.8451, "step": 7195 }, { "epoch": 0.48919690175295555, "grad_norm": 1.2304216623306274, "learning_rate": 0.0009389013452914799, "loss": 3.5623, "step": 7200 }, { "epoch": 0.48953662182361735, "grad_norm": 1.2191616296768188, "learning_rate": 0.0009388588802826471, "loss": 3.6252, "step": 7205 }, { "epoch": 0.4898763418942791, "grad_norm": 1.0619202852249146, "learning_rate": 0.0009388164152738144, "loss": 3.9306, "step": 7210 }, { "epoch": 0.4902160619649409, "grad_norm": 0.9791811108589172, "learning_rate": 0.0009387739502649817, "loss": 3.7367, "step": 7215 }, { "epoch": 0.4905557820356027, "grad_norm": 1.1820954084396362, "learning_rate": 0.0009387314852561489, "loss": 3.6504, "step": 7220 }, { "epoch": 0.4908955021062644, "grad_norm": 1.1978389024734497, "learning_rate": 0.0009386890202473162, "loss": 3.8733, "step": 7225 }, { "epoch": 0.4912352221769262, "grad_norm": 1.229096531867981, "learning_rate": 0.0009386465552384836, "loss": 3.4617, "step": 7230 }, { "epoch": 0.49157494224758796, "grad_norm": 1.167171835899353, "learning_rate": 0.0009386040902296508, "loss": 3.7386, "step": 7235 }, { "epoch": 0.49191466231824976, "grad_norm": 1.314456582069397, "learning_rate": 0.0009385616252208181, "loss": 3.7235, 
"step": 7240 }, { "epoch": 0.49225438238891156, "grad_norm": 1.1623713970184326, "learning_rate": 0.0009385191602119854, "loss": 3.8235, "step": 7245 }, { "epoch": 0.4925941024595733, "grad_norm": 1.1598591804504395, "learning_rate": 0.0009384766952031526, "loss": 3.8547, "step": 7250 }, { "epoch": 0.4929338225302351, "grad_norm": 0.9379782676696777, "learning_rate": 0.0009384342301943199, "loss": 3.3559, "step": 7255 }, { "epoch": 0.49327354260089684, "grad_norm": 1.2309314012527466, "learning_rate": 0.0009383917651854871, "loss": 3.8114, "step": 7260 }, { "epoch": 0.49361326267155864, "grad_norm": 0.9785090088844299, "learning_rate": 0.0009383493001766545, "loss": 3.6674, "step": 7265 }, { "epoch": 0.49395298274222044, "grad_norm": 1.1109025478363037, "learning_rate": 0.0009383068351678218, "loss": 3.667, "step": 7270 }, { "epoch": 0.4942927028128822, "grad_norm": 1.2513909339904785, "learning_rate": 0.000938264370158989, "loss": 3.6104, "step": 7275 }, { "epoch": 0.494632422883544, "grad_norm": 1.2467678785324097, "learning_rate": 0.0009382219051501563, "loss": 3.7344, "step": 7280 }, { "epoch": 0.4949721429542057, "grad_norm": 0.9834097623825073, "learning_rate": 0.0009381794401413236, "loss": 3.9261, "step": 7285 }, { "epoch": 0.4953118630248675, "grad_norm": 1.1549733877182007, "learning_rate": 0.0009381369751324908, "loss": 3.478, "step": 7290 }, { "epoch": 0.49565158309552926, "grad_norm": 1.2903131246566772, "learning_rate": 0.000938094510123658, "loss": 3.6878, "step": 7295 }, { "epoch": 0.49599130316619106, "grad_norm": 0.9437738060951233, "learning_rate": 0.0009380520451148255, "loss": 3.7336, "step": 7300 }, { "epoch": 0.49633102323685285, "grad_norm": 1.10988187789917, "learning_rate": 0.0009380095801059927, "loss": 3.5978, "step": 7305 }, { "epoch": 0.4966707433075146, "grad_norm": 1.1523690223693848, "learning_rate": 0.0009379671150971599, "loss": 3.6848, "step": 7310 }, { "epoch": 0.4970104633781764, "grad_norm": 1.810145616531372, "learning_rate": 
0.0009379246500883273, "loss": 3.5224, "step": 7315 }, { "epoch": 0.49735018344883813, "grad_norm": 1.6198829412460327, "learning_rate": 0.0009378821850794945, "loss": 3.9063, "step": 7320 }, { "epoch": 0.49768990351949993, "grad_norm": 1.3214085102081299, "learning_rate": 0.0009378397200706617, "loss": 3.7299, "step": 7325 }, { "epoch": 0.49802962359016173, "grad_norm": 0.8633680939674377, "learning_rate": 0.0009377972550618292, "loss": 3.5728, "step": 7330 }, { "epoch": 0.49836934366082347, "grad_norm": 0.9961379170417786, "learning_rate": 0.0009377547900529964, "loss": 3.8111, "step": 7335 }, { "epoch": 0.49870906373148527, "grad_norm": 0.9436635971069336, "learning_rate": 0.0009377123250441636, "loss": 3.8661, "step": 7340 }, { "epoch": 0.499048783802147, "grad_norm": 1.5019248723983765, "learning_rate": 0.000937669860035331, "loss": 3.6957, "step": 7345 }, { "epoch": 0.4993885038728088, "grad_norm": 1.0294833183288574, "learning_rate": 0.0009376273950264982, "loss": 3.373, "step": 7350 }, { "epoch": 0.4997282239434706, "grad_norm": 1.078193187713623, "learning_rate": 0.0009375849300176654, "loss": 3.6606, "step": 7355 }, { "epoch": 0.5000679440141323, "grad_norm": 1.171591877937317, "learning_rate": 0.0009375424650088327, "loss": 3.7968, "step": 7360 }, { "epoch": 0.5004076640847941, "grad_norm": 1.0641673803329468, "learning_rate": 0.0009375, "loss": 3.7373, "step": 7365 }, { "epoch": 0.5007473841554559, "grad_norm": 1.2803821563720703, "learning_rate": 0.0009374575349911673, "loss": 3.944, "step": 7370 }, { "epoch": 0.5010871042261177, "grad_norm": 1.2533233165740967, "learning_rate": 0.0009374150699823346, "loss": 3.773, "step": 7375 }, { "epoch": 0.5014268242967794, "grad_norm": 1.034691333770752, "learning_rate": 0.0009373726049735019, "loss": 4.0131, "step": 7380 }, { "epoch": 0.5017665443674413, "grad_norm": 1.0015690326690674, "learning_rate": 0.0009373301399646691, "loss": 3.9511, "step": 7385 }, { "epoch": 0.502106264438103, "grad_norm": 
0.9758409857749939, "learning_rate": 0.0009372876749558364, "loss": 3.4367, "step": 7390 }, { "epoch": 0.5024459845087648, "grad_norm": 1.3273394107818604, "learning_rate": 0.0009372452099470036, "loss": 3.6861, "step": 7395 }, { "epoch": 0.5027857045794265, "grad_norm": 1.2701365947723389, "learning_rate": 0.0009372027449381709, "loss": 3.7772, "step": 7400 }, { "epoch": 0.5031254246500884, "grad_norm": 1.5237646102905273, "learning_rate": 0.0009371602799293383, "loss": 3.5501, "step": 7405 }, { "epoch": 0.5034651447207501, "grad_norm": 1.2512867450714111, "learning_rate": 0.0009371178149205055, "loss": 3.6774, "step": 7410 }, { "epoch": 0.5038048647914118, "grad_norm": 0.9934244155883789, "learning_rate": 0.0009370753499116728, "loss": 3.6873, "step": 7415 }, { "epoch": 0.5041445848620737, "grad_norm": 1.3568506240844727, "learning_rate": 0.0009370328849028401, "loss": 3.8585, "step": 7420 }, { "epoch": 0.5044843049327354, "grad_norm": 1.2144678831100464, "learning_rate": 0.0009369904198940073, "loss": 3.5407, "step": 7425 }, { "epoch": 0.5048240250033972, "grad_norm": 1.1186283826828003, "learning_rate": 0.0009369479548851746, "loss": 3.756, "step": 7430 }, { "epoch": 0.5051637450740589, "grad_norm": 0.8792811036109924, "learning_rate": 0.000936905489876342, "loss": 3.7721, "step": 7435 }, { "epoch": 0.5055034651447208, "grad_norm": 0.9764002561569214, "learning_rate": 0.0009368630248675092, "loss": 3.6131, "step": 7440 }, { "epoch": 0.5058431852153825, "grad_norm": 0.8620773553848267, "learning_rate": 0.0009368205598586764, "loss": 3.6113, "step": 7445 }, { "epoch": 0.5061829052860443, "grad_norm": 1.0090112686157227, "learning_rate": 0.0009367780948498438, "loss": 3.6155, "step": 7450 }, { "epoch": 0.5065226253567061, "grad_norm": 1.2821530103683472, "learning_rate": 0.000936735629841011, "loss": 3.5352, "step": 7455 }, { "epoch": 0.5068623454273679, "grad_norm": 1.2656856775283813, "learning_rate": 0.0009366931648321782, "loss": 3.7862, "step": 7460 }, { 
"epoch": 0.5072020654980296, "grad_norm": 1.321777105331421, "learning_rate": 0.0009366506998233456, "loss": 3.8369, "step": 7465 }, { "epoch": 0.5075417855686915, "grad_norm": 1.3134765625, "learning_rate": 0.0009366082348145129, "loss": 3.4397, "step": 7470 }, { "epoch": 0.5078815056393532, "grad_norm": 0.9310628771781921, "learning_rate": 0.0009365657698056801, "loss": 3.9897, "step": 7475 }, { "epoch": 0.5082212257100149, "grad_norm": 0.9907513856887817, "learning_rate": 0.0009365233047968475, "loss": 3.4125, "step": 7480 }, { "epoch": 0.5085609457806767, "grad_norm": 1.192732572555542, "learning_rate": 0.0009364808397880147, "loss": 4.0256, "step": 7485 }, { "epoch": 0.5089006658513385, "grad_norm": 1.8713558912277222, "learning_rate": 0.0009364383747791819, "loss": 3.4769, "step": 7490 }, { "epoch": 0.5092403859220003, "grad_norm": 1.1946039199829102, "learning_rate": 0.0009363959097703492, "loss": 3.7971, "step": 7495 }, { "epoch": 0.509580105992662, "grad_norm": 1.0961804389953613, "learning_rate": 0.0009363534447615165, "loss": 3.779, "step": 7500 }, { "epoch": 0.5099198260633239, "grad_norm": 0.9258148670196533, "learning_rate": 0.0009363109797526838, "loss": 3.773, "step": 7505 }, { "epoch": 0.5102595461339856, "grad_norm": 1.0682048797607422, "learning_rate": 0.0009362685147438511, "loss": 3.8983, "step": 7510 }, { "epoch": 0.5105992662046474, "grad_norm": 1.5872859954833984, "learning_rate": 0.0009362260497350184, "loss": 3.7427, "step": 7515 }, { "epoch": 0.5109389862753091, "grad_norm": 1.3132884502410889, "learning_rate": 0.0009361835847261856, "loss": 3.812, "step": 7520 }, { "epoch": 0.511278706345971, "grad_norm": 1.3130509853363037, "learning_rate": 0.0009361411197173529, "loss": 3.8053, "step": 7525 }, { "epoch": 0.5116184264166327, "grad_norm": 1.4017024040222168, "learning_rate": 0.0009360986547085202, "loss": 3.7793, "step": 7530 }, { "epoch": 0.5119581464872944, "grad_norm": 1.9575494527816772, "learning_rate": 0.0009360561896996874, 
"loss": 3.7338, "step": 7535 }, { "epoch": 0.5122978665579563, "grad_norm": 0.8824830651283264, "learning_rate": 0.0009360137246908548, "loss": 3.8274, "step": 7540 }, { "epoch": 0.512637586628618, "grad_norm": 1.2025723457336426, "learning_rate": 0.000935971259682022, "loss": 3.4578, "step": 7545 }, { "epoch": 0.5129773066992798, "grad_norm": 1.0491780042648315, "learning_rate": 0.0009359287946731894, "loss": 3.6035, "step": 7550 }, { "epoch": 0.5133170267699416, "grad_norm": 1.0104501247406006, "learning_rate": 0.0009358863296643566, "loss": 3.8515, "step": 7555 }, { "epoch": 0.5136567468406034, "grad_norm": 1.1402860879898071, "learning_rate": 0.0009358438646555238, "loss": 3.8447, "step": 7560 }, { "epoch": 0.5139964669112651, "grad_norm": 1.202095627784729, "learning_rate": 0.0009358013996466912, "loss": 3.8014, "step": 7565 }, { "epoch": 0.5143361869819268, "grad_norm": 1.03019380569458, "learning_rate": 0.0009357589346378584, "loss": 3.7893, "step": 7570 }, { "epoch": 0.5146759070525887, "grad_norm": 1.3638306856155396, "learning_rate": 0.0009357164696290257, "loss": 3.575, "step": 7575 }, { "epoch": 0.5150156271232504, "grad_norm": 1.6329610347747803, "learning_rate": 0.0009356740046201931, "loss": 3.7804, "step": 7580 }, { "epoch": 0.5153553471939122, "grad_norm": 1.2659451961517334, "learning_rate": 0.0009356315396113603, "loss": 3.7963, "step": 7585 }, { "epoch": 0.515695067264574, "grad_norm": 1.1022026538848877, "learning_rate": 0.0009355890746025275, "loss": 3.6365, "step": 7590 }, { "epoch": 0.5160347873352358, "grad_norm": 1.3284788131713867, "learning_rate": 0.0009355466095936948, "loss": 3.6495, "step": 7595 }, { "epoch": 0.5163745074058975, "grad_norm": 1.0188695192337036, "learning_rate": 0.0009355041445848621, "loss": 3.77, "step": 7600 }, { "epoch": 0.5167142274765593, "grad_norm": 1.0540428161621094, "learning_rate": 0.0009354616795760293, "loss": 3.6102, "step": 7605 }, { "epoch": 0.5170539475472211, "grad_norm": 1.45895254611969, 
"learning_rate": 0.0009354192145671967, "loss": 3.6447, "step": 7610 }, { "epoch": 0.5173936676178829, "grad_norm": 1.8507425785064697, "learning_rate": 0.000935376749558364, "loss": 3.4138, "step": 7615 }, { "epoch": 0.5177333876885446, "grad_norm": 6.116234302520752, "learning_rate": 0.0009353342845495312, "loss": 3.8231, "step": 7620 }, { "epoch": 0.5180731077592065, "grad_norm": 1.008333444595337, "learning_rate": 0.0009352918195406985, "loss": 3.8507, "step": 7625 }, { "epoch": 0.5184128278298682, "grad_norm": 1.2379567623138428, "learning_rate": 0.0009352493545318658, "loss": 3.5083, "step": 7630 }, { "epoch": 0.5187525479005299, "grad_norm": 1.4677168130874634, "learning_rate": 0.000935206889523033, "loss": 3.8183, "step": 7635 }, { "epoch": 0.5190922679711918, "grad_norm": 1.395475149154663, "learning_rate": 0.0009351644245142003, "loss": 3.8871, "step": 7640 }, { "epoch": 0.5194319880418535, "grad_norm": 1.5383607149124146, "learning_rate": 0.0009351219595053676, "loss": 3.6559, "step": 7645 }, { "epoch": 0.5197717081125153, "grad_norm": 1.5595837831497192, "learning_rate": 0.0009350794944965349, "loss": 3.7209, "step": 7650 }, { "epoch": 0.520111428183177, "grad_norm": 1.2814797163009644, "learning_rate": 0.0009350370294877022, "loss": 3.535, "step": 7655 }, { "epoch": 0.5204511482538389, "grad_norm": 1.0895686149597168, "learning_rate": 0.0009349945644788694, "loss": 3.5664, "step": 7660 }, { "epoch": 0.5207908683245006, "grad_norm": 1.2648905515670776, "learning_rate": 0.0009349520994700367, "loss": 3.4602, "step": 7665 }, { "epoch": 0.5211305883951624, "grad_norm": 1.0873327255249023, "learning_rate": 0.000934909634461204, "loss": 3.6725, "step": 7670 }, { "epoch": 0.5214703084658242, "grad_norm": 1.0951557159423828, "learning_rate": 0.0009348671694523712, "loss": 3.5352, "step": 7675 }, { "epoch": 0.521810028536486, "grad_norm": 0.9958415031433105, "learning_rate": 0.0009348247044435386, "loss": 3.7544, "step": 7680 }, { "epoch": 0.5221497486071477, 
"grad_norm": 1.670166015625, "learning_rate": 0.0009347822394347059, "loss": 3.6565, "step": 7685 }, { "epoch": 0.5224894686778094, "grad_norm": 3.0542385578155518, "learning_rate": 0.0009347397744258731, "loss": 3.8871, "step": 7690 }, { "epoch": 0.5228291887484713, "grad_norm": 1.1545768976211548, "learning_rate": 0.0009346973094170403, "loss": 3.4541, "step": 7695 }, { "epoch": 0.523168908819133, "grad_norm": 0.9881004691123962, "learning_rate": 0.0009346548444082077, "loss": 3.688, "step": 7700 }, { "epoch": 0.5235086288897948, "grad_norm": 0.9992630481719971, "learning_rate": 0.0009346123793993749, "loss": 3.9525, "step": 7705 }, { "epoch": 0.5238483489604566, "grad_norm": 1.113618016242981, "learning_rate": 0.0009345699143905421, "loss": 3.7002, "step": 7710 }, { "epoch": 0.5241880690311184, "grad_norm": 1.3320246934890747, "learning_rate": 0.0009345274493817096, "loss": 3.7377, "step": 7715 }, { "epoch": 0.5245277891017801, "grad_norm": 1.509116768836975, "learning_rate": 0.0009344849843728768, "loss": 3.6709, "step": 7720 }, { "epoch": 0.524867509172442, "grad_norm": 1.0284534692764282, "learning_rate": 0.000934442519364044, "loss": 3.7172, "step": 7725 }, { "epoch": 0.5252072292431037, "grad_norm": 1.2342617511749268, "learning_rate": 0.0009344000543552114, "loss": 3.6105, "step": 7730 }, { "epoch": 0.5255469493137654, "grad_norm": 1.2496405839920044, "learning_rate": 0.0009343575893463786, "loss": 3.5958, "step": 7735 }, { "epoch": 0.5258866693844272, "grad_norm": 1.2541583776474, "learning_rate": 0.0009343151243375458, "loss": 3.7191, "step": 7740 }, { "epoch": 0.526226389455089, "grad_norm": 1.0107487440109253, "learning_rate": 0.0009342726593287131, "loss": 3.641, "step": 7745 }, { "epoch": 0.5265661095257508, "grad_norm": 1.280989408493042, "learning_rate": 0.0009342301943198805, "loss": 3.9554, "step": 7750 }, { "epoch": 0.5269058295964125, "grad_norm": 1.2286425828933716, "learning_rate": 0.0009341877293110477, "loss": 3.8715, "step": 7755 }, { 
"epoch": 0.5272455496670744, "grad_norm": 1.8868203163146973, "learning_rate": 0.000934145264302215, "loss": 3.1447, "step": 7760 }, { "epoch": 0.5275852697377361, "grad_norm": 1.1919766664505005, "learning_rate": 0.0009341027992933823, "loss": 3.7771, "step": 7765 }, { "epoch": 0.5279249898083979, "grad_norm": 1.460443377494812, "learning_rate": 0.0009340603342845495, "loss": 3.7159, "step": 7770 }, { "epoch": 0.5282647098790596, "grad_norm": 3.972892999649048, "learning_rate": 0.0009340178692757168, "loss": 3.9516, "step": 7775 }, { "epoch": 0.5286044299497215, "grad_norm": 1.4918595552444458, "learning_rate": 0.000933975404266884, "loss": 3.3514, "step": 7780 }, { "epoch": 0.5289441500203832, "grad_norm": 1.0850756168365479, "learning_rate": 0.0009339329392580514, "loss": 3.8969, "step": 7785 }, { "epoch": 0.5292838700910449, "grad_norm": 1.0735626220703125, "learning_rate": 0.0009338904742492187, "loss": 3.8229, "step": 7790 }, { "epoch": 0.5296235901617068, "grad_norm": 1.221892237663269, "learning_rate": 0.0009338480092403859, "loss": 3.8966, "step": 7795 }, { "epoch": 0.5299633102323685, "grad_norm": 1.1236017942428589, "learning_rate": 0.0009338055442315532, "loss": 3.5172, "step": 7800 }, { "epoch": 0.5303030303030303, "grad_norm": 1.0543568134307861, "learning_rate": 0.0009337630792227205, "loss": 3.5511, "step": 7805 }, { "epoch": 0.5306427503736921, "grad_norm": 1.4324862957000732, "learning_rate": 0.0009337206142138877, "loss": 3.6518, "step": 7810 }, { "epoch": 0.5309824704443539, "grad_norm": 1.2227822542190552, "learning_rate": 0.000933678149205055, "loss": 3.6506, "step": 7815 }, { "epoch": 0.5313221905150156, "grad_norm": 1.026319146156311, "learning_rate": 0.0009336356841962224, "loss": 3.6612, "step": 7820 }, { "epoch": 0.5316619105856774, "grad_norm": 1.086834192276001, "learning_rate": 0.0009335932191873896, "loss": 3.7798, "step": 7825 }, { "epoch": 0.5320016306563392, "grad_norm": 1.1934934854507446, "learning_rate": 0.0009335507541785568, 
"loss": 3.6533, "step": 7830 }, { "epoch": 0.532341350727001, "grad_norm": 1.1866930723190308, "learning_rate": 0.0009335082891697242, "loss": 3.7766, "step": 7835 }, { "epoch": 0.5326810707976627, "grad_norm": 1.5652979612350464, "learning_rate": 0.0009334658241608914, "loss": 3.9816, "step": 7840 }, { "epoch": 0.5330207908683245, "grad_norm": 1.22066068649292, "learning_rate": 0.0009334233591520586, "loss": 3.6651, "step": 7845 }, { "epoch": 0.5333605109389863, "grad_norm": 0.9763436317443848, "learning_rate": 0.000933380894143226, "loss": 3.7591, "step": 7850 }, { "epoch": 0.533700231009648, "grad_norm": 0.8988045454025269, "learning_rate": 0.0009333384291343933, "loss": 3.847, "step": 7855 }, { "epoch": 0.5340399510803098, "grad_norm": 0.968604326248169, "learning_rate": 0.0009332959641255605, "loss": 3.7062, "step": 7860 }, { "epoch": 0.5343796711509716, "grad_norm": 1.5570576190948486, "learning_rate": 0.0009332534991167279, "loss": 3.6847, "step": 7865 }, { "epoch": 0.5347193912216334, "grad_norm": 1.6227978467941284, "learning_rate": 0.0009332110341078951, "loss": 3.7869, "step": 7870 }, { "epoch": 0.5350591112922951, "grad_norm": 1.0815150737762451, "learning_rate": 0.0009331685690990623, "loss": 3.6967, "step": 7875 }, { "epoch": 0.535398831362957, "grad_norm": 1.631769061088562, "learning_rate": 0.0009331261040902297, "loss": 3.77, "step": 7880 }, { "epoch": 0.5357385514336187, "grad_norm": 1.3114298582077026, "learning_rate": 0.0009330836390813969, "loss": 3.6807, "step": 7885 }, { "epoch": 0.5360782715042804, "grad_norm": 1.006790041923523, "learning_rate": 0.0009330411740725643, "loss": 3.9304, "step": 7890 }, { "epoch": 0.5364179915749423, "grad_norm": 1.5147154331207275, "learning_rate": 0.0009329987090637315, "loss": 3.4588, "step": 7895 }, { "epoch": 0.536757711645604, "grad_norm": 1.091998815536499, "learning_rate": 0.0009329562440548988, "loss": 3.4594, "step": 7900 }, { "epoch": 0.5370974317162658, "grad_norm": 1.1596065759658813, 
"learning_rate": 0.0009329137790460661, "loss": 3.8361, "step": 7905 }, { "epoch": 0.5374371517869275, "grad_norm": 1.0459673404693604, "learning_rate": 0.0009328713140372333, "loss": 3.8111, "step": 7910 }, { "epoch": 0.5377768718575894, "grad_norm": 1.0345773696899414, "learning_rate": 0.0009328288490284006, "loss": 3.8497, "step": 7915 }, { "epoch": 0.5381165919282511, "grad_norm": 1.3941420316696167, "learning_rate": 0.000932786384019568, "loss": 3.6084, "step": 7920 }, { "epoch": 0.5384563119989129, "grad_norm": 1.1645723581314087, "learning_rate": 0.0009327439190107352, "loss": 3.7795, "step": 7925 }, { "epoch": 0.5387960320695747, "grad_norm": 1.067965030670166, "learning_rate": 0.0009327014540019025, "loss": 3.887, "step": 7930 }, { "epoch": 0.5391357521402365, "grad_norm": 0.8955399990081787, "learning_rate": 0.0009326589889930698, "loss": 3.83, "step": 7935 }, { "epoch": 0.5394754722108982, "grad_norm": 1.47920823097229, "learning_rate": 0.000932616523984237, "loss": 3.6246, "step": 7940 }, { "epoch": 0.5398151922815599, "grad_norm": 1.3313688039779663, "learning_rate": 0.0009325740589754042, "loss": 3.7194, "step": 7945 }, { "epoch": 0.5401549123522218, "grad_norm": 1.5197137594223022, "learning_rate": 0.0009325315939665716, "loss": 3.8135, "step": 7950 }, { "epoch": 0.5404946324228835, "grad_norm": 1.1682699918746948, "learning_rate": 0.0009324891289577389, "loss": 3.5323, "step": 7955 }, { "epoch": 0.5408343524935453, "grad_norm": 2.091484308242798, "learning_rate": 0.0009324466639489061, "loss": 3.6887, "step": 7960 }, { "epoch": 0.5411740725642071, "grad_norm": 0.9875277876853943, "learning_rate": 0.0009324041989400735, "loss": 3.6893, "step": 7965 }, { "epoch": 0.5415137926348689, "grad_norm": 1.2725211381912231, "learning_rate": 0.0009323617339312407, "loss": 3.6477, "step": 7970 }, { "epoch": 0.5418535127055306, "grad_norm": 1.039962887763977, "learning_rate": 0.0009323192689224079, "loss": 3.7524, "step": 7975 }, { "epoch": 0.5421932327761925, 
"grad_norm": 1.0999641418457031, "learning_rate": 0.0009322768039135753, "loss": 3.7054, "step": 7980 }, { "epoch": 0.5425329528468542, "grad_norm": 1.0922375917434692, "learning_rate": 0.0009322343389047425, "loss": 3.6485, "step": 7985 }, { "epoch": 0.542872672917516, "grad_norm": 1.116867184638977, "learning_rate": 0.0009321918738959098, "loss": 3.7209, "step": 7990 }, { "epoch": 0.5432123929881777, "grad_norm": 1.4097073078155518, "learning_rate": 0.0009321494088870771, "loss": 3.7682, "step": 7995 }, { "epoch": 0.5435521130588395, "grad_norm": 1.244330883026123, "learning_rate": 0.0009321069438782444, "loss": 3.8275, "step": 8000 }, { "epoch": 0.5438918331295013, "grad_norm": 1.1648627519607544, "learning_rate": 0.0009320644788694116, "loss": 3.5712, "step": 8005 }, { "epoch": 0.544231553200163, "grad_norm": 1.0686423778533936, "learning_rate": 0.0009320220138605789, "loss": 3.8241, "step": 8010 }, { "epoch": 0.5445712732708249, "grad_norm": 0.9846720099449158, "learning_rate": 0.0009319795488517462, "loss": 3.5954, "step": 8015 }, { "epoch": 0.5449109933414866, "grad_norm": 1.12343430519104, "learning_rate": 0.0009319370838429134, "loss": 3.7051, "step": 8020 }, { "epoch": 0.5452507134121484, "grad_norm": 1.1425752639770508, "learning_rate": 0.0009318946188340808, "loss": 3.8743, "step": 8025 }, { "epoch": 0.5455904334828101, "grad_norm": 1.2572709321975708, "learning_rate": 0.000931852153825248, "loss": 3.6645, "step": 8030 }, { "epoch": 0.545930153553472, "grad_norm": 1.0306676626205444, "learning_rate": 0.0009318096888164153, "loss": 3.6918, "step": 8035 }, { "epoch": 0.5462698736241337, "grad_norm": 1.1529666185379028, "learning_rate": 0.0009317672238075826, "loss": 3.495, "step": 8040 }, { "epoch": 0.5466095936947954, "grad_norm": 1.2375340461730957, "learning_rate": 0.0009317247587987498, "loss": 3.7367, "step": 8045 }, { "epoch": 0.5469493137654573, "grad_norm": 1.2010340690612793, "learning_rate": 0.0009316822937899171, "loss": 3.5405, "step": 8050 }, 
{ "epoch": 0.547289033836119, "grad_norm": 1.026538610458374, "learning_rate": 0.0009316398287810844, "loss": 3.6392, "step": 8055 }, { "epoch": 0.5476287539067808, "grad_norm": 1.2080137729644775, "learning_rate": 0.0009315973637722517, "loss": 3.5755, "step": 8060 }, { "epoch": 0.5479684739774426, "grad_norm": 1.1960567235946655, "learning_rate": 0.000931554898763419, "loss": 3.6242, "step": 8065 }, { "epoch": 0.5483081940481044, "grad_norm": 1.293656826019287, "learning_rate": 0.0009315124337545863, "loss": 3.7436, "step": 8070 }, { "epoch": 0.5486479141187661, "grad_norm": 0.9725195169448853, "learning_rate": 0.0009314699687457535, "loss": 3.7168, "step": 8075 }, { "epoch": 0.5489876341894279, "grad_norm": 1.099827766418457, "learning_rate": 0.0009314275037369207, "loss": 3.7043, "step": 8080 }, { "epoch": 0.5493273542600897, "grad_norm": 1.0836575031280518, "learning_rate": 0.0009313850387280881, "loss": 3.6391, "step": 8085 }, { "epoch": 0.5496670743307515, "grad_norm": 1.3097294569015503, "learning_rate": 0.0009313425737192553, "loss": 3.652, "step": 8090 }, { "epoch": 0.5500067944014132, "grad_norm": 1.2588785886764526, "learning_rate": 0.0009313001087104226, "loss": 3.7158, "step": 8095 }, { "epoch": 0.550346514472075, "grad_norm": 1.3679492473602295, "learning_rate": 0.00093125764370159, "loss": 3.6713, "step": 8100 }, { "epoch": 0.5506862345427368, "grad_norm": 1.3806228637695312, "learning_rate": 0.0009312151786927572, "loss": 3.5146, "step": 8105 }, { "epoch": 0.5510259546133985, "grad_norm": 1.398268699645996, "learning_rate": 0.0009311727136839244, "loss": 3.6859, "step": 8110 }, { "epoch": 0.5513656746840603, "grad_norm": 0.9750669002532959, "learning_rate": 0.0009311302486750918, "loss": 3.6191, "step": 8115 }, { "epoch": 0.5517053947547221, "grad_norm": 1.1202324628829956, "learning_rate": 0.000931087783666259, "loss": 3.643, "step": 8120 }, { "epoch": 0.5520451148253839, "grad_norm": 1.0350539684295654, "learning_rate": 0.0009310453186574262, 
"loss": 3.8031, "step": 8125 }, { "epoch": 0.5523848348960456, "grad_norm": 1.8114619255065918, "learning_rate": 0.0009310028536485937, "loss": 3.834, "step": 8130 }, { "epoch": 0.5527245549667075, "grad_norm": 1.102944254875183, "learning_rate": 0.0009309603886397609, "loss": 3.5651, "step": 8135 }, { "epoch": 0.5530642750373692, "grad_norm": 1.157863736152649, "learning_rate": 0.0009309179236309281, "loss": 3.702, "step": 8140 }, { "epoch": 0.553403995108031, "grad_norm": 1.166772723197937, "learning_rate": 0.0009308754586220954, "loss": 3.7879, "step": 8145 }, { "epoch": 0.5537437151786928, "grad_norm": 1.4049512147903442, "learning_rate": 0.0009308329936132627, "loss": 3.4706, "step": 8150 }, { "epoch": 0.5540834352493546, "grad_norm": 1.3575913906097412, "learning_rate": 0.0009307905286044299, "loss": 3.6283, "step": 8155 }, { "epoch": 0.5544231553200163, "grad_norm": 1.1802104711532593, "learning_rate": 0.0009307480635955972, "loss": 3.5531, "step": 8160 }, { "epoch": 0.554762875390678, "grad_norm": 1.3314342498779297, "learning_rate": 0.0009307055985867646, "loss": 3.9009, "step": 8165 }, { "epoch": 0.5551025954613399, "grad_norm": 0.8917223811149597, "learning_rate": 0.0009306631335779318, "loss": 4.0814, "step": 8170 }, { "epoch": 0.5554423155320016, "grad_norm": 0.9018621444702148, "learning_rate": 0.0009306206685690991, "loss": 3.7518, "step": 8175 }, { "epoch": 0.5557820356026634, "grad_norm": 0.9631090760231018, "learning_rate": 0.0009305782035602663, "loss": 3.6326, "step": 8180 }, { "epoch": 0.5561217556733252, "grad_norm": 0.967559814453125, "learning_rate": 0.0009305357385514336, "loss": 3.5214, "step": 8185 }, { "epoch": 0.556461475743987, "grad_norm": 1.1838181018829346, "learning_rate": 0.0009304932735426009, "loss": 3.7342, "step": 8190 }, { "epoch": 0.5568011958146487, "grad_norm": 1.1743766069412231, "learning_rate": 0.0009304508085337681, "loss": 3.6625, "step": 8195 }, { "epoch": 0.5571409158853105, "grad_norm": 0.9093513488769531, 
"learning_rate": 0.0009304083435249355, "loss": 3.7659, "step": 8200 }, { "epoch": 0.5574806359559723, "grad_norm": 1.2958861589431763, "learning_rate": 0.0009303658785161028, "loss": 3.6194, "step": 8205 }, { "epoch": 0.557820356026634, "grad_norm": 1.2411913871765137, "learning_rate": 0.00093032341350727, "loss": 3.727, "step": 8210 }, { "epoch": 0.5581600760972958, "grad_norm": 1.1803476810455322, "learning_rate": 0.0009302809484984373, "loss": 3.6517, "step": 8215 }, { "epoch": 0.5584997961679576, "grad_norm": 0.9271569848060608, "learning_rate": 0.0009302384834896046, "loss": 3.8902, "step": 8220 }, { "epoch": 0.5588395162386194, "grad_norm": 1.2285269498825073, "learning_rate": 0.0009301960184807718, "loss": 3.534, "step": 8225 }, { "epoch": 0.5591792363092811, "grad_norm": 0.9973090291023254, "learning_rate": 0.0009301535534719391, "loss": 3.8, "step": 8230 }, { "epoch": 0.559518956379943, "grad_norm": 1.1633230447769165, "learning_rate": 0.0009301110884631065, "loss": 3.6541, "step": 8235 }, { "epoch": 0.5598586764506047, "grad_norm": 1.0104893445968628, "learning_rate": 0.0009300686234542737, "loss": 3.9559, "step": 8240 }, { "epoch": 0.5601983965212665, "grad_norm": 1.1489261388778687, "learning_rate": 0.000930026158445441, "loss": 3.55, "step": 8245 }, { "epoch": 0.5605381165919282, "grad_norm": 1.3437103033065796, "learning_rate": 0.0009299836934366083, "loss": 3.6505, "step": 8250 }, { "epoch": 0.5608778366625901, "grad_norm": 1.1144694089889526, "learning_rate": 0.0009299412284277755, "loss": 3.4918, "step": 8255 }, { "epoch": 0.5612175567332518, "grad_norm": 1.059277892112732, "learning_rate": 0.0009298987634189428, "loss": 3.6194, "step": 8260 }, { "epoch": 0.5615572768039135, "grad_norm": 0.9629926085472107, "learning_rate": 0.00092985629841011, "loss": 3.6708, "step": 8265 }, { "epoch": 0.5618969968745754, "grad_norm": 1.022129774093628, "learning_rate": 0.0009298138334012774, "loss": 3.6721, "step": 8270 }, { "epoch": 0.5622367169452371, 
"grad_norm": 1.234680414199829, "learning_rate": 0.0009297713683924447, "loss": 3.3855, "step": 8275 }, { "epoch": 0.5625764370158989, "grad_norm": 1.4301639795303345, "learning_rate": 0.000929728903383612, "loss": 3.7044, "step": 8280 }, { "epoch": 0.5629161570865606, "grad_norm": 1.1178141832351685, "learning_rate": 0.0009296864383747792, "loss": 3.6678, "step": 8285 }, { "epoch": 0.5632558771572225, "grad_norm": 1.0708274841308594, "learning_rate": 0.0009296439733659465, "loss": 3.8712, "step": 8290 }, { "epoch": 0.5635955972278842, "grad_norm": 1.26012122631073, "learning_rate": 0.0009296015083571137, "loss": 3.6576, "step": 8295 }, { "epoch": 0.563935317298546, "grad_norm": 0.9677109718322754, "learning_rate": 0.000929559043348281, "loss": 3.814, "step": 8300 }, { "epoch": 0.5642750373692078, "grad_norm": 1.0435636043548584, "learning_rate": 0.0009295165783394484, "loss": 3.6854, "step": 8305 }, { "epoch": 0.5646147574398696, "grad_norm": 1.0354032516479492, "learning_rate": 0.0009294741133306156, "loss": 3.6868, "step": 8310 }, { "epoch": 0.5649544775105313, "grad_norm": 1.1720938682556152, "learning_rate": 0.0009294316483217829, "loss": 3.4466, "step": 8315 }, { "epoch": 0.5652941975811931, "grad_norm": 1.1629348993301392, "learning_rate": 0.0009293891833129502, "loss": 3.4834, "step": 8320 }, { "epoch": 0.5656339176518549, "grad_norm": 1.2596323490142822, "learning_rate": 0.0009293467183041174, "loss": 3.9514, "step": 8325 }, { "epoch": 0.5659736377225166, "grad_norm": 1.3122614622116089, "learning_rate": 0.0009293042532952846, "loss": 3.7452, "step": 8330 }, { "epoch": 0.5663133577931784, "grad_norm": 1.0982940196990967, "learning_rate": 0.000929261788286452, "loss": 3.8681, "step": 8335 }, { "epoch": 0.5666530778638402, "grad_norm": 1.2627911567687988, "learning_rate": 0.0009292193232776193, "loss": 3.759, "step": 8340 }, { "epoch": 0.566992797934502, "grad_norm": 0.9485002756118774, "learning_rate": 0.0009291768582687865, "loss": 3.6201, "step": 8345 }, 
{ "epoch": 0.5673325180051637, "grad_norm": 2.3530304431915283, "learning_rate": 0.0009291343932599539, "loss": 3.9124, "step": 8350 }, { "epoch": 0.5676722380758256, "grad_norm": 1.3477530479431152, "learning_rate": 0.0009290919282511211, "loss": 3.8632, "step": 8355 }, { "epoch": 0.5680119581464873, "grad_norm": 1.1330379247665405, "learning_rate": 0.0009290494632422883, "loss": 3.5733, "step": 8360 }, { "epoch": 0.568351678217149, "grad_norm": 1.1408746242523193, "learning_rate": 0.0009290069982334557, "loss": 3.6236, "step": 8365 }, { "epoch": 0.5686913982878108, "grad_norm": 1.1382471323013306, "learning_rate": 0.0009289645332246229, "loss": 3.6296, "step": 8370 }, { "epoch": 0.5690311183584726, "grad_norm": 0.9852458238601685, "learning_rate": 0.0009289220682157902, "loss": 3.8973, "step": 8375 }, { "epoch": 0.5693708384291344, "grad_norm": 0.9042965173721313, "learning_rate": 0.0009288796032069575, "loss": 3.8789, "step": 8380 }, { "epoch": 0.5697105584997961, "grad_norm": 1.0471912622451782, "learning_rate": 0.0009288371381981248, "loss": 3.7603, "step": 8385 }, { "epoch": 0.570050278570458, "grad_norm": 1.8228298425674438, "learning_rate": 0.000928794673189292, "loss": 3.6368, "step": 8390 }, { "epoch": 0.5703899986411197, "grad_norm": 1.0305967330932617, "learning_rate": 0.0009287522081804593, "loss": 4.0287, "step": 8395 }, { "epoch": 0.5707297187117815, "grad_norm": 0.9414586424827576, "learning_rate": 0.0009287097431716266, "loss": 3.5737, "step": 8400 }, { "epoch": 0.5710694387824433, "grad_norm": 1.0677427053451538, "learning_rate": 0.0009286672781627938, "loss": 3.6982, "step": 8405 }, { "epoch": 0.5714091588531051, "grad_norm": 1.5023539066314697, "learning_rate": 0.0009286248131539612, "loss": 3.7238, "step": 8410 }, { "epoch": 0.5717488789237668, "grad_norm": 1.2275581359863281, "learning_rate": 0.0009285823481451285, "loss": 3.3216, "step": 8415 }, { "epoch": 0.5720885989944285, "grad_norm": 1.229586124420166, "learning_rate": 
0.0009285398831362957, "loss": 3.8021, "step": 8420 }, { "epoch": 0.5724283190650904, "grad_norm": 0.8768678307533264, "learning_rate": 0.000928497418127463, "loss": 3.7423, "step": 8425 }, { "epoch": 0.5727680391357521, "grad_norm": 0.9240059852600098, "learning_rate": 0.0009284549531186302, "loss": 3.7119, "step": 8430 }, { "epoch": 0.5731077592064139, "grad_norm": 1.476675033569336, "learning_rate": 0.0009284124881097975, "loss": 3.4197, "step": 8435 }, { "epoch": 0.5734474792770757, "grad_norm": 1.26360023021698, "learning_rate": 0.0009283700231009648, "loss": 3.6761, "step": 8440 }, { "epoch": 0.5737871993477375, "grad_norm": 1.3524465560913086, "learning_rate": 0.0009283275580921321, "loss": 3.5678, "step": 8445 }, { "epoch": 0.5741269194183992, "grad_norm": 1.1595500707626343, "learning_rate": 0.0009282850930832994, "loss": 3.8003, "step": 8450 }, { "epoch": 0.574466639489061, "grad_norm": 1.118168592453003, "learning_rate": 0.0009282426280744667, "loss": 3.3693, "step": 8455 }, { "epoch": 0.5748063595597228, "grad_norm": 1.1162303686141968, "learning_rate": 0.0009282001630656339, "loss": 3.8755, "step": 8460 }, { "epoch": 0.5751460796303846, "grad_norm": 1.0471099615097046, "learning_rate": 0.0009281576980568011, "loss": 3.7939, "step": 8465 }, { "epoch": 0.5754857997010463, "grad_norm": 1.0803805589675903, "learning_rate": 0.0009281152330479685, "loss": 3.6349, "step": 8470 }, { "epoch": 0.5758255197717081, "grad_norm": 1.1541427373886108, "learning_rate": 0.0009280727680391357, "loss": 3.6057, "step": 8475 }, { "epoch": 0.5761652398423699, "grad_norm": 1.2059776782989502, "learning_rate": 0.000928030303030303, "loss": 3.5588, "step": 8480 }, { "epoch": 0.5765049599130316, "grad_norm": 1.1878701448440552, "learning_rate": 0.0009279878380214704, "loss": 3.7989, "step": 8485 }, { "epoch": 0.5768446799836935, "grad_norm": 1.1629884243011475, "learning_rate": 0.0009279453730126376, "loss": 3.4304, "step": 8490 }, { "epoch": 0.5771844000543552, "grad_norm": 
1.3321497440338135, "learning_rate": 0.0009279029080038048, "loss": 3.7686, "step": 8495 }, { "epoch": 0.577524120125017, "grad_norm": 0.971908450126648, "learning_rate": 0.0009278604429949722, "loss": 3.7795, "step": 8500 }, { "epoch": 0.5778638401956787, "grad_norm": 1.2241370677947998, "learning_rate": 0.0009278179779861394, "loss": 3.5876, "step": 8505 }, { "epoch": 0.5782035602663406, "grad_norm": 1.1324244737625122, "learning_rate": 0.0009277755129773066, "loss": 3.6177, "step": 8510 }, { "epoch": 0.5785432803370023, "grad_norm": 1.1787105798721313, "learning_rate": 0.0009277330479684741, "loss": 3.6581, "step": 8515 }, { "epoch": 0.578883000407664, "grad_norm": 1.1214566230773926, "learning_rate": 0.0009276905829596413, "loss": 3.4816, "step": 8520 }, { "epoch": 0.5792227204783259, "grad_norm": 1.2784178256988525, "learning_rate": 0.0009276481179508085, "loss": 3.606, "step": 8525 }, { "epoch": 0.5795624405489876, "grad_norm": 0.8157287240028381, "learning_rate": 0.0009276056529419758, "loss": 3.6141, "step": 8530 }, { "epoch": 0.5799021606196494, "grad_norm": 1.3935292959213257, "learning_rate": 0.0009275631879331431, "loss": 3.5791, "step": 8535 }, { "epoch": 0.5802418806903111, "grad_norm": 1.4013972282409668, "learning_rate": 0.0009275207229243103, "loss": 3.5484, "step": 8540 }, { "epoch": 0.580581600760973, "grad_norm": 1.2946747541427612, "learning_rate": 0.0009274782579154777, "loss": 3.8194, "step": 8545 }, { "epoch": 0.5809213208316347, "grad_norm": 3.3205649852752686, "learning_rate": 0.000927435792906645, "loss": 3.4065, "step": 8550 }, { "epoch": 0.5812610409022965, "grad_norm": 1.1778417825698853, "learning_rate": 0.0009273933278978122, "loss": 3.8113, "step": 8555 }, { "epoch": 0.5816007609729583, "grad_norm": 1.3152393102645874, "learning_rate": 0.0009273508628889795, "loss": 3.7858, "step": 8560 }, { "epoch": 0.5819404810436201, "grad_norm": 1.1092833280563354, "learning_rate": 0.0009273083978801468, "loss": 3.6452, "step": 8565 }, { 
"epoch": 0.5822802011142818, "grad_norm": 1.3172225952148438, "learning_rate": 0.0009272659328713141, "loss": 3.7124, "step": 8570 }, { "epoch": 0.5826199211849437, "grad_norm": 2.3019824028015137, "learning_rate": 0.0009272234678624813, "loss": 3.7271, "step": 8575 }, { "epoch": 0.5829596412556054, "grad_norm": 1.0870271921157837, "learning_rate": 0.0009271810028536486, "loss": 3.5619, "step": 8580 }, { "epoch": 0.5832993613262671, "grad_norm": 0.9753466844558716, "learning_rate": 0.000927138537844816, "loss": 3.7174, "step": 8585 }, { "epoch": 0.5836390813969289, "grad_norm": 1.2457722425460815, "learning_rate": 0.0009270960728359832, "loss": 3.6789, "step": 8590 }, { "epoch": 0.5839788014675907, "grad_norm": 0.886645495891571, "learning_rate": 0.0009270536078271504, "loss": 3.9689, "step": 8595 }, { "epoch": 0.5843185215382525, "grad_norm": 1.1286975145339966, "learning_rate": 0.0009270111428183178, "loss": 3.5836, "step": 8600 }, { "epoch": 0.5846582416089142, "grad_norm": 1.315155029296875, "learning_rate": 0.000926968677809485, "loss": 3.9965, "step": 8605 }, { "epoch": 0.5849979616795761, "grad_norm": 0.9736418128013611, "learning_rate": 0.0009269262128006522, "loss": 3.568, "step": 8610 }, { "epoch": 0.5853376817502378, "grad_norm": 1.076344609260559, "learning_rate": 0.0009268837477918197, "loss": 3.7521, "step": 8615 }, { "epoch": 0.5856774018208996, "grad_norm": 1.4645717144012451, "learning_rate": 0.0009268412827829869, "loss": 3.5934, "step": 8620 }, { "epoch": 0.5860171218915613, "grad_norm": 1.2411576509475708, "learning_rate": 0.0009267988177741541, "loss": 3.7201, "step": 8625 }, { "epoch": 0.5863568419622232, "grad_norm": 1.0895938873291016, "learning_rate": 0.0009267563527653214, "loss": 3.9199, "step": 8630 }, { "epoch": 0.5866965620328849, "grad_norm": 0.8936330676078796, "learning_rate": 0.0009267138877564887, "loss": 3.7859, "step": 8635 }, { "epoch": 0.5870362821035466, "grad_norm": 1.1498703956604004, "learning_rate": 0.0009266714227476559, 
"loss": 3.2907, "step": 8640 }, { "epoch": 0.5873760021742085, "grad_norm": 1.0005561113357544, "learning_rate": 0.0009266289577388232, "loss": 3.9891, "step": 8645 }, { "epoch": 0.5877157222448702, "grad_norm": 0.9736968874931335, "learning_rate": 0.0009265864927299906, "loss": 3.9936, "step": 8650 }, { "epoch": 0.588055442315532, "grad_norm": 1.3043076992034912, "learning_rate": 0.0009265440277211578, "loss": 3.561, "step": 8655 }, { "epoch": 0.5883951623861938, "grad_norm": 4.146767616271973, "learning_rate": 0.0009265015627123251, "loss": 3.5762, "step": 8660 }, { "epoch": 0.5887348824568556, "grad_norm": 1.044664740562439, "learning_rate": 0.0009264590977034924, "loss": 3.6778, "step": 8665 }, { "epoch": 0.5890746025275173, "grad_norm": 0.8983184695243835, "learning_rate": 0.0009264166326946596, "loss": 3.7975, "step": 8670 }, { "epoch": 0.589414322598179, "grad_norm": 1.138432502746582, "learning_rate": 0.0009263741676858269, "loss": 3.7916, "step": 8675 }, { "epoch": 0.5897540426688409, "grad_norm": 0.9402933120727539, "learning_rate": 0.0009263317026769941, "loss": 3.695, "step": 8680 }, { "epoch": 0.5900937627395026, "grad_norm": 1.2134367227554321, "learning_rate": 0.0009262892376681615, "loss": 3.501, "step": 8685 }, { "epoch": 0.5904334828101644, "grad_norm": 1.6154502630233765, "learning_rate": 0.0009262467726593288, "loss": 3.4393, "step": 8690 }, { "epoch": 0.5907732028808262, "grad_norm": 1.0251001119613647, "learning_rate": 0.000926204307650496, "loss": 3.6722, "step": 8695 }, { "epoch": 0.591112922951488, "grad_norm": 1.3327927589416504, "learning_rate": 0.0009261618426416633, "loss": 3.7394, "step": 8700 }, { "epoch": 0.5914526430221497, "grad_norm": 1.2659369707107544, "learning_rate": 0.0009261193776328306, "loss": 3.8314, "step": 8705 }, { "epoch": 0.5917923630928115, "grad_norm": 1.263846516609192, "learning_rate": 0.0009260769126239978, "loss": 3.6587, "step": 8710 }, { "epoch": 0.5921320831634733, "grad_norm": 1.2626776695251465, 
"learning_rate": 0.000926034447615165, "loss": 3.5643, "step": 8715 }, { "epoch": 0.5924718032341351, "grad_norm": 9.136425971984863, "learning_rate": 0.0009259919826063325, "loss": 3.6867, "step": 8720 }, { "epoch": 0.5928115233047968, "grad_norm": 1.2668908834457397, "learning_rate": 0.0009259495175974997, "loss": 3.7685, "step": 8725 }, { "epoch": 0.5931512433754587, "grad_norm": 1.142119288444519, "learning_rate": 0.0009259070525886669, "loss": 3.7739, "step": 8730 }, { "epoch": 0.5934909634461204, "grad_norm": 1.3790160417556763, "learning_rate": 0.0009258645875798343, "loss": 3.7362, "step": 8735 }, { "epoch": 0.5938306835167821, "grad_norm": 1.1617828607559204, "learning_rate": 0.0009258221225710015, "loss": 3.6963, "step": 8740 }, { "epoch": 0.594170403587444, "grad_norm": 1.326010823249817, "learning_rate": 0.0009257796575621687, "loss": 3.6908, "step": 8745 }, { "epoch": 0.5945101236581057, "grad_norm": 1.1398262977600098, "learning_rate": 0.0009257371925533361, "loss": 3.7941, "step": 8750 }, { "epoch": 0.5948498437287675, "grad_norm": 1.021653175354004, "learning_rate": 0.0009256947275445034, "loss": 3.535, "step": 8755 }, { "epoch": 0.5951895637994292, "grad_norm": 1.8570992946624756, "learning_rate": 0.0009256522625356706, "loss": 3.7407, "step": 8760 }, { "epoch": 0.5955292838700911, "grad_norm": 1.4160524606704712, "learning_rate": 0.000925609797526838, "loss": 3.6416, "step": 8765 }, { "epoch": 0.5958690039407528, "grad_norm": 1.0310999155044556, "learning_rate": 0.0009255673325180052, "loss": 3.7475, "step": 8770 }, { "epoch": 0.5962087240114146, "grad_norm": 1.4872608184814453, "learning_rate": 0.0009255248675091724, "loss": 3.6403, "step": 8775 }, { "epoch": 0.5965484440820764, "grad_norm": 1.1355475187301636, "learning_rate": 0.0009254824025003397, "loss": 3.6722, "step": 8780 }, { "epoch": 0.5968881641527382, "grad_norm": 1.1247501373291016, "learning_rate": 0.000925439937491507, "loss": 3.6449, "step": 8785 }, { "epoch": 0.5972278842233999, 
"grad_norm": 1.1438113451004028, "learning_rate": 0.0009253974724826743, "loss": 3.7754, "step": 8790 }, { "epoch": 0.5975676042940616, "grad_norm": 1.0264915227890015, "learning_rate": 0.0009253550074738416, "loss": 3.7984, "step": 8795 }, { "epoch": 0.5979073243647235, "grad_norm": 0.9352721571922302, "learning_rate": 0.0009253125424650089, "loss": 3.5993, "step": 8800 }, { "epoch": 0.5982470444353852, "grad_norm": 1.1309154033660889, "learning_rate": 0.0009252700774561761, "loss": 3.6306, "step": 8805 }, { "epoch": 0.598586764506047, "grad_norm": 1.1174575090408325, "learning_rate": 0.0009252276124473434, "loss": 3.7155, "step": 8810 }, { "epoch": 0.5989264845767088, "grad_norm": 1.5034209489822388, "learning_rate": 0.0009251851474385106, "loss": 3.7986, "step": 8815 }, { "epoch": 0.5992662046473706, "grad_norm": 1.174607276916504, "learning_rate": 0.0009251426824296779, "loss": 3.4732, "step": 8820 }, { "epoch": 0.5996059247180323, "grad_norm": 1.4493346214294434, "learning_rate": 0.0009251002174208453, "loss": 3.5719, "step": 8825 }, { "epoch": 0.5999456447886942, "grad_norm": 1.0409096479415894, "learning_rate": 0.0009250577524120125, "loss": 3.5385, "step": 8830 }, { "epoch": 0.6002853648593559, "grad_norm": 1.0887541770935059, "learning_rate": 0.0009250152874031798, "loss": 3.6914, "step": 8835 }, { "epoch": 0.6006250849300176, "grad_norm": 1.1573729515075684, "learning_rate": 0.0009249728223943471, "loss": 3.9542, "step": 8840 }, { "epoch": 0.6009648050006794, "grad_norm": 0.9840105175971985, "learning_rate": 0.0009249303573855143, "loss": 3.6007, "step": 8845 }, { "epoch": 0.6013045250713412, "grad_norm": 1.0942631959915161, "learning_rate": 0.0009248878923766816, "loss": 3.9015, "step": 8850 }, { "epoch": 0.601644245142003, "grad_norm": 1.2355669736862183, "learning_rate": 0.0009248454273678489, "loss": 3.6962, "step": 8855 }, { "epoch": 0.6019839652126647, "grad_norm": 1.105302095413208, "learning_rate": 0.0009248029623590162, "loss": 3.6342, "step": 
8860 }, { "epoch": 0.6023236852833266, "grad_norm": 1.0765794515609741, "learning_rate": 0.0009247604973501834, "loss": 3.5795, "step": 8865 }, { "epoch": 0.6026634053539883, "grad_norm": 1.215333342552185, "learning_rate": 0.0009247180323413508, "loss": 3.5974, "step": 8870 }, { "epoch": 0.6030031254246501, "grad_norm": 1.1730180978775024, "learning_rate": 0.000924675567332518, "loss": 3.601, "step": 8875 }, { "epoch": 0.6033428454953118, "grad_norm": 1.2804863452911377, "learning_rate": 0.0009246331023236852, "loss": 3.48, "step": 8880 }, { "epoch": 0.6036825655659737, "grad_norm": 1.1585206985473633, "learning_rate": 0.0009245906373148526, "loss": 3.8903, "step": 8885 }, { "epoch": 0.6040222856366354, "grad_norm": 1.0270103216171265, "learning_rate": 0.0009245481723060198, "loss": 3.6371, "step": 8890 }, { "epoch": 0.6043620057072971, "grad_norm": 1.2029074430465698, "learning_rate": 0.0009245057072971871, "loss": 3.7371, "step": 8895 }, { "epoch": 0.604701725777959, "grad_norm": 2.90682053565979, "learning_rate": 0.0009244632422883545, "loss": 3.7971, "step": 8900 }, { "epoch": 0.6050414458486207, "grad_norm": 1.6518583297729492, "learning_rate": 0.0009244207772795217, "loss": 3.5953, "step": 8905 }, { "epoch": 0.6053811659192825, "grad_norm": 1.1350295543670654, "learning_rate": 0.000924378312270689, "loss": 3.6267, "step": 8910 }, { "epoch": 0.6057208859899443, "grad_norm": 1.5528162717819214, "learning_rate": 0.0009243358472618562, "loss": 3.5307, "step": 8915 }, { "epoch": 0.6060606060606061, "grad_norm": 0.9489932656288147, "learning_rate": 0.0009242933822530235, "loss": 3.707, "step": 8920 }, { "epoch": 0.6064003261312678, "grad_norm": 1.136697769165039, "learning_rate": 0.0009242509172441908, "loss": 3.6433, "step": 8925 }, { "epoch": 0.6067400462019296, "grad_norm": 2.4359829425811768, "learning_rate": 0.0009242084522353581, "loss": 3.8577, "step": 8930 }, { "epoch": 0.6070797662725914, "grad_norm": 1.6428052186965942, "learning_rate": 
0.0009241659872265254, "loss": 3.6704, "step": 8935 }, { "epoch": 0.6074194863432532, "grad_norm": 1.2726483345031738, "learning_rate": 0.0009241235222176927, "loss": 3.6094, "step": 8940 }, { "epoch": 0.6077592064139149, "grad_norm": 1.2551981210708618, "learning_rate": 0.0009240810572088599, "loss": 3.7002, "step": 8945 }, { "epoch": 0.6080989264845768, "grad_norm": 1.155730962753296, "learning_rate": 0.0009240385922000272, "loss": 3.8918, "step": 8950 }, { "epoch": 0.6084386465552385, "grad_norm": 1.3242639303207397, "learning_rate": 0.0009239961271911945, "loss": 3.6084, "step": 8955 }, { "epoch": 0.6087783666259002, "grad_norm": 1.679469108581543, "learning_rate": 0.0009239536621823617, "loss": 3.7305, "step": 8960 }, { "epoch": 0.609118086696562, "grad_norm": 1.40702223777771, "learning_rate": 0.000923911197173529, "loss": 3.4722, "step": 8965 }, { "epoch": 0.6094578067672238, "grad_norm": 0.9789896607398987, "learning_rate": 0.0009238687321646964, "loss": 3.6975, "step": 8970 }, { "epoch": 0.6097975268378856, "grad_norm": 1.3091421127319336, "learning_rate": 0.0009238262671558636, "loss": 3.6101, "step": 8975 }, { "epoch": 0.6101372469085473, "grad_norm": 1.2654308080673218, "learning_rate": 0.0009237838021470308, "loss": 3.5096, "step": 8980 }, { "epoch": 0.6104769669792092, "grad_norm": 1.048289179801941, "learning_rate": 0.0009237413371381982, "loss": 3.4567, "step": 8985 }, { "epoch": 0.6108166870498709, "grad_norm": 0.9472219944000244, "learning_rate": 0.0009236988721293654, "loss": 3.6114, "step": 8990 }, { "epoch": 0.6111564071205327, "grad_norm": 1.3422092199325562, "learning_rate": 0.0009236564071205326, "loss": 4.0014, "step": 8995 }, { "epoch": 0.6114961271911945, "grad_norm": 1.1043121814727783, "learning_rate": 0.0009236139421117001, "loss": 3.643, "step": 9000 }, { "epoch": 0.6118358472618562, "grad_norm": 1.5277009010314941, "learning_rate": 0.0009235714771028673, "loss": 3.7577, "step": 9005 }, { "epoch": 0.612175567332518, "grad_norm": 
1.0715502500534058, "learning_rate": 0.0009235290120940345, "loss": 3.695, "step": 9010 }, { "epoch": 0.6125152874031797, "grad_norm": 1.2453010082244873, "learning_rate": 0.0009234865470852018, "loss": 3.6529, "step": 9015 }, { "epoch": 0.6128550074738416, "grad_norm": 1.784346103668213, "learning_rate": 0.0009234440820763691, "loss": 3.7613, "step": 9020 }, { "epoch": 0.6131947275445033, "grad_norm": 1.639682650566101, "learning_rate": 0.0009234016170675363, "loss": 3.6487, "step": 9025 }, { "epoch": 0.6135344476151651, "grad_norm": 1.1731315851211548, "learning_rate": 0.0009233591520587036, "loss": 3.5539, "step": 9030 }, { "epoch": 0.6138741676858269, "grad_norm": 1.161096453666687, "learning_rate": 0.000923316687049871, "loss": 3.5613, "step": 9035 }, { "epoch": 0.6142138877564887, "grad_norm": 0.8666089773178101, "learning_rate": 0.0009232742220410382, "loss": 3.7759, "step": 9040 }, { "epoch": 0.6145536078271504, "grad_norm": 1.3215117454528809, "learning_rate": 0.0009232317570322055, "loss": 3.8094, "step": 9045 }, { "epoch": 0.6148933278978121, "grad_norm": 1.1847416162490845, "learning_rate": 0.0009231892920233728, "loss": 3.6873, "step": 9050 }, { "epoch": 0.615233047968474, "grad_norm": 1.3229241371154785, "learning_rate": 0.00092314682701454, "loss": 3.886, "step": 9055 }, { "epoch": 0.6155727680391357, "grad_norm": 1.1162426471710205, "learning_rate": 0.0009231043620057073, "loss": 3.7815, "step": 9060 }, { "epoch": 0.6159124881097975, "grad_norm": 1.2113887071609497, "learning_rate": 0.0009230618969968745, "loss": 3.809, "step": 9065 }, { "epoch": 0.6162522081804593, "grad_norm": 1.1474205255508423, "learning_rate": 0.0009230194319880419, "loss": 3.5458, "step": 9070 }, { "epoch": 0.6165919282511211, "grad_norm": 1.179518222808838, "learning_rate": 0.0009229769669792092, "loss": 3.4294, "step": 9075 }, { "epoch": 0.6169316483217828, "grad_norm": 2.737806797027588, "learning_rate": 0.0009229345019703764, "loss": 3.7261, "step": 9080 }, { "epoch": 
0.6172713683924447, "grad_norm": 1.0322529077529907, "learning_rate": 0.0009228920369615437, "loss": 3.766, "step": 9085 }, { "epoch": 0.6176110884631064, "grad_norm": 1.5612504482269287, "learning_rate": 0.000922849571952711, "loss": 3.5664, "step": 9090 }, { "epoch": 0.6179508085337682, "grad_norm": 0.9420763254165649, "learning_rate": 0.0009228071069438782, "loss": 3.6546, "step": 9095 }, { "epoch": 0.6182905286044299, "grad_norm": 1.3298954963684082, "learning_rate": 0.0009227646419350454, "loss": 3.79, "step": 9100 }, { "epoch": 0.6186302486750918, "grad_norm": 1.1655545234680176, "learning_rate": 0.0009227221769262129, "loss": 3.9068, "step": 9105 }, { "epoch": 0.6189699687457535, "grad_norm": 1.2431715726852417, "learning_rate": 0.0009226797119173801, "loss": 3.2622, "step": 9110 }, { "epoch": 0.6193096888164152, "grad_norm": 1.5095977783203125, "learning_rate": 0.0009226372469085473, "loss": 3.8448, "step": 9115 }, { "epoch": 0.6196494088870771, "grad_norm": 1.0919835567474365, "learning_rate": 0.0009225947818997147, "loss": 3.7006, "step": 9120 }, { "epoch": 0.6199891289577388, "grad_norm": 1.1964879035949707, "learning_rate": 0.0009225523168908819, "loss": 3.8438, "step": 9125 }, { "epoch": 0.6203288490284006, "grad_norm": 1.1953970193862915, "learning_rate": 0.0009225098518820491, "loss": 3.7242, "step": 9130 }, { "epoch": 0.6206685690990623, "grad_norm": 1.136218547821045, "learning_rate": 0.0009224673868732166, "loss": 3.8302, "step": 9135 }, { "epoch": 0.6210082891697242, "grad_norm": 0.9496179819107056, "learning_rate": 0.0009224249218643838, "loss": 3.5739, "step": 9140 }, { "epoch": 0.6213480092403859, "grad_norm": 1.1850438117980957, "learning_rate": 0.000922382456855551, "loss": 3.8698, "step": 9145 }, { "epoch": 0.6216877293110477, "grad_norm": 0.7477059364318848, "learning_rate": 0.0009223399918467184, "loss": 3.5218, "step": 9150 }, { "epoch": 0.6220274493817095, "grad_norm": 0.930789589881897, "learning_rate": 0.0009222975268378856, "loss": 
3.6079, "step": 9155 }, { "epoch": 0.6223671694523712, "grad_norm": 1.4445375204086304, "learning_rate": 0.0009222550618290528, "loss": 3.7598, "step": 9160 }, { "epoch": 0.622706889523033, "grad_norm": 1.61017644405365, "learning_rate": 0.0009222125968202201, "loss": 3.6505, "step": 9165 }, { "epoch": 0.6230466095936948, "grad_norm": 1.290149450302124, "learning_rate": 0.0009221701318113875, "loss": 3.6696, "step": 9170 }, { "epoch": 0.6233863296643566, "grad_norm": 1.2520005702972412, "learning_rate": 0.0009221276668025547, "loss": 3.7295, "step": 9175 }, { "epoch": 0.6237260497350183, "grad_norm": 1.5148249864578247, "learning_rate": 0.000922085201793722, "loss": 3.4292, "step": 9180 }, { "epoch": 0.6240657698056801, "grad_norm": 0.9999239444732666, "learning_rate": 0.0009220427367848893, "loss": 3.9514, "step": 9185 }, { "epoch": 0.6244054898763419, "grad_norm": 1.3481850624084473, "learning_rate": 0.0009220002717760565, "loss": 3.784, "step": 9190 }, { "epoch": 0.6247452099470037, "grad_norm": 1.0968656539916992, "learning_rate": 0.0009219578067672238, "loss": 3.8897, "step": 9195 }, { "epoch": 0.6250849300176654, "grad_norm": 1.2976924180984497, "learning_rate": 0.000921915341758391, "loss": 3.4673, "step": 9200 }, { "epoch": 0.6254246500883273, "grad_norm": 1.2199373245239258, "learning_rate": 0.0009218728767495584, "loss": 3.7736, "step": 9205 }, { "epoch": 0.625764370158989, "grad_norm": 1.608184576034546, "learning_rate": 0.0009218304117407257, "loss": 3.9369, "step": 9210 }, { "epoch": 0.6261040902296507, "grad_norm": 1.2682303190231323, "learning_rate": 0.0009217879467318929, "loss": 3.6887, "step": 9215 }, { "epoch": 0.6264438103003126, "grad_norm": 1.0865826606750488, "learning_rate": 0.0009217454817230602, "loss": 3.83, "step": 9220 }, { "epoch": 0.6267835303709743, "grad_norm": 1.282979965209961, "learning_rate": 0.0009217030167142275, "loss": 4.0961, "step": 9225 }, { "epoch": 0.6271232504416361, "grad_norm": 1.5854612588882446, "learning_rate": 
0.0009216605517053947, "loss": 3.6897, "step": 9230 }, { "epoch": 0.6274629705122978, "grad_norm": 1.0634318590164185, "learning_rate": 0.000921618086696562, "loss": 3.6929, "step": 9235 }, { "epoch": 0.6278026905829597, "grad_norm": 1.2178301811218262, "learning_rate": 0.0009215756216877294, "loss": 3.7216, "step": 9240 }, { "epoch": 0.6281424106536214, "grad_norm": 1.3154648542404175, "learning_rate": 0.0009215331566788966, "loss": 3.6296, "step": 9245 }, { "epoch": 0.6284821307242832, "grad_norm": 1.2455015182495117, "learning_rate": 0.000921490691670064, "loss": 3.5266, "step": 9250 }, { "epoch": 0.628821850794945, "grad_norm": 1.432206392288208, "learning_rate": 0.0009214482266612312, "loss": 3.4508, "step": 9255 }, { "epoch": 0.6291615708656068, "grad_norm": 1.0732814073562622, "learning_rate": 0.0009214057616523984, "loss": 3.6732, "step": 9260 }, { "epoch": 0.6295012909362685, "grad_norm": 1.3492592573165894, "learning_rate": 0.0009213632966435657, "loss": 3.4976, "step": 9265 }, { "epoch": 0.6298410110069302, "grad_norm": 1.6270604133605957, "learning_rate": 0.000921320831634733, "loss": 3.543, "step": 9270 }, { "epoch": 0.6301807310775921, "grad_norm": 1.1980057954788208, "learning_rate": 0.0009212783666259003, "loss": 3.6573, "step": 9275 }, { "epoch": 0.6305204511482538, "grad_norm": 1.1538492441177368, "learning_rate": 0.0009212359016170676, "loss": 3.8259, "step": 9280 }, { "epoch": 0.6308601712189156, "grad_norm": 1.1702874898910522, "learning_rate": 0.0009211934366082349, "loss": 3.8055, "step": 9285 }, { "epoch": 0.6311998912895774, "grad_norm": 1.3835698366165161, "learning_rate": 0.0009211509715994021, "loss": 3.2668, "step": 9290 }, { "epoch": 0.6315396113602392, "grad_norm": 0.9478338956832886, "learning_rate": 0.0009211085065905694, "loss": 3.6474, "step": 9295 }, { "epoch": 0.6318793314309009, "grad_norm": 1.2526423931121826, "learning_rate": 0.0009210660415817367, "loss": 3.8313, "step": 9300 }, { "epoch": 0.6322190515015628, "grad_norm": 
3.4071028232574463, "learning_rate": 0.0009210235765729039, "loss": 3.6482, "step": 9305 }, { "epoch": 0.6325587715722245, "grad_norm": 0.9339808821678162, "learning_rate": 0.0009209811115640713, "loss": 3.4489, "step": 9310 }, { "epoch": 0.6328984916428863, "grad_norm": 1.5404815673828125, "learning_rate": 0.0009209386465552385, "loss": 3.5722, "step": 9315 }, { "epoch": 0.633238211713548, "grad_norm": 1.1295987367630005, "learning_rate": 0.0009208961815464058, "loss": 3.5938, "step": 9320 }, { "epoch": 0.6335779317842098, "grad_norm": 1.9533971548080444, "learning_rate": 0.0009208537165375731, "loss": 3.8074, "step": 9325 }, { "epoch": 0.6339176518548716, "grad_norm": 1.2476787567138672, "learning_rate": 0.0009208112515287403, "loss": 3.8519, "step": 9330 }, { "epoch": 0.6342573719255333, "grad_norm": 2.1454145908355713, "learning_rate": 0.0009207687865199076, "loss": 3.8886, "step": 9335 }, { "epoch": 0.6345970919961952, "grad_norm": 1.1075845956802368, "learning_rate": 0.0009207263215110749, "loss": 3.7643, "step": 9340 }, { "epoch": 0.6349368120668569, "grad_norm": 1.1133500337600708, "learning_rate": 0.0009206838565022422, "loss": 3.7296, "step": 9345 }, { "epoch": 0.6352765321375187, "grad_norm": 1.2114723920822144, "learning_rate": 0.0009206413914934095, "loss": 3.7116, "step": 9350 }, { "epoch": 0.6356162522081804, "grad_norm": 0.9732480049133301, "learning_rate": 0.0009205989264845768, "loss": 3.6473, "step": 9355 }, { "epoch": 0.6359559722788423, "grad_norm": 1.1558653116226196, "learning_rate": 0.000920556461475744, "loss": 3.5567, "step": 9360 }, { "epoch": 0.636295692349504, "grad_norm": 1.7229703664779663, "learning_rate": 0.0009205139964669112, "loss": 3.8417, "step": 9365 }, { "epoch": 0.6366354124201657, "grad_norm": 1.1297739744186401, "learning_rate": 0.0009204715314580786, "loss": 3.9791, "step": 9370 }, { "epoch": 0.6369751324908276, "grad_norm": 1.2944468259811401, "learning_rate": 0.0009204290664492458, "loss": 3.6391, "step": 9375 }, { 
"epoch": 0.6373148525614893, "grad_norm": 1.17601478099823, "learning_rate": 0.0009203866014404131, "loss": 3.5933, "step": 9380 }, { "epoch": 0.6376545726321511, "grad_norm": 0.9989631175994873, "learning_rate": 0.0009203441364315805, "loss": 3.7306, "step": 9385 }, { "epoch": 0.6379942927028129, "grad_norm": 1.2583248615264893, "learning_rate": 0.0009203016714227477, "loss": 3.6887, "step": 9390 }, { "epoch": 0.6383340127734747, "grad_norm": 1.2317407131195068, "learning_rate": 0.0009202592064139149, "loss": 3.7049, "step": 9395 }, { "epoch": 0.6386737328441364, "grad_norm": 1.042925477027893, "learning_rate": 0.0009202167414050823, "loss": 3.6315, "step": 9400 }, { "epoch": 0.6390134529147982, "grad_norm": 3.180030107498169, "learning_rate": 0.0009201742763962495, "loss": 3.6287, "step": 9405 }, { "epoch": 0.63935317298546, "grad_norm": 1.2691564559936523, "learning_rate": 0.0009201318113874167, "loss": 3.6123, "step": 9410 }, { "epoch": 0.6396928930561218, "grad_norm": 1.1872044801712036, "learning_rate": 0.0009200893463785841, "loss": 3.5472, "step": 9415 }, { "epoch": 0.6400326131267835, "grad_norm": 1.2308619022369385, "learning_rate": 0.0009200468813697514, "loss": 3.6444, "step": 9420 }, { "epoch": 0.6403723331974454, "grad_norm": 1.1435904502868652, "learning_rate": 0.0009200044163609186, "loss": 3.7937, "step": 9425 }, { "epoch": 0.6407120532681071, "grad_norm": 1.1715255975723267, "learning_rate": 0.0009199619513520859, "loss": 3.6048, "step": 9430 }, { "epoch": 0.6410517733387688, "grad_norm": 1.6417700052261353, "learning_rate": 0.0009199194863432532, "loss": 3.5115, "step": 9435 }, { "epoch": 0.6413914934094306, "grad_norm": 1.1326357126235962, "learning_rate": 0.0009198770213344204, "loss": 3.6953, "step": 9440 }, { "epoch": 0.6417312134800924, "grad_norm": 1.292319893836975, "learning_rate": 0.0009198345563255877, "loss": 3.6417, "step": 9445 }, { "epoch": 0.6420709335507542, "grad_norm": 1.462806224822998, "learning_rate": 0.000919792091316755, 
"loss": 3.6404, "step": 9450 }, { "epoch": 0.6424106536214159, "grad_norm": 1.6839655637741089, "learning_rate": 0.0009197496263079223, "loss": 3.5755, "step": 9455 }, { "epoch": 0.6427503736920778, "grad_norm": 1.534613013267517, "learning_rate": 0.0009197071612990896, "loss": 3.7606, "step": 9460 }, { "epoch": 0.6430900937627395, "grad_norm": 1.3594964742660522, "learning_rate": 0.0009196646962902568, "loss": 3.5576, "step": 9465 }, { "epoch": 0.6434298138334013, "grad_norm": 1.3237297534942627, "learning_rate": 0.0009196222312814241, "loss": 3.6247, "step": 9470 }, { "epoch": 0.6437695339040631, "grad_norm": 1.0494011640548706, "learning_rate": 0.0009195797662725914, "loss": 3.7944, "step": 9475 }, { "epoch": 0.6441092539747248, "grad_norm": 1.0419834852218628, "learning_rate": 0.0009195373012637586, "loss": 3.518, "step": 9480 }, { "epoch": 0.6444489740453866, "grad_norm": 1.1014528274536133, "learning_rate": 0.000919494836254926, "loss": 4.1268, "step": 9485 }, { "epoch": 0.6447886941160483, "grad_norm": 1.3439557552337646, "learning_rate": 0.0009194523712460933, "loss": 3.6613, "step": 9490 }, { "epoch": 0.6451284141867102, "grad_norm": 1.1755105257034302, "learning_rate": 0.0009194099062372605, "loss": 3.6157, "step": 9495 }, { "epoch": 0.6454681342573719, "grad_norm": 1.3568720817565918, "learning_rate": 0.0009193674412284277, "loss": 3.6432, "step": 9500 }, { "epoch": 0.6458078543280337, "grad_norm": 0.9347723722457886, "learning_rate": 0.0009193249762195951, "loss": 3.5368, "step": 9505 }, { "epoch": 0.6461475743986955, "grad_norm": 1.0671249628067017, "learning_rate": 0.0009192825112107623, "loss": 3.5585, "step": 9510 }, { "epoch": 0.6464872944693573, "grad_norm": 0.9771296977996826, "learning_rate": 0.0009192400462019295, "loss": 3.792, "step": 9515 }, { "epoch": 0.646827014540019, "grad_norm": 1.3018873929977417, "learning_rate": 0.000919197581193097, "loss": 3.7953, "step": 9520 }, { "epoch": 0.6471667346106807, "grad_norm": 1.2666610479354858, 
"learning_rate": 0.0009191551161842642, "loss": 3.7476, "step": 9525 }, { "epoch": 0.6475064546813426, "grad_norm": 0.9919410347938538, "learning_rate": 0.0009191126511754314, "loss": 3.7643, "step": 9530 }, { "epoch": 0.6478461747520043, "grad_norm": 1.1042211055755615, "learning_rate": 0.0009190701861665988, "loss": 3.6138, "step": 9535 }, { "epoch": 0.6481858948226661, "grad_norm": 1.5257556438446045, "learning_rate": 0.000919027721157766, "loss": 3.9083, "step": 9540 }, { "epoch": 0.6485256148933279, "grad_norm": 1.4169899225234985, "learning_rate": 0.0009189852561489332, "loss": 3.7854, "step": 9545 }, { "epoch": 0.6488653349639897, "grad_norm": 1.1392148733139038, "learning_rate": 0.0009189427911401005, "loss": 3.8815, "step": 9550 }, { "epoch": 0.6492050550346514, "grad_norm": 0.9400469660758972, "learning_rate": 0.0009189003261312679, "loss": 3.5906, "step": 9555 }, { "epoch": 0.6495447751053133, "grad_norm": 1.0108259916305542, "learning_rate": 0.0009188578611224351, "loss": 3.7645, "step": 9560 }, { "epoch": 0.649884495175975, "grad_norm": 1.057002067565918, "learning_rate": 0.0009188153961136024, "loss": 3.8384, "step": 9565 }, { "epoch": 0.6502242152466368, "grad_norm": 1.1123708486557007, "learning_rate": 0.0009187729311047697, "loss": 3.6098, "step": 9570 }, { "epoch": 0.6505639353172985, "grad_norm": 1.4539849758148193, "learning_rate": 0.0009187304660959369, "loss": 3.4258, "step": 9575 }, { "epoch": 0.6509036553879604, "grad_norm": 1.1243523359298706, "learning_rate": 0.0009186880010871042, "loss": 3.6879, "step": 9580 }, { "epoch": 0.6512433754586221, "grad_norm": 4.451860427856445, "learning_rate": 0.0009186455360782715, "loss": 3.5026, "step": 9585 }, { "epoch": 0.6515830955292838, "grad_norm": 0.8600742816925049, "learning_rate": 0.0009186030710694389, "loss": 3.6613, "step": 9590 }, { "epoch": 0.6519228155999457, "grad_norm": 1.2628276348114014, "learning_rate": 0.0009185606060606061, "loss": 3.7321, "step": 9595 }, { "epoch": 
0.6522625356706074, "grad_norm": 1.1841604709625244, "learning_rate": 0.0009185181410517733, "loss": 3.496, "step": 9600 }, { "epoch": 0.6526022557412692, "grad_norm": 1.0440059900283813, "learning_rate": 0.0009184756760429407, "loss": 3.701, "step": 9605 }, { "epoch": 0.6529419758119309, "grad_norm": 1.1705007553100586, "learning_rate": 0.0009184332110341079, "loss": 3.7458, "step": 9610 }, { "epoch": 0.6532816958825928, "grad_norm": 1.149965524673462, "learning_rate": 0.0009183907460252751, "loss": 3.6949, "step": 9615 }, { "epoch": 0.6536214159532545, "grad_norm": 1.1859850883483887, "learning_rate": 0.0009183482810164426, "loss": 3.615, "step": 9620 }, { "epoch": 0.6539611360239163, "grad_norm": 1.2158559560775757, "learning_rate": 0.0009183058160076098, "loss": 3.883, "step": 9625 }, { "epoch": 0.6543008560945781, "grad_norm": 1.1399497985839844, "learning_rate": 0.000918263350998777, "loss": 3.714, "step": 9630 }, { "epoch": 0.6546405761652399, "grad_norm": 1.0692484378814697, "learning_rate": 0.0009182208859899444, "loss": 3.607, "step": 9635 }, { "epoch": 0.6549802962359016, "grad_norm": 1.4731788635253906, "learning_rate": 0.0009181784209811116, "loss": 3.6661, "step": 9640 }, { "epoch": 0.6553200163065634, "grad_norm": 1.0258409976959229, "learning_rate": 0.0009181359559722788, "loss": 3.5556, "step": 9645 }, { "epoch": 0.6556597363772252, "grad_norm": 1.3043683767318726, "learning_rate": 0.0009180934909634461, "loss": 3.2751, "step": 9650 }, { "epoch": 0.6559994564478869, "grad_norm": 1.222944974899292, "learning_rate": 0.0009180510259546135, "loss": 3.7358, "step": 9655 }, { "epoch": 0.6563391765185487, "grad_norm": 1.1392446756362915, "learning_rate": 0.0009180085609457807, "loss": 3.9832, "step": 9660 }, { "epoch": 0.6566788965892105, "grad_norm": 1.4741792678833008, "learning_rate": 0.000917966095936948, "loss": 3.5675, "step": 9665 }, { "epoch": 0.6570186166598723, "grad_norm": 1.2614558935165405, "learning_rate": 0.0009179236309281153, "loss": 
3.2522, "step": 9670 }, { "epoch": 0.657358336730534, "grad_norm": 1.28248929977417, "learning_rate": 0.0009178811659192825, "loss": 3.4809, "step": 9675 }, { "epoch": 0.6576980568011959, "grad_norm": 1.0011554956436157, "learning_rate": 0.0009178387009104498, "loss": 3.8903, "step": 9680 }, { "epoch": 0.6580377768718576, "grad_norm": 1.2435669898986816, "learning_rate": 0.000917796235901617, "loss": 3.482, "step": 9685 }, { "epoch": 0.6583774969425193, "grad_norm": 1.544433355331421, "learning_rate": 0.0009177537708927844, "loss": 3.4156, "step": 9690 }, { "epoch": 0.6587172170131811, "grad_norm": 1.4483829736709595, "learning_rate": 0.0009177113058839517, "loss": 3.7107, "step": 9695 }, { "epoch": 0.6590569370838429, "grad_norm": 1.0983855724334717, "learning_rate": 0.000917668840875119, "loss": 3.6431, "step": 9700 }, { "epoch": 0.6593966571545047, "grad_norm": 1.2256834506988525, "learning_rate": 0.0009176263758662862, "loss": 3.6543, "step": 9705 }, { "epoch": 0.6597363772251664, "grad_norm": 0.8990233540534973, "learning_rate": 0.0009175839108574535, "loss": 3.6485, "step": 9710 }, { "epoch": 0.6600760972958283, "grad_norm": 1.0592541694641113, "learning_rate": 0.0009175414458486207, "loss": 3.6778, "step": 9715 }, { "epoch": 0.66041581736649, "grad_norm": 0.9518468976020813, "learning_rate": 0.000917498980839788, "loss": 4.194, "step": 9720 }, { "epoch": 0.6607555374371518, "grad_norm": 1.246412754058838, "learning_rate": 0.0009174565158309554, "loss": 3.7784, "step": 9725 }, { "epoch": 0.6610952575078136, "grad_norm": 1.4394553899765015, "learning_rate": 0.0009174140508221226, "loss": 3.3805, "step": 9730 }, { "epoch": 0.6614349775784754, "grad_norm": 1.4515434503555298, "learning_rate": 0.0009173715858132899, "loss": 3.6172, "step": 9735 }, { "epoch": 0.6617746976491371, "grad_norm": 1.3130757808685303, "learning_rate": 0.0009173291208044572, "loss": 3.5637, "step": 9740 }, { "epoch": 0.6621144177197988, "grad_norm": 1.1394271850585938, "learning_rate": 
0.0009172866557956244, "loss": 3.6692, "step": 9745 }, { "epoch": 0.6624541377904607, "grad_norm": 1.4996399879455566, "learning_rate": 0.0009172441907867916, "loss": 3.7557, "step": 9750 }, { "epoch": 0.6627938578611224, "grad_norm": 1.2392324209213257, "learning_rate": 0.000917201725777959, "loss": 3.8429, "step": 9755 }, { "epoch": 0.6631335779317842, "grad_norm": 1.3441556692123413, "learning_rate": 0.0009171592607691263, "loss": 3.5786, "step": 9760 }, { "epoch": 0.663473298002446, "grad_norm": 1.3264137506484985, "learning_rate": 0.0009171167957602935, "loss": 3.4333, "step": 9765 }, { "epoch": 0.6638130180731078, "grad_norm": 1.2194457054138184, "learning_rate": 0.0009170743307514609, "loss": 3.7999, "step": 9770 }, { "epoch": 0.6641527381437695, "grad_norm": 1.6252371072769165, "learning_rate": 0.0009170318657426281, "loss": 3.4592, "step": 9775 }, { "epoch": 0.6644924582144313, "grad_norm": 0.9553839564323425, "learning_rate": 0.0009169894007337953, "loss": 3.888, "step": 9780 }, { "epoch": 0.6648321782850931, "grad_norm": 0.9729276895523071, "learning_rate": 0.0009169469357249627, "loss": 3.9106, "step": 9785 }, { "epoch": 0.6651718983557549, "grad_norm": 1.067948341369629, "learning_rate": 0.0009169044707161299, "loss": 3.5571, "step": 9790 }, { "epoch": 0.6655116184264166, "grad_norm": 1.5369620323181152, "learning_rate": 0.0009168620057072972, "loss": 3.4938, "step": 9795 }, { "epoch": 0.6658513384970784, "grad_norm": 1.3125444650650024, "learning_rate": 0.0009168195406984645, "loss": 3.5557, "step": 9800 }, { "epoch": 0.6661910585677402, "grad_norm": 1.068041443824768, "learning_rate": 0.0009167770756896318, "loss": 3.5733, "step": 9805 }, { "epoch": 0.6665307786384019, "grad_norm": 0.9821920990943909, "learning_rate": 0.000916734610680799, "loss": 3.6969, "step": 9810 }, { "epoch": 0.6668704987090638, "grad_norm": 1.792399287223816, "learning_rate": 0.0009166921456719663, "loss": 3.8181, "step": 9815 }, { "epoch": 0.6672102187797255, "grad_norm": 
1.4332616329193115, "learning_rate": 0.0009166496806631336, "loss": 3.9513, "step": 9820 }, { "epoch": 0.6675499388503873, "grad_norm": 12.4526948928833, "learning_rate": 0.0009166072156543008, "loss": 3.7547, "step": 9825 }, { "epoch": 0.667889658921049, "grad_norm": 1.093277096748352, "learning_rate": 0.0009165647506454682, "loss": 3.6604, "step": 9830 }, { "epoch": 0.6682293789917109, "grad_norm": 1.2967170476913452, "learning_rate": 0.0009165222856366355, "loss": 3.6559, "step": 9835 }, { "epoch": 0.6685690990623726, "grad_norm": 1.3278918266296387, "learning_rate": 0.0009164798206278027, "loss": 3.7353, "step": 9840 }, { "epoch": 0.6689088191330343, "grad_norm": 1.2703101634979248, "learning_rate": 0.00091643735561897, "loss": 3.8221, "step": 9845 }, { "epoch": 0.6692485392036962, "grad_norm": 1.0879133939743042, "learning_rate": 0.0009163948906101372, "loss": 3.5214, "step": 9850 }, { "epoch": 0.6695882592743579, "grad_norm": 1.3343391418457031, "learning_rate": 0.0009163524256013045, "loss": 3.9527, "step": 9855 }, { "epoch": 0.6699279793450197, "grad_norm": 1.0177228450775146, "learning_rate": 0.0009163099605924718, "loss": 3.8018, "step": 9860 }, { "epoch": 0.6702676994156814, "grad_norm": 1.0147511959075928, "learning_rate": 0.0009162674955836391, "loss": 3.79, "step": 9865 }, { "epoch": 0.6706074194863433, "grad_norm": 1.694706678390503, "learning_rate": 0.0009162250305748064, "loss": 3.5638, "step": 9870 }, { "epoch": 0.670947139557005, "grad_norm": 0.967812180519104, "learning_rate": 0.0009161825655659737, "loss": 4.0276, "step": 9875 }, { "epoch": 0.6712868596276668, "grad_norm": 1.15088951587677, "learning_rate": 0.0009161401005571409, "loss": 3.7539, "step": 9880 }, { "epoch": 0.6716265796983286, "grad_norm": 2.041304111480713, "learning_rate": 0.0009160976355483081, "loss": 3.7835, "step": 9885 }, { "epoch": 0.6719662997689904, "grad_norm": 1.1055501699447632, "learning_rate": 0.0009160551705394755, "loss": 3.5796, "step": 9890 }, { "epoch": 
0.6723060198396521, "grad_norm": 1.175445318222046, "learning_rate": 0.0009160127055306427, "loss": 3.4382, "step": 9895 }, { "epoch": 0.672645739910314, "grad_norm": 1.2017370462417603, "learning_rate": 0.00091597024052181, "loss": 3.8155, "step": 9900 }, { "epoch": 0.6729854599809757, "grad_norm": 1.1295603513717651, "learning_rate": 0.0009159277755129774, "loss": 3.4282, "step": 9905 }, { "epoch": 0.6733251800516374, "grad_norm": 0.9265357851982117, "learning_rate": 0.0009158853105041446, "loss": 3.7641, "step": 9910 }, { "epoch": 0.6736649001222992, "grad_norm": 1.0473254919052124, "learning_rate": 0.0009158428454953118, "loss": 3.8033, "step": 9915 }, { "epoch": 0.674004620192961, "grad_norm": 1.1439317464828491, "learning_rate": 0.0009158003804864792, "loss": 3.7265, "step": 9920 }, { "epoch": 0.6743443402636228, "grad_norm": 1.4678452014923096, "learning_rate": 0.0009157579154776464, "loss": 3.5245, "step": 9925 }, { "epoch": 0.6746840603342845, "grad_norm": 0.8866499066352844, "learning_rate": 0.0009157154504688137, "loss": 3.7057, "step": 9930 }, { "epoch": 0.6750237804049464, "grad_norm": 1.263838529586792, "learning_rate": 0.0009156729854599811, "loss": 3.738, "step": 9935 }, { "epoch": 0.6753635004756081, "grad_norm": 1.2749247550964355, "learning_rate": 0.0009156305204511483, "loss": 3.7188, "step": 9940 }, { "epoch": 0.6757032205462699, "grad_norm": 1.065765619277954, "learning_rate": 0.0009155880554423156, "loss": 3.6284, "step": 9945 }, { "epoch": 0.6760429406169316, "grad_norm": 1.019703984260559, "learning_rate": 0.0009155455904334828, "loss": 3.8866, "step": 9950 }, { "epoch": 0.6763826606875935, "grad_norm": 1.1609525680541992, "learning_rate": 0.0009155031254246501, "loss": 3.6435, "step": 9955 }, { "epoch": 0.6767223807582552, "grad_norm": 1.1939761638641357, "learning_rate": 0.0009154606604158174, "loss": 3.6994, "step": 9960 }, { "epoch": 0.6770621008289169, "grad_norm": 1.2080419063568115, "learning_rate": 0.0009154181954069846, "loss": 
3.6856, "step": 9965 }, { "epoch": 0.6774018208995788, "grad_norm": 1.2816352844238281, "learning_rate": 0.000915375730398152, "loss": 3.5842, "step": 9970 }, { "epoch": 0.6777415409702405, "grad_norm": 1.2930136919021606, "learning_rate": 0.0009153332653893193, "loss": 3.6005, "step": 9975 }, { "epoch": 0.6780812610409023, "grad_norm": 1.112051248550415, "learning_rate": 0.0009152908003804865, "loss": 3.7124, "step": 9980 }, { "epoch": 0.6784209811115641, "grad_norm": 1.2149434089660645, "learning_rate": 0.0009152483353716537, "loss": 3.5708, "step": 9985 }, { "epoch": 0.6787607011822259, "grad_norm": 1.0350990295410156, "learning_rate": 0.0009152058703628211, "loss": 3.7548, "step": 9990 }, { "epoch": 0.6791004212528876, "grad_norm": 1.0763710737228394, "learning_rate": 0.0009151634053539883, "loss": 3.432, "step": 9995 }, { "epoch": 0.6794401413235494, "grad_norm": 1.641727089881897, "learning_rate": 0.0009151209403451555, "loss": 3.5743, "step": 10000 }, { "epoch": 0.6797798613942112, "grad_norm": 1.2064820528030396, "learning_rate": 0.000915078475336323, "loss": 3.9351, "step": 10005 }, { "epoch": 0.680119581464873, "grad_norm": 2.876999855041504, "learning_rate": 0.0009150360103274902, "loss": 3.7434, "step": 10010 }, { "epoch": 0.6804593015355347, "grad_norm": 1.0672874450683594, "learning_rate": 0.0009149935453186574, "loss": 3.6589, "step": 10015 }, { "epoch": 0.6807990216061965, "grad_norm": 1.1143921613693237, "learning_rate": 0.0009149510803098248, "loss": 3.5905, "step": 10020 }, { "epoch": 0.6811387416768583, "grad_norm": 1.1888877153396606, "learning_rate": 0.000914908615300992, "loss": 3.8413, "step": 10025 }, { "epoch": 0.68147846174752, "grad_norm": 5.231160640716553, "learning_rate": 0.0009148661502921592, "loss": 3.5384, "step": 10030 }, { "epoch": 0.6818181818181818, "grad_norm": 1.1157763004302979, "learning_rate": 0.0009148236852833266, "loss": 3.9566, "step": 10035 }, { "epoch": 0.6821579018888436, "grad_norm": 2.076530933380127, 
"learning_rate": 0.0009147812202744939, "loss": 3.6312, "step": 10040 }, { "epoch": 0.6824976219595054, "grad_norm": 1.141019582748413, "learning_rate": 0.0009147387552656611, "loss": 3.5581, "step": 10045 }, { "epoch": 0.6828373420301671, "grad_norm": 1.2315690517425537, "learning_rate": 0.0009146962902568284, "loss": 3.4768, "step": 10050 }, { "epoch": 0.683177062100829, "grad_norm": 1.1492469310760498, "learning_rate": 0.0009146538252479957, "loss": 3.7711, "step": 10055 }, { "epoch": 0.6835167821714907, "grad_norm": 1.0269432067871094, "learning_rate": 0.0009146113602391629, "loss": 3.8425, "step": 10060 }, { "epoch": 0.6838565022421524, "grad_norm": 1.1005327701568604, "learning_rate": 0.0009145688952303302, "loss": 3.7191, "step": 10065 }, { "epoch": 0.6841962223128143, "grad_norm": 1.0725760459899902, "learning_rate": 0.0009145264302214975, "loss": 3.9121, "step": 10070 }, { "epoch": 0.684535942383476, "grad_norm": 3.1587440967559814, "learning_rate": 0.0009144839652126648, "loss": 3.3933, "step": 10075 }, { "epoch": 0.6848756624541378, "grad_norm": 1.2572013139724731, "learning_rate": 0.0009144415002038321, "loss": 3.6062, "step": 10080 }, { "epoch": 0.6852153825247995, "grad_norm": 1.0583091974258423, "learning_rate": 0.0009143990351949994, "loss": 3.6433, "step": 10085 }, { "epoch": 0.6855551025954614, "grad_norm": 1.1038916110992432, "learning_rate": 0.0009143565701861666, "loss": 3.6474, "step": 10090 }, { "epoch": 0.6858948226661231, "grad_norm": 1.2132172584533691, "learning_rate": 0.0009143141051773339, "loss": 3.5819, "step": 10095 }, { "epoch": 0.6862345427367849, "grad_norm": 1.5730916261672974, "learning_rate": 0.0009142716401685011, "loss": 3.7442, "step": 10100 }, { "epoch": 0.6865742628074467, "grad_norm": 1.3177262544631958, "learning_rate": 0.0009142291751596684, "loss": 3.5832, "step": 10105 }, { "epoch": 0.6869139828781085, "grad_norm": 1.1547642946243286, "learning_rate": 0.0009141867101508358, "loss": 3.6601, "step": 10110 }, { "epoch": 
0.6872537029487702, "grad_norm": 1.2815319299697876, "learning_rate": 0.000914144245142003, "loss": 3.7935, "step": 10115 }, { "epoch": 0.6875934230194319, "grad_norm": 1.5822850465774536, "learning_rate": 0.0009141017801331703, "loss": 3.4091, "step": 10120 }, { "epoch": 0.6879331430900938, "grad_norm": 1.2340657711029053, "learning_rate": 0.0009140593151243376, "loss": 3.7924, "step": 10125 }, { "epoch": 0.6882728631607555, "grad_norm": 1.1817152500152588, "learning_rate": 0.0009140168501155048, "loss": 3.5254, "step": 10130 }, { "epoch": 0.6886125832314173, "grad_norm": 0.8590853214263916, "learning_rate": 0.000913974385106672, "loss": 3.7372, "step": 10135 }, { "epoch": 0.6889523033020791, "grad_norm": 1.38112473487854, "learning_rate": 0.0009139319200978394, "loss": 3.5508, "step": 10140 }, { "epoch": 0.6892920233727409, "grad_norm": 1.0187846422195435, "learning_rate": 0.0009138894550890067, "loss": 3.7577, "step": 10145 }, { "epoch": 0.6896317434434026, "grad_norm": 1.1282687187194824, "learning_rate": 0.0009138469900801739, "loss": 3.7697, "step": 10150 }, { "epoch": 0.6899714635140645, "grad_norm": 1.2274410724639893, "learning_rate": 0.0009138045250713413, "loss": 3.6685, "step": 10155 }, { "epoch": 0.6903111835847262, "grad_norm": 1.3886449337005615, "learning_rate": 0.0009137620600625085, "loss": 3.6018, "step": 10160 }, { "epoch": 0.690650903655388, "grad_norm": 1.5834449529647827, "learning_rate": 0.0009137195950536757, "loss": 3.5823, "step": 10165 }, { "epoch": 0.6909906237260497, "grad_norm": 1.3317846059799194, "learning_rate": 0.0009136771300448431, "loss": 3.6044, "step": 10170 }, { "epoch": 0.6913303437967115, "grad_norm": 1.1768476963043213, "learning_rate": 0.0009136346650360103, "loss": 3.4725, "step": 10175 }, { "epoch": 0.6916700638673733, "grad_norm": 1.3783037662506104, "learning_rate": 0.0009135922000271776, "loss": 3.8956, "step": 10180 }, { "epoch": 0.692009783938035, "grad_norm": 1.0312321186065674, "learning_rate": 
0.000913549735018345, "loss": 3.2313, "step": 10185 }, { "epoch": 0.6923495040086969, "grad_norm": 1.1274757385253906, "learning_rate": 0.0009135072700095122, "loss": 3.5499, "step": 10190 }, { "epoch": 0.6926892240793586, "grad_norm": 1.0552892684936523, "learning_rate": 0.0009134648050006794, "loss": 3.746, "step": 10195 }, { "epoch": 0.6930289441500204, "grad_norm": 1.1841036081314087, "learning_rate": 0.0009134223399918467, "loss": 3.6565, "step": 10200 }, { "epoch": 0.6933686642206821, "grad_norm": 1.3338420391082764, "learning_rate": 0.000913379874983014, "loss": 3.3599, "step": 10205 }, { "epoch": 0.693708384291344, "grad_norm": 0.9082456231117249, "learning_rate": 0.0009133374099741812, "loss": 3.6741, "step": 10210 }, { "epoch": 0.6940481043620057, "grad_norm": 1.8371293544769287, "learning_rate": 0.0009132949449653486, "loss": 3.795, "step": 10215 }, { "epoch": 0.6943878244326674, "grad_norm": 1.337483286857605, "learning_rate": 0.0009132524799565159, "loss": 3.7459, "step": 10220 }, { "epoch": 0.6947275445033293, "grad_norm": 1.663328766822815, "learning_rate": 0.0009132100149476831, "loss": 3.708, "step": 10225 }, { "epoch": 0.695067264573991, "grad_norm": 1.2042278051376343, "learning_rate": 0.0009131675499388504, "loss": 3.7646, "step": 10230 }, { "epoch": 0.6954069846446528, "grad_norm": 1.1943027973175049, "learning_rate": 0.0009131250849300176, "loss": 3.7347, "step": 10235 }, { "epoch": 0.6957467047153146, "grad_norm": 1.0906248092651367, "learning_rate": 0.0009130826199211849, "loss": 3.6948, "step": 10240 }, { "epoch": 0.6960864247859764, "grad_norm": 3.6802313327789307, "learning_rate": 0.0009130401549123523, "loss": 3.8552, "step": 10245 }, { "epoch": 0.6964261448566381, "grad_norm": 1.7402929067611694, "learning_rate": 0.0009129976899035195, "loss": 3.6851, "step": 10250 }, { "epoch": 0.6967658649272999, "grad_norm": 1.6085401773452759, "learning_rate": 0.0009129552248946868, "loss": 3.5827, "step": 10255 }, { "epoch": 0.6971055849979617, 
"grad_norm": 1.6676709651947021, "learning_rate": 0.0009129127598858541, "loss": 3.7569, "step": 10260 }, { "epoch": 0.6974453050686235, "grad_norm": 1.4212422370910645, "learning_rate": 0.0009128702948770213, "loss": 3.6699, "step": 10265 }, { "epoch": 0.6977850251392852, "grad_norm": 1.1283814907073975, "learning_rate": 0.0009128278298681887, "loss": 3.8442, "step": 10270 }, { "epoch": 0.698124745209947, "grad_norm": 1.0334235429763794, "learning_rate": 0.0009127853648593559, "loss": 3.5412, "step": 10275 }, { "epoch": 0.6984644652806088, "grad_norm": 1.3299715518951416, "learning_rate": 0.0009127428998505232, "loss": 3.6876, "step": 10280 }, { "epoch": 0.6988041853512705, "grad_norm": 1.0917026996612549, "learning_rate": 0.0009127004348416906, "loss": 3.6479, "step": 10285 }, { "epoch": 0.6991439054219323, "grad_norm": 1.3009107112884521, "learning_rate": 0.0009126579698328578, "loss": 3.6087, "step": 10290 }, { "epoch": 0.6994836254925941, "grad_norm": 1.1358500719070435, "learning_rate": 0.000912615504824025, "loss": 3.3138, "step": 10295 }, { "epoch": 0.6998233455632559, "grad_norm": 1.0704761743545532, "learning_rate": 0.0009125730398151923, "loss": 3.6822, "step": 10300 }, { "epoch": 0.7001630656339176, "grad_norm": 1.6572788953781128, "learning_rate": 0.0009125305748063596, "loss": 3.5973, "step": 10305 }, { "epoch": 0.7005027857045795, "grad_norm": 1.414404273033142, "learning_rate": 0.0009124881097975268, "loss": 3.5607, "step": 10310 }, { "epoch": 0.7008425057752412, "grad_norm": 1.307868242263794, "learning_rate": 0.0009124456447886942, "loss": 3.6544, "step": 10315 }, { "epoch": 0.701182225845903, "grad_norm": 1.0812915563583374, "learning_rate": 0.0009124031797798615, "loss": 3.5338, "step": 10320 }, { "epoch": 0.7015219459165648, "grad_norm": 1.0789785385131836, "learning_rate": 0.0009123607147710287, "loss": 3.6561, "step": 10325 }, { "epoch": 0.7018616659872265, "grad_norm": 1.212778091430664, "learning_rate": 0.000912318249762196, "loss": 3.6589, 
"step": 10330 }, { "epoch": 0.7022013860578883, "grad_norm": 1.048552393913269, "learning_rate": 0.0009122757847533632, "loss": 3.6198, "step": 10335 }, { "epoch": 0.70254110612855, "grad_norm": 0.9788283705711365, "learning_rate": 0.0009122333197445305, "loss": 3.5019, "step": 10340 }, { "epoch": 0.7028808261992119, "grad_norm": 1.4455678462982178, "learning_rate": 0.0009121908547356978, "loss": 3.8256, "step": 10345 }, { "epoch": 0.7032205462698736, "grad_norm": 1.1649855375289917, "learning_rate": 0.0009121483897268651, "loss": 3.5944, "step": 10350 }, { "epoch": 0.7035602663405354, "grad_norm": 1.102576494216919, "learning_rate": 0.0009121059247180324, "loss": 3.7114, "step": 10355 }, { "epoch": 0.7038999864111972, "grad_norm": 1.379135012626648, "learning_rate": 0.0009120634597091997, "loss": 3.8813, "step": 10360 }, { "epoch": 0.704239706481859, "grad_norm": 1.443343162536621, "learning_rate": 0.0009120209947003669, "loss": 3.3992, "step": 10365 }, { "epoch": 0.7045794265525207, "grad_norm": 1.500177025794983, "learning_rate": 0.0009119785296915342, "loss": 3.746, "step": 10370 }, { "epoch": 0.7049191466231824, "grad_norm": 1.0450958013534546, "learning_rate": 0.0009119360646827015, "loss": 3.6014, "step": 10375 }, { "epoch": 0.7052588666938443, "grad_norm": 0.9837779998779297, "learning_rate": 0.0009118935996738687, "loss": 3.7684, "step": 10380 }, { "epoch": 0.705598586764506, "grad_norm": 1.1270365715026855, "learning_rate": 0.000911851134665036, "loss": 3.7639, "step": 10385 }, { "epoch": 0.7059383068351678, "grad_norm": 1.2072405815124512, "learning_rate": 0.0009118086696562034, "loss": 3.6904, "step": 10390 }, { "epoch": 0.7062780269058296, "grad_norm": 1.1073763370513916, "learning_rate": 0.0009117662046473706, "loss": 3.7609, "step": 10395 }, { "epoch": 0.7066177469764914, "grad_norm": 1.0592936277389526, "learning_rate": 0.0009117237396385378, "loss": 3.6646, "step": 10400 }, { "epoch": 0.7069574670471531, "grad_norm": 1.10124933719635, 
"learning_rate": 0.0009116812746297052, "loss": 3.6136, "step": 10405 }, { "epoch": 0.707297187117815, "grad_norm": 1.0713011026382446, "learning_rate": 0.0009116388096208724, "loss": 3.7308, "step": 10410 }, { "epoch": 0.7076369071884767, "grad_norm": 0.9510875940322876, "learning_rate": 0.0009115963446120396, "loss": 3.7515, "step": 10415 }, { "epoch": 0.7079766272591385, "grad_norm": 1.1153173446655273, "learning_rate": 0.0009115538796032071, "loss": 3.7876, "step": 10420 }, { "epoch": 0.7083163473298002, "grad_norm": 1.1247469186782837, "learning_rate": 0.0009115114145943743, "loss": 3.7828, "step": 10425 }, { "epoch": 0.708656067400462, "grad_norm": 1.2200671434402466, "learning_rate": 0.0009114689495855415, "loss": 3.9412, "step": 10430 }, { "epoch": 0.7089957874711238, "grad_norm": 0.9485909342765808, "learning_rate": 0.0009114264845767088, "loss": 3.8419, "step": 10435 }, { "epoch": 0.7093355075417855, "grad_norm": 0.9972096681594849, "learning_rate": 0.0009113840195678761, "loss": 3.819, "step": 10440 }, { "epoch": 0.7096752276124474, "grad_norm": 1.0116872787475586, "learning_rate": 0.0009113415545590433, "loss": 3.7771, "step": 10445 }, { "epoch": 0.7100149476831091, "grad_norm": 1.0462998151779175, "learning_rate": 0.0009112990895502106, "loss": 3.6964, "step": 10450 }, { "epoch": 0.7103546677537709, "grad_norm": 1.225805640220642, "learning_rate": 0.000911256624541378, "loss": 3.8353, "step": 10455 }, { "epoch": 0.7106943878244326, "grad_norm": 1.0314103364944458, "learning_rate": 0.0009112141595325452, "loss": 3.66, "step": 10460 }, { "epoch": 0.7110341078950945, "grad_norm": 1.213556170463562, "learning_rate": 0.0009111716945237125, "loss": 3.6433, "step": 10465 }, { "epoch": 0.7113738279657562, "grad_norm": 0.8877604603767395, "learning_rate": 0.0009111292295148798, "loss": 3.8228, "step": 10470 }, { "epoch": 0.711713548036418, "grad_norm": 1.2374861240386963, "learning_rate": 0.000911086764506047, "loss": 3.5836, "step": 10475 }, { "epoch": 
0.7120532681070798, "grad_norm": 1.1741164922714233, "learning_rate": 0.0009110442994972143, "loss": 3.6599, "step": 10480 }, { "epoch": 0.7123929881777415, "grad_norm": 1.3277822732925415, "learning_rate": 0.0009110018344883815, "loss": 3.3841, "step": 10485 }, { "epoch": 0.7127327082484033, "grad_norm": 1.024092197418213, "learning_rate": 0.0009109593694795489, "loss": 3.7042, "step": 10490 }, { "epoch": 0.7130724283190651, "grad_norm": 1.6533769369125366, "learning_rate": 0.0009109169044707162, "loss": 3.7392, "step": 10495 }, { "epoch": 0.7134121483897269, "grad_norm": 1.08949875831604, "learning_rate": 0.0009108744394618834, "loss": 3.6925, "step": 10500 }, { "epoch": 0.7137518684603886, "grad_norm": 1.2491750717163086, "learning_rate": 0.0009108319744530507, "loss": 3.737, "step": 10505 }, { "epoch": 0.7140915885310504, "grad_norm": 0.9591490030288696, "learning_rate": 0.000910789509444218, "loss": 3.923, "step": 10510 }, { "epoch": 0.7144313086017122, "grad_norm": 0.9938195943832397, "learning_rate": 0.0009107470444353852, "loss": 3.5034, "step": 10515 }, { "epoch": 0.714771028672374, "grad_norm": 1.3975765705108643, "learning_rate": 0.0009107045794265524, "loss": 3.5981, "step": 10520 }, { "epoch": 0.7151107487430357, "grad_norm": 1.4399049282073975, "learning_rate": 0.0009106621144177199, "loss": 3.6655, "step": 10525 }, { "epoch": 0.7154504688136976, "grad_norm": 1.0683108568191528, "learning_rate": 0.0009106196494088871, "loss": 3.4655, "step": 10530 }, { "epoch": 0.7157901888843593, "grad_norm": 1.0297884941101074, "learning_rate": 0.0009105771844000543, "loss": 3.7349, "step": 10535 }, { "epoch": 0.716129908955021, "grad_norm": 1.2289235591888428, "learning_rate": 0.0009105347193912217, "loss": 3.7308, "step": 10540 }, { "epoch": 0.7164696290256828, "grad_norm": 1.190564513206482, "learning_rate": 0.0009104922543823889, "loss": 3.5774, "step": 10545 }, { "epoch": 0.7168093490963446, "grad_norm": 1.496804118156433, "learning_rate": 
0.0009104497893735561, "loss": 3.8153, "step": 10550 }, { "epoch": 0.7171490691670064, "grad_norm": 0.9796428680419922, "learning_rate": 0.0009104073243647235, "loss": 3.7528, "step": 10555 }, { "epoch": 0.7174887892376681, "grad_norm": 0.9924182891845703, "learning_rate": 0.0009103648593558908, "loss": 3.7331, "step": 10560 }, { "epoch": 0.71782850930833, "grad_norm": 0.9254345297813416, "learning_rate": 0.000910322394347058, "loss": 3.7892, "step": 10565 }, { "epoch": 0.7181682293789917, "grad_norm": 1.233482837677002, "learning_rate": 0.0009102799293382254, "loss": 3.885, "step": 10570 }, { "epoch": 0.7185079494496535, "grad_norm": 1.2911405563354492, "learning_rate": 0.0009102374643293926, "loss": 3.6368, "step": 10575 }, { "epoch": 0.7188476695203153, "grad_norm": 1.2918637990951538, "learning_rate": 0.0009101949993205598, "loss": 3.6535, "step": 10580 }, { "epoch": 0.719187389590977, "grad_norm": 1.4980357885360718, "learning_rate": 0.0009101525343117271, "loss": 3.4943, "step": 10585 }, { "epoch": 0.7195271096616388, "grad_norm": 1.2384989261627197, "learning_rate": 0.0009101100693028944, "loss": 3.8323, "step": 10590 }, { "epoch": 0.7198668297323005, "grad_norm": 1.4167989492416382, "learning_rate": 0.0009100676042940617, "loss": 3.536, "step": 10595 }, { "epoch": 0.7202065498029624, "grad_norm": 1.0761891603469849, "learning_rate": 0.000910025139285229, "loss": 3.5663, "step": 10600 }, { "epoch": 0.7205462698736241, "grad_norm": 1.1872715950012207, "learning_rate": 0.0009099826742763963, "loss": 3.7554, "step": 10605 }, { "epoch": 0.7208859899442859, "grad_norm": 0.9410343766212463, "learning_rate": 0.0009099402092675636, "loss": 3.6847, "step": 10610 }, { "epoch": 0.7212257100149477, "grad_norm": 1.0036933422088623, "learning_rate": 0.0009098977442587308, "loss": 3.6585, "step": 10615 }, { "epoch": 0.7215654300856095, "grad_norm": 1.2760361433029175, "learning_rate": 0.000909855279249898, "loss": 3.6758, "step": 10620 }, { "epoch": 0.7219051501562712, 
"grad_norm": 1.112778663635254, "learning_rate": 0.0009098128142410654, "loss": 3.8047, "step": 10625 }, { "epoch": 0.722244870226933, "grad_norm": 0.9998484253883362, "learning_rate": 0.0009097703492322327, "loss": 3.7385, "step": 10630 }, { "epoch": 0.7225845902975948, "grad_norm": 1.0411229133605957, "learning_rate": 0.0009097278842233999, "loss": 3.6688, "step": 10635 }, { "epoch": 0.7229243103682566, "grad_norm": 1.0861897468566895, "learning_rate": 0.0009096854192145673, "loss": 3.6013, "step": 10640 }, { "epoch": 0.7232640304389183, "grad_norm": 1.4443111419677734, "learning_rate": 0.0009096429542057345, "loss": 3.6895, "step": 10645 }, { "epoch": 0.7236037505095801, "grad_norm": 1.5542433261871338, "learning_rate": 0.0009096004891969017, "loss": 3.5835, "step": 10650 }, { "epoch": 0.7239434705802419, "grad_norm": 1.1831473112106323, "learning_rate": 0.0009095580241880691, "loss": 3.6367, "step": 10655 }, { "epoch": 0.7242831906509036, "grad_norm": 1.1603373289108276, "learning_rate": 0.0009095155591792363, "loss": 3.3369, "step": 10660 }, { "epoch": 0.7246229107215655, "grad_norm": 1.0624864101409912, "learning_rate": 0.0009094730941704036, "loss": 3.7082, "step": 10665 }, { "epoch": 0.7249626307922272, "grad_norm": 1.1560646295547485, "learning_rate": 0.000909430629161571, "loss": 3.6991, "step": 10670 }, { "epoch": 0.725302350862889, "grad_norm": 1.2560880184173584, "learning_rate": 0.0009093881641527382, "loss": 3.782, "step": 10675 }, { "epoch": 0.7256420709335507, "grad_norm": 1.1399208307266235, "learning_rate": 0.0009093456991439054, "loss": 3.752, "step": 10680 }, { "epoch": 0.7259817910042126, "grad_norm": 1.7158286571502686, "learning_rate": 0.0009093032341350727, "loss": 3.6238, "step": 10685 }, { "epoch": 0.7263215110748743, "grad_norm": 1.063470482826233, "learning_rate": 0.00090926076912624, "loss": 3.7465, "step": 10690 }, { "epoch": 0.726661231145536, "grad_norm": 1.0131163597106934, "learning_rate": 0.0009092183041174072, "loss": 3.7272, 
"step": 10695 }, { "epoch": 0.7270009512161979, "grad_norm": 1.3888128995895386, "learning_rate": 0.0009091758391085746, "loss": 3.8233, "step": 10700 }, { "epoch": 0.7273406712868596, "grad_norm": 1.2252932786941528, "learning_rate": 0.0009091333740997419, "loss": 3.7958, "step": 10705 }, { "epoch": 0.7276803913575214, "grad_norm": 1.2386016845703125, "learning_rate": 0.0009090909090909091, "loss": 3.8987, "step": 10710 }, { "epoch": 0.7280201114281831, "grad_norm": 0.9655929207801819, "learning_rate": 0.0009090484440820764, "loss": 3.7963, "step": 10715 }, { "epoch": 0.728359831498845, "grad_norm": 1.361639142036438, "learning_rate": 0.0009090059790732437, "loss": 3.5227, "step": 10720 }, { "epoch": 0.7286995515695067, "grad_norm": 0.9149715900421143, "learning_rate": 0.0009089635140644109, "loss": 3.7274, "step": 10725 }, { "epoch": 0.7290392716401685, "grad_norm": 1.0652536153793335, "learning_rate": 0.0009089210490555782, "loss": 3.5193, "step": 10730 }, { "epoch": 0.7293789917108303, "grad_norm": 1.1093122959136963, "learning_rate": 0.0009088785840467455, "loss": 3.7856, "step": 10735 }, { "epoch": 0.7297187117814921, "grad_norm": 1.0428962707519531, "learning_rate": 0.0009088361190379128, "loss": 3.879, "step": 10740 }, { "epoch": 0.7300584318521538, "grad_norm": 1.1360379457473755, "learning_rate": 0.0009087936540290801, "loss": 3.5142, "step": 10745 }, { "epoch": 0.7303981519228157, "grad_norm": 1.4411098957061768, "learning_rate": 0.0009087511890202473, "loss": 3.7005, "step": 10750 }, { "epoch": 0.7307378719934774, "grad_norm": 1.176649808883667, "learning_rate": 0.0009087087240114146, "loss": 3.6262, "step": 10755 }, { "epoch": 0.7310775920641391, "grad_norm": 1.228650450706482, "learning_rate": 0.0009086662590025819, "loss": 3.8506, "step": 10760 }, { "epoch": 0.7314173121348009, "grad_norm": 1.2642097473144531, "learning_rate": 0.0009086237939937491, "loss": 3.7342, "step": 10765 }, { "epoch": 0.7317570322054627, "grad_norm": 1.1342964172363281, 
"learning_rate": 0.0009085813289849165, "loss": 3.572, "step": 10770 }, { "epoch": 0.7320967522761245, "grad_norm": 1.0598485469818115, "learning_rate": 0.0009085388639760838, "loss": 3.856, "step": 10775 }, { "epoch": 0.7324364723467862, "grad_norm": 1.0123907327651978, "learning_rate": 0.000908496398967251, "loss": 3.6587, "step": 10780 }, { "epoch": 0.7327761924174481, "grad_norm": 1.0982228517532349, "learning_rate": 0.0009084539339584182, "loss": 3.7848, "step": 10785 }, { "epoch": 0.7331159124881098, "grad_norm": 1.112412929534912, "learning_rate": 0.0009084114689495856, "loss": 3.5337, "step": 10790 }, { "epoch": 0.7334556325587716, "grad_norm": 1.3715084791183472, "learning_rate": 0.0009083690039407528, "loss": 3.6316, "step": 10795 }, { "epoch": 0.7337953526294333, "grad_norm": 1.2828166484832764, "learning_rate": 0.00090832653893192, "loss": 3.5702, "step": 10800 }, { "epoch": 0.7341350727000951, "grad_norm": 1.493672490119934, "learning_rate": 0.0009082840739230875, "loss": 3.7955, "step": 10805 }, { "epoch": 0.7344747927707569, "grad_norm": 1.1877272129058838, "learning_rate": 0.0009082416089142547, "loss": 3.7724, "step": 10810 }, { "epoch": 0.7348145128414186, "grad_norm": 1.343377947807312, "learning_rate": 0.0009081991439054219, "loss": 3.7382, "step": 10815 }, { "epoch": 0.7351542329120805, "grad_norm": 0.9453116655349731, "learning_rate": 0.0009081566788965893, "loss": 3.6337, "step": 10820 }, { "epoch": 0.7354939529827422, "grad_norm": 1.0724432468414307, "learning_rate": 0.0009081142138877565, "loss": 3.7748, "step": 10825 }, { "epoch": 0.735833673053404, "grad_norm": 1.2012907266616821, "learning_rate": 0.0009080717488789237, "loss": 3.6086, "step": 10830 }, { "epoch": 0.7361733931240658, "grad_norm": 1.1293063163757324, "learning_rate": 0.0009080292838700911, "loss": 3.7539, "step": 10835 }, { "epoch": 0.7365131131947276, "grad_norm": 1.1230089664459229, "learning_rate": 0.0009079868188612584, "loss": 3.5804, "step": 10840 }, { "epoch": 
0.7368528332653893, "grad_norm": 1.2838736772537231, "learning_rate": 0.0009079443538524256, "loss": 3.5673, "step": 10845 }, { "epoch": 0.737192553336051, "grad_norm": 1.1381614208221436, "learning_rate": 0.0009079018888435929, "loss": 3.7498, "step": 10850 }, { "epoch": 0.7375322734067129, "grad_norm": 1.0910000801086426, "learning_rate": 0.0009078594238347602, "loss": 3.623, "step": 10855 }, { "epoch": 0.7378719934773746, "grad_norm": 1.0418381690979004, "learning_rate": 0.0009078169588259274, "loss": 3.7647, "step": 10860 }, { "epoch": 0.7382117135480364, "grad_norm": 0.9507369995117188, "learning_rate": 0.0009077744938170947, "loss": 3.828, "step": 10865 }, { "epoch": 0.7385514336186982, "grad_norm": 1.0548641681671143, "learning_rate": 0.000907732028808262, "loss": 3.6276, "step": 10870 }, { "epoch": 0.73889115368936, "grad_norm": 1.4218403100967407, "learning_rate": 0.0009076895637994293, "loss": 3.7077, "step": 10875 }, { "epoch": 0.7392308737600217, "grad_norm": 1.1129872798919678, "learning_rate": 0.0009076470987905966, "loss": 3.6882, "step": 10880 }, { "epoch": 0.7395705938306835, "grad_norm": 1.0615687370300293, "learning_rate": 0.0009076046337817638, "loss": 3.6767, "step": 10885 }, { "epoch": 0.7399103139013453, "grad_norm": 1.2654942274093628, "learning_rate": 0.0009075621687729311, "loss": 3.6052, "step": 10890 }, { "epoch": 0.7402500339720071, "grad_norm": 1.215562343597412, "learning_rate": 0.0009075197037640984, "loss": 3.6489, "step": 10895 }, { "epoch": 0.7405897540426688, "grad_norm": 1.131305456161499, "learning_rate": 0.0009074772387552656, "loss": 3.5813, "step": 10900 }, { "epoch": 0.7409294741133307, "grad_norm": 1.0731620788574219, "learning_rate": 0.000907434773746433, "loss": 3.7562, "step": 10905 }, { "epoch": 0.7412691941839924, "grad_norm": 1.0951396226882935, "learning_rate": 0.0009073923087376003, "loss": 3.7055, "step": 10910 }, { "epoch": 0.7416089142546541, "grad_norm": 0.9408296942710876, "learning_rate": 
0.0009073498437287675, "loss": 3.9053, "step": 10915 }, { "epoch": 0.741948634325316, "grad_norm": 1.4325203895568848, "learning_rate": 0.0009073073787199347, "loss": 3.619, "step": 10920 }, { "epoch": 0.7422883543959777, "grad_norm": 1.2051990032196045, "learning_rate": 0.0009072649137111021, "loss": 3.6124, "step": 10925 }, { "epoch": 0.7426280744666395, "grad_norm": 1.486413598060608, "learning_rate": 0.0009072224487022693, "loss": 3.7454, "step": 10930 }, { "epoch": 0.7429677945373012, "grad_norm": 1.3523499965667725, "learning_rate": 0.0009071799836934365, "loss": 3.5866, "step": 10935 }, { "epoch": 0.7433075146079631, "grad_norm": 1.4687514305114746, "learning_rate": 0.000907137518684604, "loss": 3.6168, "step": 10940 }, { "epoch": 0.7436472346786248, "grad_norm": 1.0269639492034912, "learning_rate": 0.0009070950536757712, "loss": 3.5946, "step": 10945 }, { "epoch": 0.7439869547492866, "grad_norm": 1.0905097723007202, "learning_rate": 0.0009070525886669385, "loss": 4.055, "step": 10950 }, { "epoch": 0.7443266748199484, "grad_norm": 1.0651636123657227, "learning_rate": 0.0009070101236581058, "loss": 3.4904, "step": 10955 }, { "epoch": 0.7446663948906102, "grad_norm": 0.9466719031333923, "learning_rate": 0.000906967658649273, "loss": 3.5848, "step": 10960 }, { "epoch": 0.7450061149612719, "grad_norm": 1.105934977531433, "learning_rate": 0.0009069251936404403, "loss": 3.8375, "step": 10965 }, { "epoch": 0.7453458350319336, "grad_norm": 1.7780436277389526, "learning_rate": 0.0009068827286316075, "loss": 3.5217, "step": 10970 }, { "epoch": 0.7456855551025955, "grad_norm": 1.1351090669631958, "learning_rate": 0.0009068402636227749, "loss": 3.7455, "step": 10975 }, { "epoch": 0.7460252751732572, "grad_norm": 1.3616564273834229, "learning_rate": 0.0009067977986139422, "loss": 3.5539, "step": 10980 }, { "epoch": 0.746364995243919, "grad_norm": 1.1142597198486328, "learning_rate": 0.0009067553336051094, "loss": 3.74, "step": 10985 }, { "epoch": 0.7467047153145808, 
"grad_norm": 1.094972848892212, "learning_rate": 0.0009067128685962767, "loss": 3.5657, "step": 10990 }, { "epoch": 0.7470444353852426, "grad_norm": 0.9758894443511963, "learning_rate": 0.000906670403587444, "loss": 3.3883, "step": 10995 }, { "epoch": 0.7473841554559043, "grad_norm": 1.0405532121658325, "learning_rate": 0.0009066279385786112, "loss": 3.4657, "step": 11000 }, { "epoch": 0.7477238755265662, "grad_norm": 1.2233445644378662, "learning_rate": 0.0009065854735697785, "loss": 3.5723, "step": 11005 }, { "epoch": 0.7480635955972279, "grad_norm": 1.297682762145996, "learning_rate": 0.0009065430085609459, "loss": 3.6222, "step": 11010 }, { "epoch": 0.7484033156678896, "grad_norm": 1.1441024541854858, "learning_rate": 0.0009065005435521131, "loss": 3.3587, "step": 11015 }, { "epoch": 0.7487430357385514, "grad_norm": 1.1430257558822632, "learning_rate": 0.0009064580785432803, "loss": 3.7525, "step": 11020 }, { "epoch": 0.7490827558092132, "grad_norm": 1.0587873458862305, "learning_rate": 0.0009064156135344477, "loss": 3.6581, "step": 11025 }, { "epoch": 0.749422475879875, "grad_norm": 2.5622007846832275, "learning_rate": 0.0009063731485256149, "loss": 3.6758, "step": 11030 }, { "epoch": 0.7497621959505367, "grad_norm": 1.1237140893936157, "learning_rate": 0.0009063306835167821, "loss": 3.721, "step": 11035 }, { "epoch": 0.7501019160211986, "grad_norm": 1.0980969667434692, "learning_rate": 0.0009062882185079495, "loss": 3.8066, "step": 11040 }, { "epoch": 0.7504416360918603, "grad_norm": 0.8904997110366821, "learning_rate": 0.0009062457534991168, "loss": 3.7026, "step": 11045 }, { "epoch": 0.7507813561625221, "grad_norm": 1.2440863847732544, "learning_rate": 0.000906203288490284, "loss": 3.7098, "step": 11050 }, { "epoch": 0.7511210762331838, "grad_norm": 1.2513766288757324, "learning_rate": 0.0009061608234814514, "loss": 3.402, "step": 11055 }, { "epoch": 0.7514607963038457, "grad_norm": 1.1792038679122925, "learning_rate": 0.0009061183584726186, "loss": 3.7254, 
"step": 11060 }, { "epoch": 0.7518005163745074, "grad_norm": 1.2177377939224243, "learning_rate": 0.0009060758934637858, "loss": 3.8562, "step": 11065 }, { "epoch": 0.7521402364451691, "grad_norm": 1.455505609512329, "learning_rate": 0.0009060334284549531, "loss": 3.57, "step": 11070 }, { "epoch": 0.752479956515831, "grad_norm": 1.0231338739395142, "learning_rate": 0.0009059909634461204, "loss": 3.7087, "step": 11075 }, { "epoch": 0.7528196765864927, "grad_norm": 0.9604082703590393, "learning_rate": 0.0009059484984372877, "loss": 3.667, "step": 11080 }, { "epoch": 0.7531593966571545, "grad_norm": 1.4608066082000732, "learning_rate": 0.000905906033428455, "loss": 3.513, "step": 11085 }, { "epoch": 0.7534991167278163, "grad_norm": 1.5895240306854248, "learning_rate": 0.0009058635684196223, "loss": 3.6407, "step": 11090 }, { "epoch": 0.7538388367984781, "grad_norm": 0.9389051198959351, "learning_rate": 0.0009058211034107895, "loss": 3.7863, "step": 11095 }, { "epoch": 0.7541785568691398, "grad_norm": 1.0466972589492798, "learning_rate": 0.0009057786384019568, "loss": 3.5198, "step": 11100 }, { "epoch": 0.7545182769398016, "grad_norm": 0.9516034722328186, "learning_rate": 0.000905736173393124, "loss": 3.6855, "step": 11105 }, { "epoch": 0.7548579970104634, "grad_norm": 1.0485482215881348, "learning_rate": 0.0009056937083842913, "loss": 3.56, "step": 11110 }, { "epoch": 0.7551977170811252, "grad_norm": 1.1238075494766235, "learning_rate": 0.0009056512433754587, "loss": 3.6037, "step": 11115 }, { "epoch": 0.7555374371517869, "grad_norm": 1.1570730209350586, "learning_rate": 0.000905608778366626, "loss": 3.6547, "step": 11120 }, { "epoch": 0.7558771572224487, "grad_norm": 0.9960611462593079, "learning_rate": 0.0009055663133577932, "loss": 3.7893, "step": 11125 }, { "epoch": 0.7562168772931105, "grad_norm": 1.1161993741989136, "learning_rate": 0.0009055238483489605, "loss": 3.7324, "step": 11130 }, { "epoch": 0.7565565973637722, "grad_norm": 1.1473573446273804, 
"learning_rate": 0.0009054898763418943, "loss": 3.8671, "step": 11135 }, { "epoch": 0.756896317434434, "grad_norm": 0.9488974809646606, "learning_rate": 0.0009054474113330616, "loss": 3.7608, "step": 11140 }, { "epoch": 0.7572360375050958, "grad_norm": 1.3159146308898926, "learning_rate": 0.0009054049463242289, "loss": 3.4452, "step": 11145 }, { "epoch": 0.7575757575757576, "grad_norm": 1.3849284648895264, "learning_rate": 0.0009053624813153961, "loss": 3.9536, "step": 11150 }, { "epoch": 0.7579154776464193, "grad_norm": 1.4052455425262451, "learning_rate": 0.0009053200163065635, "loss": 3.71, "step": 11155 }, { "epoch": 0.7582551977170812, "grad_norm": 3.777299404144287, "learning_rate": 0.0009052775512977307, "loss": 3.6854, "step": 11160 }, { "epoch": 0.7585949177877429, "grad_norm": 1.4338997602462769, "learning_rate": 0.0009052350862888979, "loss": 3.459, "step": 11165 }, { "epoch": 0.7589346378584046, "grad_norm": 1.1901211738586426, "learning_rate": 0.0009051926212800653, "loss": 3.5148, "step": 11170 }, { "epoch": 0.7592743579290665, "grad_norm": 1.209290623664856, "learning_rate": 0.0009051501562712326, "loss": 3.6951, "step": 11175 }, { "epoch": 0.7596140779997282, "grad_norm": 1.7000969648361206, "learning_rate": 0.0009051076912623998, "loss": 3.4824, "step": 11180 }, { "epoch": 0.75995379807039, "grad_norm": 1.0550355911254883, "learning_rate": 0.0009050652262535671, "loss": 3.4544, "step": 11185 }, { "epoch": 0.7602935181410517, "grad_norm": 1.0165150165557861, "learning_rate": 0.0009050227612447344, "loss": 3.7255, "step": 11190 }, { "epoch": 0.7606332382117136, "grad_norm": 0.9583268165588379, "learning_rate": 0.0009049802962359016, "loss": 3.7432, "step": 11195 }, { "epoch": 0.7609729582823753, "grad_norm": 1.2435275316238403, "learning_rate": 0.0009049378312270689, "loss": 3.5758, "step": 11200 }, { "epoch": 0.7613126783530371, "grad_norm": 1.156388759613037, "learning_rate": 0.0009048953662182363, "loss": 3.7004, "step": 11205 }, { "epoch": 
0.7616523984236989, "grad_norm": 1.2347761392593384, "learning_rate": 0.0009048529012094035, "loss": 3.6729, "step": 11210 }, { "epoch": 0.7619921184943607, "grad_norm": 1.1360732316970825, "learning_rate": 0.0009048104362005708, "loss": 3.5449, "step": 11215 }, { "epoch": 0.7623318385650224, "grad_norm": 1.069248914718628, "learning_rate": 0.000904767971191738, "loss": 3.7739, "step": 11220 }, { "epoch": 0.7626715586356841, "grad_norm": 1.0236073732376099, "learning_rate": 0.0009047255061829053, "loss": 3.5351, "step": 11225 }, { "epoch": 0.763011278706346, "grad_norm": 0.9823689460754395, "learning_rate": 0.0009046830411740726, "loss": 3.8237, "step": 11230 }, { "epoch": 0.7633509987770077, "grad_norm": 1.170551061630249, "learning_rate": 0.0009046405761652398, "loss": 3.7444, "step": 11235 }, { "epoch": 0.7636907188476695, "grad_norm": 1.1160615682601929, "learning_rate": 0.0009045981111564072, "loss": 3.6882, "step": 11240 }, { "epoch": 0.7640304389183313, "grad_norm": 1.163201093673706, "learning_rate": 0.0009045556461475745, "loss": 3.3551, "step": 11245 }, { "epoch": 0.7643701589889931, "grad_norm": 1.1081149578094482, "learning_rate": 0.0009045131811387417, "loss": 3.5626, "step": 11250 }, { "epoch": 0.7647098790596548, "grad_norm": 1.0091444253921509, "learning_rate": 0.0009044707161299089, "loss": 3.8894, "step": 11255 }, { "epoch": 0.7650495991303167, "grad_norm": 0.9822831749916077, "learning_rate": 0.0009044282511210763, "loss": 3.762, "step": 11260 }, { "epoch": 0.7653893192009784, "grad_norm": 1.0057791471481323, "learning_rate": 0.0009043857861122435, "loss": 3.6533, "step": 11265 }, { "epoch": 0.7657290392716402, "grad_norm": 1.4051861763000488, "learning_rate": 0.0009043433211034107, "loss": 3.7396, "step": 11270 }, { "epoch": 0.7660687593423019, "grad_norm": 1.374118447303772, "learning_rate": 0.0009043008560945782, "loss": 3.7212, "step": 11275 }, { "epoch": 0.7664084794129638, "grad_norm": 1.2433252334594727, "learning_rate": 
0.0009042583910857454, "loss": 3.6493, "step": 11280 }, { "epoch": 0.7667481994836255, "grad_norm": 1.2564690113067627, "learning_rate": 0.0009042159260769126, "loss": 3.5556, "step": 11285 }, { "epoch": 0.7670879195542872, "grad_norm": 1.0543134212493896, "learning_rate": 0.00090417346106808, "loss": 3.7001, "step": 11290 }, { "epoch": 0.7674276396249491, "grad_norm": 1.0506154298782349, "learning_rate": 0.0009041309960592472, "loss": 3.5031, "step": 11295 }, { "epoch": 0.7677673596956108, "grad_norm": 0.8771756291389465, "learning_rate": 0.0009040885310504144, "loss": 3.6515, "step": 11300 }, { "epoch": 0.7681070797662726, "grad_norm": 1.185825228691101, "learning_rate": 0.0009040460660415817, "loss": 3.5576, "step": 11305 }, { "epoch": 0.7684467998369343, "grad_norm": 1.2985180616378784, "learning_rate": 0.0009040036010327491, "loss": 3.7517, "step": 11310 }, { "epoch": 0.7687865199075962, "grad_norm": 1.3406376838684082, "learning_rate": 0.0009039611360239163, "loss": 3.6085, "step": 11315 }, { "epoch": 0.7691262399782579, "grad_norm": 1.1287217140197754, "learning_rate": 0.0009039186710150836, "loss": 3.8081, "step": 11320 }, { "epoch": 0.7694659600489197, "grad_norm": 1.2906699180603027, "learning_rate": 0.0009038762060062509, "loss": 3.5695, "step": 11325 }, { "epoch": 0.7698056801195815, "grad_norm": 1.3657374382019043, "learning_rate": 0.0009038337409974181, "loss": 3.5478, "step": 11330 }, { "epoch": 0.7701454001902432, "grad_norm": 1.7384371757507324, "learning_rate": 0.0009037912759885854, "loss": 3.4622, "step": 11335 }, { "epoch": 0.770485120260905, "grad_norm": 1.2641595602035522, "learning_rate": 0.0009037488109797527, "loss": 3.3506, "step": 11340 }, { "epoch": 0.7708248403315668, "grad_norm": 1.3427014350891113, "learning_rate": 0.00090370634597092, "loss": 3.7192, "step": 11345 }, { "epoch": 0.7711645604022286, "grad_norm": 0.957493782043457, "learning_rate": 0.0009036638809620873, "loss": 3.6279, "step": 11350 }, { "epoch": 0.7715042804728903, 
"grad_norm": 1.4218937158584595, "learning_rate": 0.0009036214159532545, "loss": 3.5062, "step": 11355 }, { "epoch": 0.7718440005435521, "grad_norm": 1.3693535327911377, "learning_rate": 0.0009035789509444218, "loss": 3.9516, "step": 11360 }, { "epoch": 0.7721837206142139, "grad_norm": 0.8733910918235779, "learning_rate": 0.0009035364859355891, "loss": 3.7283, "step": 11365 }, { "epoch": 0.7725234406848757, "grad_norm": 1.1885744333267212, "learning_rate": 0.0009034940209267563, "loss": 3.8657, "step": 11370 }, { "epoch": 0.7728631607555374, "grad_norm": 1.5532022714614868, "learning_rate": 0.0009034515559179236, "loss": 3.567, "step": 11375 }, { "epoch": 0.7732028808261993, "grad_norm": 1.2669931650161743, "learning_rate": 0.000903409090909091, "loss": 3.4909, "step": 11380 }, { "epoch": 0.773542600896861, "grad_norm": 1.2827540636062622, "learning_rate": 0.0009033666259002582, "loss": 3.7015, "step": 11385 }, { "epoch": 0.7738823209675227, "grad_norm": 1.302189826965332, "learning_rate": 0.0009033241608914255, "loss": 3.8404, "step": 11390 }, { "epoch": 0.7742220410381845, "grad_norm": 1.437527060508728, "learning_rate": 0.0009032816958825928, "loss": 3.7112, "step": 11395 }, { "epoch": 0.7745617611088463, "grad_norm": 1.0676789283752441, "learning_rate": 0.00090323923087376, "loss": 3.6074, "step": 11400 }, { "epoch": 0.7749014811795081, "grad_norm": 1.1182674169540405, "learning_rate": 0.0009031967658649272, "loss": 3.7564, "step": 11405 }, { "epoch": 0.7752412012501698, "grad_norm": 1.059097170829773, "learning_rate": 0.0009031543008560946, "loss": 3.9363, "step": 11410 }, { "epoch": 0.7755809213208317, "grad_norm": 1.5001918077468872, "learning_rate": 0.0009031118358472619, "loss": 3.7363, "step": 11415 }, { "epoch": 0.7759206413914934, "grad_norm": 1.5209766626358032, "learning_rate": 0.0009030693708384291, "loss": 3.8811, "step": 11420 }, { "epoch": 0.7762603614621552, "grad_norm": 1.3660088777542114, "learning_rate": 0.0009030269058295965, "loss": 3.3746, 
"step": 11425 }, { "epoch": 0.776600081532817, "grad_norm": 1.2472984790802002, "learning_rate": 0.0009029844408207637, "loss": 3.407, "step": 11430 }, { "epoch": 0.7769398016034788, "grad_norm": 1.1346116065979004, "learning_rate": 0.0009029419758119309, "loss": 3.5955, "step": 11435 }, { "epoch": 0.7772795216741405, "grad_norm": 1.200441837310791, "learning_rate": 0.0009028995108030983, "loss": 3.7032, "step": 11440 }, { "epoch": 0.7776192417448022, "grad_norm": 1.1271647214889526, "learning_rate": 0.0009028570457942655, "loss": 3.4746, "step": 11445 }, { "epoch": 0.7779589618154641, "grad_norm": 1.4868184328079224, "learning_rate": 0.0009028145807854328, "loss": 3.6765, "step": 11450 }, { "epoch": 0.7782986818861258, "grad_norm": 1.2059701681137085, "learning_rate": 0.0009027721157766001, "loss": 3.8665, "step": 11455 }, { "epoch": 0.7786384019567876, "grad_norm": 0.9802495837211609, "learning_rate": 0.0009027296507677674, "loss": 3.4315, "step": 11460 }, { "epoch": 0.7789781220274494, "grad_norm": 1.0098767280578613, "learning_rate": 0.0009026871857589346, "loss": 3.8444, "step": 11465 }, { "epoch": 0.7793178420981112, "grad_norm": 1.1583572626113892, "learning_rate": 0.0009026447207501019, "loss": 3.6527, "step": 11470 }, { "epoch": 0.7796575621687729, "grad_norm": 1.183027744293213, "learning_rate": 0.0009026022557412692, "loss": 3.7534, "step": 11475 }, { "epoch": 0.7799972822394347, "grad_norm": 1.1439937353134155, "learning_rate": 0.0009025597907324364, "loss": 3.657, "step": 11480 }, { "epoch": 0.7803370023100965, "grad_norm": 1.029974102973938, "learning_rate": 0.0009025173257236038, "loss": 3.5886, "step": 11485 }, { "epoch": 0.7806767223807582, "grad_norm": 1.4456583261489868, "learning_rate": 0.0009024748607147711, "loss": 3.5779, "step": 11490 }, { "epoch": 0.78101644245142, "grad_norm": 1.0656052827835083, "learning_rate": 0.0009024323957059384, "loss": 3.7103, "step": 11495 }, { "epoch": 0.7813561625220818, "grad_norm": 1.2906185388565063, 
"learning_rate": 0.0009023899306971056, "loss": 3.31, "step": 11500 }, { "epoch": 0.7816958825927436, "grad_norm": 1.031654715538025, "learning_rate": 0.0009023474656882728, "loss": 3.5758, "step": 11505 }, { "epoch": 0.7820356026634053, "grad_norm": 1.0310009717941284, "learning_rate": 0.0009023050006794402, "loss": 3.8984, "step": 11510 }, { "epoch": 0.7823753227340672, "grad_norm": 1.118891716003418, "learning_rate": 0.0009022625356706074, "loss": 3.6931, "step": 11515 }, { "epoch": 0.7827150428047289, "grad_norm": 1.1372493505477905, "learning_rate": 0.0009022200706617747, "loss": 3.7807, "step": 11520 }, { "epoch": 0.7830547628753907, "grad_norm": 1.1583126783370972, "learning_rate": 0.0009021776056529421, "loss": 3.9112, "step": 11525 }, { "epoch": 0.7833944829460524, "grad_norm": 1.1455066204071045, "learning_rate": 0.0009021351406441093, "loss": 3.5995, "step": 11530 }, { "epoch": 0.7837342030167143, "grad_norm": 1.181458830833435, "learning_rate": 0.0009020926756352765, "loss": 3.4789, "step": 11535 }, { "epoch": 0.784073923087376, "grad_norm": 0.976072371006012, "learning_rate": 0.0009020502106264439, "loss": 3.8698, "step": 11540 }, { "epoch": 0.7844136431580377, "grad_norm": 1.1924517154693604, "learning_rate": 0.0009020077456176111, "loss": 3.6725, "step": 11545 }, { "epoch": 0.7847533632286996, "grad_norm": 6.294796943664551, "learning_rate": 0.0009019652806087783, "loss": 3.7489, "step": 11550 }, { "epoch": 0.7850930832993613, "grad_norm": 1.0683611631393433, "learning_rate": 0.0009019228155999458, "loss": 3.6468, "step": 11555 }, { "epoch": 0.7854328033700231, "grad_norm": 1.1752569675445557, "learning_rate": 0.000901880350591113, "loss": 3.8018, "step": 11560 }, { "epoch": 0.7857725234406848, "grad_norm": 1.5400934219360352, "learning_rate": 0.0009018378855822802, "loss": 3.6147, "step": 11565 }, { "epoch": 0.7861122435113467, "grad_norm": 1.5856702327728271, "learning_rate": 0.0009017954205734475, "loss": 3.4662, "step": 11570 }, { "epoch": 
0.7864519635820084, "grad_norm": 2.1892056465148926, "learning_rate": 0.0009017529555646148, "loss": 3.5819, "step": 11575 }, { "epoch": 0.7867916836526702, "grad_norm": 1.232287049293518, "learning_rate": 0.000901710490555782, "loss": 3.9787, "step": 11580 }, { "epoch": 0.787131403723332, "grad_norm": 1.0638782978057861, "learning_rate": 0.0009016680255469494, "loss": 3.6003, "step": 11585 }, { "epoch": 0.7874711237939938, "grad_norm": 0.9167444705963135, "learning_rate": 0.0009016255605381167, "loss": 3.6511, "step": 11590 }, { "epoch": 0.7878108438646555, "grad_norm": 1.1392573118209839, "learning_rate": 0.0009015830955292839, "loss": 3.601, "step": 11595 }, { "epoch": 0.7881505639353173, "grad_norm": 1.05461585521698, "learning_rate": 0.0009015406305204512, "loss": 3.5533, "step": 11600 }, { "epoch": 0.7884902840059791, "grad_norm": 1.374056100845337, "learning_rate": 0.0009014981655116184, "loss": 3.8827, "step": 11605 }, { "epoch": 0.7888300040766408, "grad_norm": 1.2914804220199585, "learning_rate": 0.0009014557005027857, "loss": 3.8969, "step": 11610 }, { "epoch": 0.7891697241473026, "grad_norm": 1.26808762550354, "learning_rate": 0.000901413235493953, "loss": 3.4839, "step": 11615 }, { "epoch": 0.7895094442179644, "grad_norm": 1.2681502103805542, "learning_rate": 0.0009013707704851203, "loss": 3.5781, "step": 11620 }, { "epoch": 0.7898491642886262, "grad_norm": 1.1382930278778076, "learning_rate": 0.0009013283054762876, "loss": 3.555, "step": 11625 }, { "epoch": 0.7901888843592879, "grad_norm": 1.214998483657837, "learning_rate": 0.0009012858404674549, "loss": 3.6632, "step": 11630 }, { "epoch": 0.7905286044299498, "grad_norm": 1.164186716079712, "learning_rate": 0.0009012433754586221, "loss": 3.6151, "step": 11635 }, { "epoch": 0.7908683245006115, "grad_norm": 1.0948787927627563, "learning_rate": 0.0009012009104497893, "loss": 3.8284, "step": 11640 }, { "epoch": 0.7912080445712733, "grad_norm": 1.154451608657837, "learning_rate": 0.0009011584454409567, 
"loss": 3.4241, "step": 11645 }, { "epoch": 0.791547764641935, "grad_norm": 1.137039065361023, "learning_rate": 0.0009011159804321239, "loss": 3.6905, "step": 11650 }, { "epoch": 0.7918874847125968, "grad_norm": 1.4206236600875854, "learning_rate": 0.0009010735154232912, "loss": 3.7211, "step": 11655 }, { "epoch": 0.7922272047832586, "grad_norm": 1.3741389513015747, "learning_rate": 0.0009010310504144586, "loss": 3.757, "step": 11660 }, { "epoch": 0.7925669248539203, "grad_norm": 1.3395718336105347, "learning_rate": 0.0009009885854056258, "loss": 3.6065, "step": 11665 }, { "epoch": 0.7929066449245822, "grad_norm": 1.1754305362701416, "learning_rate": 0.000900946120396793, "loss": 3.6222, "step": 11670 }, { "epoch": 0.7932463649952439, "grad_norm": 1.1503872871398926, "learning_rate": 0.0009009036553879604, "loss": 3.6391, "step": 11675 }, { "epoch": 0.7935860850659057, "grad_norm": 1.0430324077606201, "learning_rate": 0.0009008611903791276, "loss": 3.7079, "step": 11680 }, { "epoch": 0.7939258051365675, "grad_norm": 1.2230489253997803, "learning_rate": 0.0009008187253702948, "loss": 3.9934, "step": 11685 }, { "epoch": 0.7942655252072293, "grad_norm": 1.1371499300003052, "learning_rate": 0.0009007762603614623, "loss": 3.4855, "step": 11690 }, { "epoch": 0.794605245277891, "grad_norm": 1.1212841272354126, "learning_rate": 0.0009007337953526295, "loss": 3.7412, "step": 11695 }, { "epoch": 0.7949449653485527, "grad_norm": 1.0659664869308472, "learning_rate": 0.0009006913303437967, "loss": 3.6605, "step": 11700 }, { "epoch": 0.7952846854192146, "grad_norm": 1.6921460628509521, "learning_rate": 0.000900648865334964, "loss": 3.4822, "step": 11705 }, { "epoch": 0.7956244054898763, "grad_norm": 1.050740122795105, "learning_rate": 0.0009006064003261313, "loss": 3.458, "step": 11710 }, { "epoch": 0.7959641255605381, "grad_norm": 0.9140154719352722, "learning_rate": 0.0009005639353172985, "loss": 3.7847, "step": 11715 }, { "epoch": 0.7963038456311999, "grad_norm": 
1.1320194005966187, "learning_rate": 0.0009005214703084658, "loss": 3.758, "step": 11720 }, { "epoch": 0.7966435657018617, "grad_norm": 1.0618723630905151, "learning_rate": 0.0009004790052996332, "loss": 3.6459, "step": 11725 }, { "epoch": 0.7969832857725234, "grad_norm": 1.1878490447998047, "learning_rate": 0.0009004365402908004, "loss": 3.7719, "step": 11730 }, { "epoch": 0.7973230058431852, "grad_norm": 1.2015661001205444, "learning_rate": 0.0009003940752819677, "loss": 3.6658, "step": 11735 }, { "epoch": 0.797662725913847, "grad_norm": 1.1709516048431396, "learning_rate": 0.000900351610273135, "loss": 3.4725, "step": 11740 }, { "epoch": 0.7980024459845088, "grad_norm": 1.2089287042617798, "learning_rate": 0.0009003091452643022, "loss": 3.6459, "step": 11745 }, { "epoch": 0.7983421660551705, "grad_norm": 1.4979184865951538, "learning_rate": 0.0009002666802554695, "loss": 3.7923, "step": 11750 }, { "epoch": 0.7986818861258324, "grad_norm": 1.088310718536377, "learning_rate": 0.0009002242152466367, "loss": 3.3264, "step": 11755 }, { "epoch": 0.7990216061964941, "grad_norm": 1.5467205047607422, "learning_rate": 0.0009001817502378041, "loss": 3.6988, "step": 11760 }, { "epoch": 0.7993613262671558, "grad_norm": 1.2216379642486572, "learning_rate": 0.0009001392852289714, "loss": 3.7061, "step": 11765 }, { "epoch": 0.7997010463378177, "grad_norm": 0.8465067744255066, "learning_rate": 0.0009000968202201386, "loss": 3.7354, "step": 11770 }, { "epoch": 0.8000407664084794, "grad_norm": 0.8909204602241516, "learning_rate": 0.0009000543552113059, "loss": 3.5501, "step": 11775 }, { "epoch": 0.8003804864791412, "grad_norm": 1.0386770963668823, "learning_rate": 0.0009000118902024732, "loss": 3.8725, "step": 11780 }, { "epoch": 0.8007202065498029, "grad_norm": 1.2358222007751465, "learning_rate": 0.0008999694251936404, "loss": 3.8545, "step": 11785 }, { "epoch": 0.8010599266204648, "grad_norm": 1.0000734329223633, "learning_rate": 0.0008999269601848076, "loss": 3.375, "step": 
11790 }, { "epoch": 0.8013996466911265, "grad_norm": 1.1553330421447754, "learning_rate": 0.0008998844951759751, "loss": 3.7099, "step": 11795 }, { "epoch": 0.8017393667617883, "grad_norm": 1.3177491426467896, "learning_rate": 0.0008998420301671423, "loss": 3.8077, "step": 11800 }, { "epoch": 0.8020790868324501, "grad_norm": 1.28020441532135, "learning_rate": 0.0008997995651583095, "loss": 3.9512, "step": 11805 }, { "epoch": 0.8024188069031118, "grad_norm": 1.3557357788085938, "learning_rate": 0.0008997571001494769, "loss": 3.446, "step": 11810 }, { "epoch": 0.8027585269737736, "grad_norm": 1.5354830026626587, "learning_rate": 0.0008997146351406441, "loss": 3.5577, "step": 11815 }, { "epoch": 0.8030982470444353, "grad_norm": 1.260514259338379, "learning_rate": 0.0008996721701318113, "loss": 3.6492, "step": 11820 }, { "epoch": 0.8034379671150972, "grad_norm": 1.1649789810180664, "learning_rate": 0.0008996297051229787, "loss": 3.8224, "step": 11825 }, { "epoch": 0.8037776871857589, "grad_norm": 1.0528123378753662, "learning_rate": 0.000899587240114146, "loss": 3.7359, "step": 11830 }, { "epoch": 0.8041174072564207, "grad_norm": 1.0877418518066406, "learning_rate": 0.0008995447751053133, "loss": 3.759, "step": 11835 }, { "epoch": 0.8044571273270825, "grad_norm": 1.714828610420227, "learning_rate": 0.0008995023100964806, "loss": 3.843, "step": 11840 }, { "epoch": 0.8047968473977443, "grad_norm": 1.171982765197754, "learning_rate": 0.0008994598450876478, "loss": 3.545, "step": 11845 }, { "epoch": 0.805136567468406, "grad_norm": 1.4654988050460815, "learning_rate": 0.0008994173800788151, "loss": 3.7823, "step": 11850 }, { "epoch": 0.8054762875390679, "grad_norm": 1.835706353187561, "learning_rate": 0.0008993749150699823, "loss": 3.5186, "step": 11855 }, { "epoch": 0.8058160076097296, "grad_norm": 1.116430401802063, "learning_rate": 0.0008993324500611496, "loss": 3.4327, "step": 11860 }, { "epoch": 0.8061557276803913, "grad_norm": 1.8852900266647339, "learning_rate": 
0.000899289985052317, "loss": 3.5834, "step": 11865 }, { "epoch": 0.8064954477510531, "grad_norm": 1.0114552974700928, "learning_rate": 0.0008992475200434842, "loss": 3.7512, "step": 11870 }, { "epoch": 0.8068351678217149, "grad_norm": 1.0331611633300781, "learning_rate": 0.0008992050550346515, "loss": 3.6951, "step": 11875 }, { "epoch": 0.8071748878923767, "grad_norm": 1.2131614685058594, "learning_rate": 0.0008991625900258188, "loss": 3.203, "step": 11880 }, { "epoch": 0.8075146079630384, "grad_norm": 1.179734468460083, "learning_rate": 0.000899120125016986, "loss": 3.6719, "step": 11885 }, { "epoch": 0.8078543280337003, "grad_norm": 1.2663074731826782, "learning_rate": 0.0008990776600081532, "loss": 3.406, "step": 11890 }, { "epoch": 0.808194048104362, "grad_norm": 1.2273640632629395, "learning_rate": 0.0008990351949993206, "loss": 3.6905, "step": 11895 }, { "epoch": 0.8085337681750238, "grad_norm": 1.2446808815002441, "learning_rate": 0.0008989927299904879, "loss": 3.4703, "step": 11900 }, { "epoch": 0.8088734882456855, "grad_norm": 1.1523271799087524, "learning_rate": 0.0008989502649816551, "loss": 3.6058, "step": 11905 }, { "epoch": 0.8092132083163474, "grad_norm": 1.0853782892227173, "learning_rate": 0.0008989077999728225, "loss": 3.7639, "step": 11910 }, { "epoch": 0.8095529283870091, "grad_norm": 1.0518195629119873, "learning_rate": 0.0008988653349639897, "loss": 3.7958, "step": 11915 }, { "epoch": 0.8098926484576708, "grad_norm": 0.999330461025238, "learning_rate": 0.0008988228699551569, "loss": 3.4468, "step": 11920 }, { "epoch": 0.8102323685283327, "grad_norm": 0.9755830764770508, "learning_rate": 0.0008987804049463243, "loss": 3.6796, "step": 11925 }, { "epoch": 0.8105720885989944, "grad_norm": 1.1613025665283203, "learning_rate": 0.0008987379399374915, "loss": 3.71, "step": 11930 }, { "epoch": 0.8109118086696562, "grad_norm": 1.031726360321045, "learning_rate": 0.0008986954749286588, "loss": 3.7403, "step": 11935 }, { "epoch": 0.811251528740318, 
"grad_norm": 1.0026038885116577, "learning_rate": 0.0008986530099198262, "loss": 3.4966, "step": 11940 }, { "epoch": 0.8115912488109798, "grad_norm": 1.0221785306930542, "learning_rate": 0.0008986105449109934, "loss": 3.4663, "step": 11945 }, { "epoch": 0.8119309688816415, "grad_norm": 1.1437405347824097, "learning_rate": 0.0008985680799021606, "loss": 3.7429, "step": 11950 }, { "epoch": 0.8122706889523033, "grad_norm": 1.6888322830200195, "learning_rate": 0.0008985256148933279, "loss": 3.589, "step": 11955 }, { "epoch": 0.8126104090229651, "grad_norm": 1.1150739192962646, "learning_rate": 0.0008984831498844952, "loss": 3.6065, "step": 11960 }, { "epoch": 0.8129501290936268, "grad_norm": 1.0133496522903442, "learning_rate": 0.0008984406848756624, "loss": 3.6135, "step": 11965 }, { "epoch": 0.8132898491642886, "grad_norm": 1.3311457633972168, "learning_rate": 0.0008983982198668298, "loss": 3.6642, "step": 11970 }, { "epoch": 0.8136295692349504, "grad_norm": 1.7036324739456177, "learning_rate": 0.0008983557548579971, "loss": 3.8544, "step": 11975 }, { "epoch": 0.8139692893056122, "grad_norm": 1.42259681224823, "learning_rate": 0.0008983132898491643, "loss": 3.527, "step": 11980 }, { "epoch": 0.8143090093762739, "grad_norm": 1.1011102199554443, "learning_rate": 0.0008982708248403316, "loss": 3.5452, "step": 11985 }, { "epoch": 0.8146487294469357, "grad_norm": 1.3816139698028564, "learning_rate": 0.0008982283598314988, "loss": 3.8322, "step": 11990 }, { "epoch": 0.8149884495175975, "grad_norm": 1.505685806274414, "learning_rate": 0.0008981858948226661, "loss": 3.4262, "step": 11995 }, { "epoch": 0.8153281695882593, "grad_norm": 1.2864488363265991, "learning_rate": 0.0008981434298138334, "loss": 3.6338, "step": 12000 }, { "epoch": 0.815667889658921, "grad_norm": 1.651896357536316, "learning_rate": 0.0008981009648050007, "loss": 3.7356, "step": 12005 }, { "epoch": 0.8160076097295829, "grad_norm": 1.2832982540130615, "learning_rate": 0.000898058499796168, "loss": 3.7208, 
"step": 12010 }, { "epoch": 0.8163473298002446, "grad_norm": 1.1180880069732666, "learning_rate": 0.0008980160347873353, "loss": 3.8344, "step": 12015 }, { "epoch": 0.8166870498709063, "grad_norm": 1.3047735691070557, "learning_rate": 0.0008979735697785025, "loss": 3.7085, "step": 12020 }, { "epoch": 0.8170267699415682, "grad_norm": 1.1425451040267944, "learning_rate": 0.0008979311047696698, "loss": 3.2514, "step": 12025 }, { "epoch": 0.8173664900122299, "grad_norm": 1.04619562625885, "learning_rate": 0.0008978886397608371, "loss": 3.6627, "step": 12030 }, { "epoch": 0.8177062100828917, "grad_norm": 1.3751226663589478, "learning_rate": 0.0008978461747520043, "loss": 3.5794, "step": 12035 }, { "epoch": 0.8180459301535534, "grad_norm": 1.149665117263794, "learning_rate": 0.0008978037097431716, "loss": 3.5457, "step": 12040 }, { "epoch": 0.8183856502242153, "grad_norm": 0.8531579971313477, "learning_rate": 0.000897761244734339, "loss": 3.9709, "step": 12045 }, { "epoch": 0.818725370294877, "grad_norm": 1.0238009691238403, "learning_rate": 0.0008977187797255062, "loss": 3.6765, "step": 12050 }, { "epoch": 0.8190650903655388, "grad_norm": 1.4629586935043335, "learning_rate": 0.0008976763147166734, "loss": 3.6946, "step": 12055 }, { "epoch": 0.8194048104362006, "grad_norm": 1.1626982688903809, "learning_rate": 0.0008976338497078408, "loss": 3.5791, "step": 12060 }, { "epoch": 0.8197445305068624, "grad_norm": 1.0790706872940063, "learning_rate": 0.000897591384699008, "loss": 3.7631, "step": 12065 }, { "epoch": 0.8200842505775241, "grad_norm": 1.457220435142517, "learning_rate": 0.0008975489196901752, "loss": 3.6158, "step": 12070 }, { "epoch": 0.8204239706481858, "grad_norm": 1.033413052558899, "learning_rate": 0.0008975064546813427, "loss": 3.8045, "step": 12075 }, { "epoch": 0.8207636907188477, "grad_norm": 1.3364896774291992, "learning_rate": 0.0008974639896725099, "loss": 3.4783, "step": 12080 }, { "epoch": 0.8211034107895094, "grad_norm": 1.1903765201568604, 
"learning_rate": 0.0008974215246636771, "loss": 3.7042, "step": 12085 }, { "epoch": 0.8214431308601712, "grad_norm": 1.352070927619934, "learning_rate": 0.0008973790596548444, "loss": 3.827, "step": 12090 }, { "epoch": 0.821782850930833, "grad_norm": 1.2908453941345215, "learning_rate": 0.0008973365946460117, "loss": 3.7921, "step": 12095 }, { "epoch": 0.8221225710014948, "grad_norm": 1.159242868423462, "learning_rate": 0.0008972941296371789, "loss": 3.6424, "step": 12100 }, { "epoch": 0.8224622910721565, "grad_norm": 1.2542734146118164, "learning_rate": 0.0008972516646283462, "loss": 3.5652, "step": 12105 }, { "epoch": 0.8228020111428184, "grad_norm": 1.3505280017852783, "learning_rate": 0.0008972091996195136, "loss": 3.7214, "step": 12110 }, { "epoch": 0.8231417312134801, "grad_norm": 1.0873758792877197, "learning_rate": 0.0008971667346106808, "loss": 3.6992, "step": 12115 }, { "epoch": 0.8234814512841419, "grad_norm": 1.1823632717132568, "learning_rate": 0.0008971242696018481, "loss": 3.671, "step": 12120 }, { "epoch": 0.8238211713548036, "grad_norm": 1.4073837995529175, "learning_rate": 0.0008970818045930154, "loss": 3.6831, "step": 12125 }, { "epoch": 0.8241608914254654, "grad_norm": 1.043290615081787, "learning_rate": 0.0008970393395841826, "loss": 3.9021, "step": 12130 }, { "epoch": 0.8245006114961272, "grad_norm": 1.3074363470077515, "learning_rate": 0.0008969968745753499, "loss": 3.4886, "step": 12135 }, { "epoch": 0.8248403315667889, "grad_norm": 1.0418975353240967, "learning_rate": 0.0008969544095665171, "loss": 3.7175, "step": 12140 }, { "epoch": 0.8251800516374508, "grad_norm": 1.1429251432418823, "learning_rate": 0.0008969119445576845, "loss": 3.6962, "step": 12145 }, { "epoch": 0.8255197717081125, "grad_norm": 1.1894124746322632, "learning_rate": 0.0008968694795488518, "loss": 3.5519, "step": 12150 }, { "epoch": 0.8258594917787743, "grad_norm": 1.0639863014221191, "learning_rate": 0.000896827014540019, "loss": 3.7388, "step": 12155 }, { "epoch": 
0.826199211849436, "grad_norm": 1.2773538827896118, "learning_rate": 0.0008967845495311863, "loss": 3.6239, "step": 12160 }, { "epoch": 0.8265389319200979, "grad_norm": 1.2101491689682007, "learning_rate": 0.0008967420845223536, "loss": 3.8025, "step": 12165 }, { "epoch": 0.8268786519907596, "grad_norm": 1.2863247394561768, "learning_rate": 0.0008966996195135208, "loss": 3.63, "step": 12170 }, { "epoch": 0.8272183720614213, "grad_norm": 1.881058931350708, "learning_rate": 0.0008966571545046883, "loss": 3.5098, "step": 12175 }, { "epoch": 0.8275580921320832, "grad_norm": 0.9339013695716858, "learning_rate": 0.0008966146894958555, "loss": 3.7374, "step": 12180 }, { "epoch": 0.8278978122027449, "grad_norm": 0.9513421058654785, "learning_rate": 0.0008965722244870227, "loss": 3.6023, "step": 12185 }, { "epoch": 0.8282375322734067, "grad_norm": 4.480744361877441, "learning_rate": 0.00089652975947819, "loss": 3.8386, "step": 12190 }, { "epoch": 0.8285772523440685, "grad_norm": 1.0155706405639648, "learning_rate": 0.0008964872944693573, "loss": 3.8652, "step": 12195 }, { "epoch": 0.8289169724147303, "grad_norm": 1.0726414918899536, "learning_rate": 0.0008964448294605245, "loss": 3.8566, "step": 12200 }, { "epoch": 0.829256692485392, "grad_norm": 1.1810272932052612, "learning_rate": 0.0008964023644516918, "loss": 3.8189, "step": 12205 }, { "epoch": 0.8295964125560538, "grad_norm": 1.2424088716506958, "learning_rate": 0.0008963598994428592, "loss": 3.5868, "step": 12210 }, { "epoch": 0.8299361326267156, "grad_norm": 1.4758234024047852, "learning_rate": 0.0008963174344340264, "loss": 3.6621, "step": 12215 }, { "epoch": 0.8302758526973774, "grad_norm": 1.1068686246871948, "learning_rate": 0.0008962749694251937, "loss": 3.5791, "step": 12220 }, { "epoch": 0.8306155727680391, "grad_norm": 1.219010591506958, "learning_rate": 0.000896232504416361, "loss": 3.7824, "step": 12225 }, { "epoch": 0.830955292838701, "grad_norm": 1.1266810894012451, "learning_rate": 0.0008961900394075282, 
"loss": 3.5189, "step": 12230 }, { "epoch": 0.8312950129093627, "grad_norm": 1.1213878393173218, "learning_rate": 0.0008961475743986955, "loss": 3.1798, "step": 12235 }, { "epoch": 0.8316347329800244, "grad_norm": 1.2301719188690186, "learning_rate": 0.0008961051093898627, "loss": 3.4854, "step": 12240 }, { "epoch": 0.8319744530506862, "grad_norm": 1.2507743835449219, "learning_rate": 0.0008960626443810301, "loss": 3.3038, "step": 12245 }, { "epoch": 0.832314173121348, "grad_norm": 1.090584635734558, "learning_rate": 0.0008960201793721974, "loss": 3.6015, "step": 12250 }, { "epoch": 0.8326538931920098, "grad_norm": 1.4034544229507446, "learning_rate": 0.0008959777143633646, "loss": 3.4664, "step": 12255 }, { "epoch": 0.8329936132626715, "grad_norm": 1.3960731029510498, "learning_rate": 0.0008959352493545319, "loss": 3.4978, "step": 12260 }, { "epoch": 0.8333333333333334, "grad_norm": 1.407705307006836, "learning_rate": 0.0008958927843456992, "loss": 3.7568, "step": 12265 }, { "epoch": 0.8336730534039951, "grad_norm": 1.7534234523773193, "learning_rate": 0.0008958503193368664, "loss": 3.2736, "step": 12270 }, { "epoch": 0.8340127734746569, "grad_norm": 1.0351641178131104, "learning_rate": 0.0008958078543280336, "loss": 3.7956, "step": 12275 }, { "epoch": 0.8343524935453187, "grad_norm": 1.2052944898605347, "learning_rate": 0.0008957653893192011, "loss": 3.7198, "step": 12280 }, { "epoch": 0.8346922136159804, "grad_norm": 1.3373260498046875, "learning_rate": 0.0008957229243103683, "loss": 3.5866, "step": 12285 }, { "epoch": 0.8350319336866422, "grad_norm": 1.3843950033187866, "learning_rate": 0.0008956804593015355, "loss": 3.6361, "step": 12290 }, { "epoch": 0.8353716537573039, "grad_norm": 1.1788839101791382, "learning_rate": 0.0008956379942927029, "loss": 3.5835, "step": 12295 }, { "epoch": 0.8357113738279658, "grad_norm": 1.275902509689331, "learning_rate": 0.0008955955292838701, "loss": 3.7037, "step": 12300 }, { "epoch": 0.8360510938986275, "grad_norm": 
1.7554689645767212, "learning_rate": 0.0008955530642750373, "loss": 3.3768, "step": 12305 }, { "epoch": 0.8363908139692893, "grad_norm": 1.3911511898040771, "learning_rate": 0.0008955105992662047, "loss": 3.8064, "step": 12310 }, { "epoch": 0.8367305340399511, "grad_norm": 1.3447402715682983, "learning_rate": 0.000895468134257372, "loss": 3.52, "step": 12315 }, { "epoch": 0.8370702541106129, "grad_norm": 1.2758293151855469, "learning_rate": 0.0008954256692485392, "loss": 3.722, "step": 12320 }, { "epoch": 0.8374099741812746, "grad_norm": 1.3055059909820557, "learning_rate": 0.0008953832042397066, "loss": 3.7898, "step": 12325 }, { "epoch": 0.8377496942519363, "grad_norm": 1.1381572484970093, "learning_rate": 0.0008953407392308738, "loss": 3.2759, "step": 12330 }, { "epoch": 0.8380894143225982, "grad_norm": 1.133855938911438, "learning_rate": 0.000895298274222041, "loss": 3.8759, "step": 12335 }, { "epoch": 0.83842913439326, "grad_norm": 1.3946386575698853, "learning_rate": 0.0008952558092132083, "loss": 3.6972, "step": 12340 }, { "epoch": 0.8387688544639217, "grad_norm": 1.4684547185897827, "learning_rate": 0.0008952133442043756, "loss": 3.5801, "step": 12345 }, { "epoch": 0.8391085745345835, "grad_norm": 1.1729063987731934, "learning_rate": 0.0008951708791955429, "loss": 3.6384, "step": 12350 }, { "epoch": 0.8394482946052453, "grad_norm": 1.088158369064331, "learning_rate": 0.0008951284141867102, "loss": 3.6508, "step": 12355 }, { "epoch": 0.839788014675907, "grad_norm": 1.2438381910324097, "learning_rate": 0.0008950859491778775, "loss": 3.6865, "step": 12360 }, { "epoch": 0.8401277347465689, "grad_norm": 0.9452694058418274, "learning_rate": 0.0008950434841690447, "loss": 3.944, "step": 12365 }, { "epoch": 0.8404674548172306, "grad_norm": 1.4279100894927979, "learning_rate": 0.000895001019160212, "loss": 3.7712, "step": 12370 }, { "epoch": 0.8408071748878924, "grad_norm": 1.3461556434631348, "learning_rate": 0.0008949585541513792, "loss": 3.6239, "step": 12375 }, 
{ "epoch": 0.8411468949585541, "grad_norm": 1.3405888080596924, "learning_rate": 0.0008949160891425465, "loss": 3.6307, "step": 12380 }, { "epoch": 0.841486615029216, "grad_norm": 1.3551428318023682, "learning_rate": 0.0008948736241337139, "loss": 3.8136, "step": 12385 }, { "epoch": 0.8418263350998777, "grad_norm": 1.2316255569458008, "learning_rate": 0.0008948311591248811, "loss": 3.4757, "step": 12390 }, { "epoch": 0.8421660551705394, "grad_norm": 1.3667089939117432, "learning_rate": 0.0008947886941160484, "loss": 3.5087, "step": 12395 }, { "epoch": 0.8425057752412013, "grad_norm": 1.3739694356918335, "learning_rate": 0.0008947462291072157, "loss": 3.7598, "step": 12400 }, { "epoch": 0.842845495311863, "grad_norm": 1.124772548675537, "learning_rate": 0.0008947037640983829, "loss": 3.6536, "step": 12405 }, { "epoch": 0.8431852153825248, "grad_norm": 1.3089420795440674, "learning_rate": 0.0008946612990895502, "loss": 3.8624, "step": 12410 }, { "epoch": 0.8435249354531865, "grad_norm": 1.2774535417556763, "learning_rate": 0.0008946188340807175, "loss": 3.7888, "step": 12415 }, { "epoch": 0.8438646555238484, "grad_norm": 1.2385011911392212, "learning_rate": 0.0008945763690718848, "loss": 3.7376, "step": 12420 }, { "epoch": 0.8442043755945101, "grad_norm": 1.2377432584762573, "learning_rate": 0.000894533904063052, "loss": 3.8954, "step": 12425 }, { "epoch": 0.8445440956651719, "grad_norm": 0.9972906112670898, "learning_rate": 0.0008944914390542194, "loss": 3.666, "step": 12430 }, { "epoch": 0.8448838157358337, "grad_norm": 1.300369381904602, "learning_rate": 0.0008944489740453866, "loss": 3.8193, "step": 12435 }, { "epoch": 0.8452235358064955, "grad_norm": 1.4704227447509766, "learning_rate": 0.0008944065090365538, "loss": 3.6417, "step": 12440 }, { "epoch": 0.8455632558771572, "grad_norm": 1.1746008396148682, "learning_rate": 0.0008943640440277212, "loss": 3.5627, "step": 12445 }, { "epoch": 0.845902975947819, "grad_norm": 1.0745973587036133, "learning_rate": 
0.0008943215790188884, "loss": 3.6329, "step": 12450 }, { "epoch": 0.8462426960184808, "grad_norm": 1.4410079717636108, "learning_rate": 0.0008942791140100557, "loss": 3.961, "step": 12455 }, { "epoch": 0.8465824160891425, "grad_norm": 1.4388024806976318, "learning_rate": 0.0008942366490012231, "loss": 3.6227, "step": 12460 }, { "epoch": 0.8469221361598043, "grad_norm": 0.9857900738716125, "learning_rate": 0.0008941941839923903, "loss": 3.743, "step": 12465 }, { "epoch": 0.8472618562304661, "grad_norm": 1.2936960458755493, "learning_rate": 0.0008941517189835575, "loss": 3.5265, "step": 12470 }, { "epoch": 0.8476015763011279, "grad_norm": 1.0882163047790527, "learning_rate": 0.0008941092539747249, "loss": 3.2802, "step": 12475 }, { "epoch": 0.8479412963717896, "grad_norm": 2.534053087234497, "learning_rate": 0.0008940667889658921, "loss": 3.5367, "step": 12480 }, { "epoch": 0.8482810164424515, "grad_norm": 1.2128846645355225, "learning_rate": 0.0008940243239570593, "loss": 3.6401, "step": 12485 }, { "epoch": 0.8486207365131132, "grad_norm": 1.6494146585464478, "learning_rate": 0.0008939818589482267, "loss": 3.7005, "step": 12490 }, { "epoch": 0.848960456583775, "grad_norm": 1.1653285026550293, "learning_rate": 0.000893939393939394, "loss": 3.75, "step": 12495 }, { "epoch": 0.8493001766544367, "grad_norm": 1.4918458461761475, "learning_rate": 0.0008938969289305612, "loss": 3.7211, "step": 12500 }, { "epoch": 0.8496398967250985, "grad_norm": 3.1092147827148438, "learning_rate": 0.0008938544639217285, "loss": 3.6465, "step": 12505 }, { "epoch": 0.8499796167957603, "grad_norm": 1.3978471755981445, "learning_rate": 0.0008938119989128958, "loss": 3.5174, "step": 12510 }, { "epoch": 0.850319336866422, "grad_norm": 1.0521824359893799, "learning_rate": 0.0008937695339040631, "loss": 3.4441, "step": 12515 }, { "epoch": 0.8506590569370839, "grad_norm": 1.1784002780914307, "learning_rate": 0.0008937270688952303, "loss": 3.6529, "step": 12520 }, { "epoch": 0.8509987770077456, 
"grad_norm": 1.2923048734664917, "learning_rate": 0.0008936846038863977, "loss": 3.7022, "step": 12525 }, { "epoch": 0.8513384970784074, "grad_norm": 1.3110612630844116, "learning_rate": 0.000893642138877565, "loss": 3.7291, "step": 12530 }, { "epoch": 0.8516782171490692, "grad_norm": 1.2511200904846191, "learning_rate": 0.0008935996738687322, "loss": 3.9638, "step": 12535 }, { "epoch": 0.852017937219731, "grad_norm": 1.5386048555374146, "learning_rate": 0.0008935572088598994, "loss": 3.7674, "step": 12540 }, { "epoch": 0.8523576572903927, "grad_norm": 1.1822712421417236, "learning_rate": 0.0008935147438510668, "loss": 3.762, "step": 12545 }, { "epoch": 0.8526973773610544, "grad_norm": 1.8834359645843506, "learning_rate": 0.000893472278842234, "loss": 3.8003, "step": 12550 }, { "epoch": 0.8530370974317163, "grad_norm": 1.4483500719070435, "learning_rate": 0.0008934298138334012, "loss": 3.7105, "step": 12555 }, { "epoch": 0.853376817502378, "grad_norm": 4.198465824127197, "learning_rate": 0.0008933873488245687, "loss": 3.4827, "step": 12560 }, { "epoch": 0.8537165375730398, "grad_norm": 0.9812795519828796, "learning_rate": 0.0008933448838157359, "loss": 3.879, "step": 12565 }, { "epoch": 0.8540562576437016, "grad_norm": 1.4161157608032227, "learning_rate": 0.0008933024188069031, "loss": 3.5344, "step": 12570 }, { "epoch": 0.8543959777143634, "grad_norm": 1.1501537561416626, "learning_rate": 0.0008932599537980705, "loss": 3.6879, "step": 12575 }, { "epoch": 0.8547356977850251, "grad_norm": 1.234977126121521, "learning_rate": 0.0008932174887892377, "loss": 3.6222, "step": 12580 }, { "epoch": 0.8550754178556869, "grad_norm": 1.4975793361663818, "learning_rate": 0.0008931750237804049, "loss": 3.4273, "step": 12585 }, { "epoch": 0.8554151379263487, "grad_norm": 1.1529535055160522, "learning_rate": 0.0008931325587715722, "loss": 3.7406, "step": 12590 }, { "epoch": 0.8557548579970105, "grad_norm": 1.2134901285171509, "learning_rate": 0.0008930900937627396, "loss": 3.8463, 
"step": 12595 }, { "epoch": 0.8560945780676722, "grad_norm": 1.2289810180664062, "learning_rate": 0.0008930476287539068, "loss": 3.7711, "step": 12600 }, { "epoch": 0.856434298138334, "grad_norm": 1.4885609149932861, "learning_rate": 0.0008930051637450741, "loss": 3.4467, "step": 12605 }, { "epoch": 0.8567740182089958, "grad_norm": 1.0229580402374268, "learning_rate": 0.0008929626987362414, "loss": 3.684, "step": 12610 }, { "epoch": 0.8571137382796575, "grad_norm": 1.165162205696106, "learning_rate": 0.0008929202337274086, "loss": 3.5517, "step": 12615 }, { "epoch": 0.8574534583503194, "grad_norm": 1.4150593280792236, "learning_rate": 0.0008928777687185759, "loss": 4.05, "step": 12620 }, { "epoch": 0.8577931784209811, "grad_norm": 1.3052473068237305, "learning_rate": 0.0008928353037097431, "loss": 3.8075, "step": 12625 }, { "epoch": 0.8581328984916429, "grad_norm": 1.3084776401519775, "learning_rate": 0.0008927928387009105, "loss": 3.8121, "step": 12630 }, { "epoch": 0.8584726185623046, "grad_norm": 1.535833716392517, "learning_rate": 0.0008927503736920778, "loss": 3.507, "step": 12635 }, { "epoch": 0.8588123386329665, "grad_norm": 1.1342695951461792, "learning_rate": 0.000892707908683245, "loss": 3.7965, "step": 12640 }, { "epoch": 0.8591520587036282, "grad_norm": 1.8402591943740845, "learning_rate": 0.0008926654436744123, "loss": 3.7744, "step": 12645 }, { "epoch": 0.85949177877429, "grad_norm": 1.2753657102584839, "learning_rate": 0.0008926229786655796, "loss": 3.6983, "step": 12650 }, { "epoch": 0.8598314988449518, "grad_norm": 1.5267066955566406, "learning_rate": 0.0008925805136567468, "loss": 3.6273, "step": 12655 }, { "epoch": 0.8601712189156135, "grad_norm": 1.183228850364685, "learning_rate": 0.000892538048647914, "loss": 3.5839, "step": 12660 }, { "epoch": 0.8605109389862753, "grad_norm": 1.1666234731674194, "learning_rate": 0.0008924955836390815, "loss": 3.5802, "step": 12665 }, { "epoch": 0.860850659056937, "grad_norm": 1.1797183752059937, 
"learning_rate": 0.0008924531186302487, "loss": 3.6586, "step": 12670 }, { "epoch": 0.8611903791275989, "grad_norm": 0.9152116775512695, "learning_rate": 0.0008924106536214159, "loss": 3.7455, "step": 12675 }, { "epoch": 0.8615300991982606, "grad_norm": 1.4733765125274658, "learning_rate": 0.0008923681886125833, "loss": 3.7121, "step": 12680 }, { "epoch": 0.8618698192689224, "grad_norm": 1.250320315361023, "learning_rate": 0.0008923257236037505, "loss": 3.7596, "step": 12685 }, { "epoch": 0.8622095393395842, "grad_norm": 1.4928174018859863, "learning_rate": 0.0008922832585949177, "loss": 3.4468, "step": 12690 }, { "epoch": 0.862549259410246, "grad_norm": 1.151477336883545, "learning_rate": 0.0008922407935860851, "loss": 3.7067, "step": 12695 }, { "epoch": 0.8628889794809077, "grad_norm": 1.4225906133651733, "learning_rate": 0.0008921983285772524, "loss": 3.7475, "step": 12700 }, { "epoch": 0.8632286995515696, "grad_norm": 1.1005563735961914, "learning_rate": 0.0008921558635684196, "loss": 3.3809, "step": 12705 }, { "epoch": 0.8635684196222313, "grad_norm": 1.2098838090896606, "learning_rate": 0.000892113398559587, "loss": 3.4664, "step": 12710 }, { "epoch": 0.863908139692893, "grad_norm": 1.4967461824417114, "learning_rate": 0.0008920709335507542, "loss": 3.7751, "step": 12715 }, { "epoch": 0.8642478597635548, "grad_norm": 1.049377202987671, "learning_rate": 0.0008920284685419214, "loss": 3.7853, "step": 12720 }, { "epoch": 0.8645875798342166, "grad_norm": 1.678743839263916, "learning_rate": 0.0008919860035330887, "loss": 3.8215, "step": 12725 }, { "epoch": 0.8649272999048784, "grad_norm": 1.0466912984848022, "learning_rate": 0.000891943538524256, "loss": 3.627, "step": 12730 }, { "epoch": 0.8652670199755401, "grad_norm": 1.4080239534378052, "learning_rate": 0.0008919010735154233, "loss": 3.9817, "step": 12735 }, { "epoch": 0.865606740046202, "grad_norm": 1.426977515220642, "learning_rate": 0.0008918586085065906, "loss": 3.8138, "step": 12740 }, { "epoch": 
0.8659464601168637, "grad_norm": 1.3621857166290283, "learning_rate": 0.0008918161434977579, "loss": 3.7092, "step": 12745 }, { "epoch": 0.8662861801875255, "grad_norm": 0.99385666847229, "learning_rate": 0.0008917736784889251, "loss": 3.7738, "step": 12750 }, { "epoch": 0.8666259002581872, "grad_norm": 1.1342391967773438, "learning_rate": 0.0008917312134800924, "loss": 3.5699, "step": 12755 }, { "epoch": 0.866965620328849, "grad_norm": 1.3680689334869385, "learning_rate": 0.0008916887484712597, "loss": 3.5647, "step": 12760 }, { "epoch": 0.8673053403995108, "grad_norm": 1.5714354515075684, "learning_rate": 0.0008916462834624269, "loss": 3.7557, "step": 12765 }, { "epoch": 0.8676450604701725, "grad_norm": 1.2160687446594238, "learning_rate": 0.0008916038184535943, "loss": 3.7495, "step": 12770 }, { "epoch": 0.8679847805408344, "grad_norm": 1.1242257356643677, "learning_rate": 0.0008915613534447615, "loss": 3.7988, "step": 12775 }, { "epoch": 0.8683245006114961, "grad_norm": 1.2212510108947754, "learning_rate": 0.0008915188884359288, "loss": 3.6071, "step": 12780 }, { "epoch": 0.8686642206821579, "grad_norm": 1.2275527715682983, "learning_rate": 0.0008914764234270961, "loss": 3.874, "step": 12785 }, { "epoch": 0.8690039407528197, "grad_norm": 1.2301522493362427, "learning_rate": 0.0008914339584182633, "loss": 3.5918, "step": 12790 }, { "epoch": 0.8693436608234815, "grad_norm": 0.9253072142601013, "learning_rate": 0.0008913914934094306, "loss": 3.7323, "step": 12795 }, { "epoch": 0.8696833808941432, "grad_norm": 1.173310399055481, "learning_rate": 0.000891349028400598, "loss": 3.8709, "step": 12800 }, { "epoch": 0.870023100964805, "grad_norm": 1.1781901121139526, "learning_rate": 0.0008913065633917652, "loss": 3.8443, "step": 12805 }, { "epoch": 0.8703628210354668, "grad_norm": 1.1630043983459473, "learning_rate": 0.0008912640983829325, "loss": 3.8499, "step": 12810 }, { "epoch": 0.8707025411061285, "grad_norm": 1.2723264694213867, "learning_rate": 
0.0008912216333740998, "loss": 3.7293, "step": 12815 }, { "epoch": 0.8710422611767903, "grad_norm": 1.0234392881393433, "learning_rate": 0.000891179168365267, "loss": 3.7194, "step": 12820 }, { "epoch": 0.8713819812474521, "grad_norm": 1.5475430488586426, "learning_rate": 0.0008911367033564342, "loss": 3.8409, "step": 12825 }, { "epoch": 0.8717217013181139, "grad_norm": 0.9632904529571533, "learning_rate": 0.0008910942383476016, "loss": 3.5998, "step": 12830 }, { "epoch": 0.8720614213887756, "grad_norm": 0.9839092493057251, "learning_rate": 0.0008910517733387689, "loss": 3.4936, "step": 12835 }, { "epoch": 0.8724011414594374, "grad_norm": 1.2259682416915894, "learning_rate": 0.0008910093083299361, "loss": 3.629, "step": 12840 }, { "epoch": 0.8727408615300992, "grad_norm": 1.0871951580047607, "learning_rate": 0.0008909668433211035, "loss": 3.5218, "step": 12845 }, { "epoch": 0.873080581600761, "grad_norm": 1.1955701112747192, "learning_rate": 0.0008909243783122707, "loss": 4.0311, "step": 12850 }, { "epoch": 0.8734203016714227, "grad_norm": 1.1380928754806519, "learning_rate": 0.000890881913303438, "loss": 3.7757, "step": 12855 }, { "epoch": 0.8737600217420846, "grad_norm": 1.1296710968017578, "learning_rate": 0.0008908394482946053, "loss": 3.5914, "step": 12860 }, { "epoch": 0.8740997418127463, "grad_norm": 1.009884238243103, "learning_rate": 0.0008907969832857725, "loss": 3.5438, "step": 12865 }, { "epoch": 0.874439461883408, "grad_norm": 2.0458288192749023, "learning_rate": 0.0008907545182769399, "loss": 3.4137, "step": 12870 }, { "epoch": 0.8747791819540699, "grad_norm": 1.1804662942886353, "learning_rate": 0.0008907120532681071, "loss": 3.7433, "step": 12875 }, { "epoch": 0.8751189020247316, "grad_norm": 1.2473260164260864, "learning_rate": 0.0008906695882592744, "loss": 3.3751, "step": 12880 }, { "epoch": 0.8754586220953934, "grad_norm": 1.1269844770431519, "learning_rate": 0.0008906271232504417, "loss": 3.7194, "step": 12885 }, { "epoch": 0.8757983421660551, 
"grad_norm": 0.9917231798171997, "learning_rate": 0.0008905846582416089, "loss": 3.5491, "step": 12890 }, { "epoch": 0.876138062236717, "grad_norm": 1.2913542985916138, "learning_rate": 0.0008905421932327762, "loss": 3.7256, "step": 12895 }, { "epoch": 0.8764777823073787, "grad_norm": 1.100599765777588, "learning_rate": 0.0008904997282239435, "loss": 3.9103, "step": 12900 }, { "epoch": 0.8768175023780405, "grad_norm": 1.230551838874817, "learning_rate": 0.0008904572632151108, "loss": 3.6173, "step": 12905 }, { "epoch": 0.8771572224487023, "grad_norm": 1.4434441328048706, "learning_rate": 0.0008904147982062781, "loss": 3.6656, "step": 12910 }, { "epoch": 0.877496942519364, "grad_norm": 1.1212341785430908, "learning_rate": 0.0008903723331974454, "loss": 3.3941, "step": 12915 }, { "epoch": 0.8778366625900258, "grad_norm": 1.2975363731384277, "learning_rate": 0.0008903298681886126, "loss": 3.6307, "step": 12920 }, { "epoch": 0.8781763826606876, "grad_norm": 1.2344962358474731, "learning_rate": 0.0008902874031797798, "loss": 3.436, "step": 12925 }, { "epoch": 0.8785161027313494, "grad_norm": 1.2863240242004395, "learning_rate": 0.0008902449381709472, "loss": 3.6702, "step": 12930 }, { "epoch": 0.8788558228020111, "grad_norm": 1.2049676179885864, "learning_rate": 0.0008902024731621144, "loss": 3.6225, "step": 12935 }, { "epoch": 0.8791955428726729, "grad_norm": 1.3980114459991455, "learning_rate": 0.0008901600081532817, "loss": 3.6646, "step": 12940 }, { "epoch": 0.8795352629433347, "grad_norm": 1.0302512645721436, "learning_rate": 0.0008901175431444491, "loss": 3.6624, "step": 12945 }, { "epoch": 0.8798749830139965, "grad_norm": 1.1877480745315552, "learning_rate": 0.0008900750781356163, "loss": 3.3526, "step": 12950 }, { "epoch": 0.8802147030846582, "grad_norm": 1.3566126823425293, "learning_rate": 0.0008900326131267835, "loss": 3.8146, "step": 12955 }, { "epoch": 0.8805544231553201, "grad_norm": 1.1876009702682495, "learning_rate": 0.0008899901481179509, "loss": 
3.6768, "step": 12960 }, { "epoch": 0.8808941432259818, "grad_norm": 1.377869963645935, "learning_rate": 0.0008899476831091181, "loss": 3.7432, "step": 12965 }, { "epoch": 0.8812338632966435, "grad_norm": 1.13657546043396, "learning_rate": 0.0008899052181002853, "loss": 3.7078, "step": 12970 }, { "epoch": 0.8815735833673053, "grad_norm": 1.3983463048934937, "learning_rate": 0.0008898627530914528, "loss": 3.6875, "step": 12975 }, { "epoch": 0.8819133034379671, "grad_norm": 1.1038823127746582, "learning_rate": 0.00088982028808262, "loss": 3.8163, "step": 12980 }, { "epoch": 0.8822530235086289, "grad_norm": 1.3287203311920166, "learning_rate": 0.0008897778230737872, "loss": 3.6074, "step": 12985 }, { "epoch": 0.8825927435792906, "grad_norm": 1.6350245475769043, "learning_rate": 0.0008897353580649545, "loss": 3.8015, "step": 12990 }, { "epoch": 0.8829324636499525, "grad_norm": 1.4514131546020508, "learning_rate": 0.0008896928930561218, "loss": 3.718, "step": 12995 }, { "epoch": 0.8832721837206142, "grad_norm": 1.1473720073699951, "learning_rate": 0.000889650428047289, "loss": 3.6998, "step": 13000 }, { "epoch": 0.883611903791276, "grad_norm": 1.6112507581710815, "learning_rate": 0.0008896079630384563, "loss": 3.7509, "step": 13005 }, { "epoch": 0.8839516238619378, "grad_norm": 1.0363117456436157, "learning_rate": 0.0008895654980296237, "loss": 3.4649, "step": 13010 }, { "epoch": 0.8842913439325996, "grad_norm": 1.0311036109924316, "learning_rate": 0.0008895230330207909, "loss": 3.4908, "step": 13015 }, { "epoch": 0.8846310640032613, "grad_norm": 1.0112276077270508, "learning_rate": 0.0008894805680119582, "loss": 3.54, "step": 13020 }, { "epoch": 0.884970784073923, "grad_norm": 1.540605902671814, "learning_rate": 0.0008894381030031254, "loss": 3.5243, "step": 13025 }, { "epoch": 0.8853105041445849, "grad_norm": 1.3251091241836548, "learning_rate": 0.0008893956379942927, "loss": 3.5593, "step": 13030 }, { "epoch": 0.8856502242152466, "grad_norm": 1.3720605373382568, 
"learning_rate": 0.00088935317298546, "loss": 3.6262, "step": 13035 }, { "epoch": 0.8859899442859084, "grad_norm": 1.2837045192718506, "learning_rate": 0.0008893107079766272, "loss": 3.5479, "step": 13040 }, { "epoch": 0.8863296643565702, "grad_norm": 0.982765793800354, "learning_rate": 0.0008892682429677946, "loss": 3.5963, "step": 13045 }, { "epoch": 0.886669384427232, "grad_norm": 1.3103806972503662, "learning_rate": 0.0008892257779589619, "loss": 3.7859, "step": 13050 }, { "epoch": 0.8870091044978937, "grad_norm": 1.1575360298156738, "learning_rate": 0.0008891833129501291, "loss": 3.6168, "step": 13055 }, { "epoch": 0.8873488245685555, "grad_norm": 1.3809927701950073, "learning_rate": 0.0008891408479412963, "loss": 3.5766, "step": 13060 }, { "epoch": 0.8876885446392173, "grad_norm": 0.9957054853439331, "learning_rate": 0.0008890983829324637, "loss": 3.6627, "step": 13065 }, { "epoch": 0.8880282647098791, "grad_norm": 1.5863523483276367, "learning_rate": 0.0008890559179236309, "loss": 3.5573, "step": 13070 }, { "epoch": 0.8883679847805408, "grad_norm": 1.2722114324569702, "learning_rate": 0.0008890134529147981, "loss": 3.688, "step": 13075 }, { "epoch": 0.8887077048512027, "grad_norm": 1.0695710182189941, "learning_rate": 0.0008889709879059656, "loss": 3.663, "step": 13080 }, { "epoch": 0.8890474249218644, "grad_norm": 1.1886130571365356, "learning_rate": 0.0008889285228971328, "loss": 3.61, "step": 13085 }, { "epoch": 0.8893871449925261, "grad_norm": 1.2556360960006714, "learning_rate": 0.0008888860578883, "loss": 3.6924, "step": 13090 }, { "epoch": 0.889726865063188, "grad_norm": 1.2313467264175415, "learning_rate": 0.0008888435928794674, "loss": 3.6888, "step": 13095 }, { "epoch": 0.8900665851338497, "grad_norm": 1.0707939863204956, "learning_rate": 0.0008888011278706346, "loss": 3.6317, "step": 13100 }, { "epoch": 0.8904063052045115, "grad_norm": 1.2580441236495972, "learning_rate": 0.0008887586628618018, "loss": 3.6393, "step": 13105 }, { "epoch": 
0.8907460252751732, "grad_norm": 1.5443764925003052, "learning_rate": 0.0008887161978529691, "loss": 3.7101, "step": 13110 }, { "epoch": 0.8910857453458351, "grad_norm": 1.2367218732833862, "learning_rate": 0.0008886737328441365, "loss": 3.6059, "step": 13115 }, { "epoch": 0.8914254654164968, "grad_norm": 1.1286455392837524, "learning_rate": 0.0008886312678353037, "loss": 3.8082, "step": 13120 }, { "epoch": 0.8917651854871586, "grad_norm": 1.1404178142547607, "learning_rate": 0.000888588802826471, "loss": 3.4538, "step": 13125 }, { "epoch": 0.8921049055578204, "grad_norm": 1.577135682106018, "learning_rate": 0.0008885463378176383, "loss": 3.4512, "step": 13130 }, { "epoch": 0.8924446256284821, "grad_norm": 1.5482406616210938, "learning_rate": 0.0008885038728088055, "loss": 3.5439, "step": 13135 }, { "epoch": 0.8927843456991439, "grad_norm": 1.041913628578186, "learning_rate": 0.0008884614077999728, "loss": 3.6827, "step": 13140 }, { "epoch": 0.8931240657698056, "grad_norm": 1.7212363481521606, "learning_rate": 0.0008884189427911401, "loss": 3.8079, "step": 13145 }, { "epoch": 0.8934637858404675, "grad_norm": 1.3451628684997559, "learning_rate": 0.0008883764777823074, "loss": 3.6175, "step": 13150 }, { "epoch": 0.8938035059111292, "grad_norm": 1.0903377532958984, "learning_rate": 0.0008883340127734747, "loss": 3.7707, "step": 13155 }, { "epoch": 0.894143225981791, "grad_norm": 1.2344369888305664, "learning_rate": 0.000888291547764642, "loss": 3.3744, "step": 13160 }, { "epoch": 0.8944829460524528, "grad_norm": 1.7297823429107666, "learning_rate": 0.0008882490827558092, "loss": 3.4307, "step": 13165 }, { "epoch": 0.8948226661231146, "grad_norm": 1.1313594579696655, "learning_rate": 0.0008882066177469765, "loss": 3.4969, "step": 13170 }, { "epoch": 0.8951623861937763, "grad_norm": 0.8365592360496521, "learning_rate": 0.0008881641527381437, "loss": 3.7596, "step": 13175 }, { "epoch": 0.8955021062644382, "grad_norm": 2.082674980163574, "learning_rate": 
0.000888121687729311, "loss": 3.654, "step": 13180 }, { "epoch": 0.8958418263350999, "grad_norm": 1.3660578727722168, "learning_rate": 0.0008880792227204784, "loss": 3.9335, "step": 13185 }, { "epoch": 0.8961815464057616, "grad_norm": 2.176542043685913, "learning_rate": 0.0008880367577116456, "loss": 3.6357, "step": 13190 }, { "epoch": 0.8965212664764234, "grad_norm": 1.2505117654800415, "learning_rate": 0.000887994292702813, "loss": 3.7514, "step": 13195 }, { "epoch": 0.8968609865470852, "grad_norm": 1.7506145238876343, "learning_rate": 0.0008879518276939802, "loss": 3.6177, "step": 13200 }, { "epoch": 0.897200706617747, "grad_norm": 2.38708233833313, "learning_rate": 0.0008879093626851474, "loss": 3.5015, "step": 13205 }, { "epoch": 0.8975404266884087, "grad_norm": 1.3204840421676636, "learning_rate": 0.0008878668976763148, "loss": 3.5947, "step": 13210 }, { "epoch": 0.8978801467590706, "grad_norm": 1.2959553003311157, "learning_rate": 0.000887824432667482, "loss": 3.568, "step": 13215 }, { "epoch": 0.8982198668297323, "grad_norm": 1.289971113204956, "learning_rate": 0.0008877819676586493, "loss": 3.6648, "step": 13220 }, { "epoch": 0.8985595869003941, "grad_norm": 1.275198221206665, "learning_rate": 0.0008877395026498166, "loss": 3.8269, "step": 13225 }, { "epoch": 0.8988993069710558, "grad_norm": 1.4683687686920166, "learning_rate": 0.0008876970376409839, "loss": 3.8528, "step": 13230 }, { "epoch": 0.8992390270417177, "grad_norm": 1.5089524984359741, "learning_rate": 0.0008876545726321511, "loss": 3.5173, "step": 13235 }, { "epoch": 0.8995787471123794, "grad_norm": 1.2925670146942139, "learning_rate": 0.0008876121076233184, "loss": 3.8957, "step": 13240 }, { "epoch": 0.8999184671830411, "grad_norm": 1.0611220598220825, "learning_rate": 0.0008875696426144857, "loss": 3.4317, "step": 13245 }, { "epoch": 0.900258187253703, "grad_norm": 1.2582794427871704, "learning_rate": 0.0008875271776056529, "loss": 3.588, "step": 13250 }, { "epoch": 0.9005979073243647, 
"grad_norm": 1.1837396621704102, "learning_rate": 0.0008874847125968203, "loss": 3.6773, "step": 13255 }, { "epoch": 0.9009376273950265, "grad_norm": 1.5072866678237915, "learning_rate": 0.0008874422475879876, "loss": 3.7004, "step": 13260 }, { "epoch": 0.9012773474656883, "grad_norm": 1.2681010961532593, "learning_rate": 0.0008873997825791548, "loss": 3.5967, "step": 13265 }, { "epoch": 0.9016170675363501, "grad_norm": 1.6119776964187622, "learning_rate": 0.0008873573175703221, "loss": 3.6483, "step": 13270 }, { "epoch": 0.9019567876070118, "grad_norm": 1.4881774187088013, "learning_rate": 0.0008873148525614893, "loss": 3.8644, "step": 13275 }, { "epoch": 0.9022965076776736, "grad_norm": 1.4228190183639526, "learning_rate": 0.0008872723875526566, "loss": 3.968, "step": 13280 }, { "epoch": 0.9026362277483354, "grad_norm": 1.192259430885315, "learning_rate": 0.000887229922543824, "loss": 3.6704, "step": 13285 }, { "epoch": 0.9029759478189971, "grad_norm": 1.4747867584228516, "learning_rate": 0.0008871874575349912, "loss": 3.6735, "step": 13290 }, { "epoch": 0.9033156678896589, "grad_norm": 1.1578336954116821, "learning_rate": 0.0008871449925261585, "loss": 3.8486, "step": 13295 }, { "epoch": 0.9036553879603207, "grad_norm": 1.1175806522369385, "learning_rate": 0.0008871025275173258, "loss": 3.7724, "step": 13300 }, { "epoch": 0.9039951080309825, "grad_norm": 1.3745509386062622, "learning_rate": 0.000887060062508493, "loss": 3.452, "step": 13305 }, { "epoch": 0.9043348281016442, "grad_norm": 1.0368167161941528, "learning_rate": 0.0008870175974996602, "loss": 3.7445, "step": 13310 }, { "epoch": 0.904674548172306, "grad_norm": 0.9941036701202393, "learning_rate": 0.0008869751324908276, "loss": 3.9421, "step": 13315 }, { "epoch": 0.9050142682429678, "grad_norm": 1.2585958242416382, "learning_rate": 0.0008869326674819949, "loss": 3.5537, "step": 13320 }, { "epoch": 0.9053539883136296, "grad_norm": 1.1822377443313599, "learning_rate": 0.0008868902024731621, "loss": 
3.5574, "step": 13325 }, { "epoch": 0.9056937083842913, "grad_norm": 1.3604127168655396, "learning_rate": 0.0008868477374643295, "loss": 3.6016, "step": 13330 }, { "epoch": 0.9060334284549532, "grad_norm": 1.1365103721618652, "learning_rate": 0.0008868052724554967, "loss": 3.815, "step": 13335 }, { "epoch": 0.9063731485256149, "grad_norm": 1.310734510421753, "learning_rate": 0.0008867628074466639, "loss": 3.5424, "step": 13340 }, { "epoch": 0.9067128685962766, "grad_norm": 1.0693243741989136, "learning_rate": 0.0008867203424378313, "loss": 3.7474, "step": 13345 }, { "epoch": 0.9070525886669385, "grad_norm": 1.3508833646774292, "learning_rate": 0.0008866778774289985, "loss": 3.6583, "step": 13350 }, { "epoch": 0.9073923087376002, "grad_norm": 1.2899266481399536, "learning_rate": 0.0008866354124201658, "loss": 3.6644, "step": 13355 }, { "epoch": 0.907732028808262, "grad_norm": 1.160899043083191, "learning_rate": 0.0008865929474113332, "loss": 3.5949, "step": 13360 }, { "epoch": 0.9080717488789237, "grad_norm": 1.530305027961731, "learning_rate": 0.0008865504824025004, "loss": 3.6045, "step": 13365 }, { "epoch": 0.9084114689495856, "grad_norm": 1.366618037223816, "learning_rate": 0.0008865080173936676, "loss": 3.6421, "step": 13370 }, { "epoch": 0.9087511890202473, "grad_norm": 1.136313796043396, "learning_rate": 0.0008864655523848349, "loss": 3.2991, "step": 13375 }, { "epoch": 0.9090909090909091, "grad_norm": 1.0600324869155884, "learning_rate": 0.0008864230873760022, "loss": 3.8876, "step": 13380 }, { "epoch": 0.9094306291615709, "grad_norm": 1.0865321159362793, "learning_rate": 0.0008863806223671694, "loss": 3.8939, "step": 13385 }, { "epoch": 0.9097703492322327, "grad_norm": 1.5523617267608643, "learning_rate": 0.0008863381573583368, "loss": 3.7534, "step": 13390 }, { "epoch": 0.9101100693028944, "grad_norm": 1.4287605285644531, "learning_rate": 0.0008862956923495041, "loss": 3.5395, "step": 13395 }, { "epoch": 0.9104497893735561, "grad_norm": 1.0820205211639404, 
"learning_rate": 0.0008862532273406713, "loss": 3.377, "step": 13400 }, { "epoch": 0.910789509444218, "grad_norm": 1.4199540615081787, "learning_rate": 0.0008862107623318386, "loss": 4.1166, "step": 13405 }, { "epoch": 0.9111292295148797, "grad_norm": 1.0481789112091064, "learning_rate": 0.0008861682973230058, "loss": 3.4397, "step": 13410 }, { "epoch": 0.9114689495855415, "grad_norm": 0.9064306616783142, "learning_rate": 0.0008861258323141731, "loss": 3.6837, "step": 13415 }, { "epoch": 0.9118086696562033, "grad_norm": 1.1715648174285889, "learning_rate": 0.0008860833673053404, "loss": 3.9001, "step": 13420 }, { "epoch": 0.9121483897268651, "grad_norm": 1.387894868850708, "learning_rate": 0.0008860409022965077, "loss": 3.6216, "step": 13425 }, { "epoch": 0.9124881097975268, "grad_norm": 1.0953913927078247, "learning_rate": 0.000885998437287675, "loss": 3.4028, "step": 13430 }, { "epoch": 0.9128278298681887, "grad_norm": 9.743432998657227, "learning_rate": 0.0008859559722788423, "loss": 3.6442, "step": 13435 }, { "epoch": 0.9131675499388504, "grad_norm": 1.2681050300598145, "learning_rate": 0.0008859135072700095, "loss": 3.4453, "step": 13440 }, { "epoch": 0.9135072700095122, "grad_norm": 1.2247123718261719, "learning_rate": 0.0008858710422611768, "loss": 3.5884, "step": 13445 }, { "epoch": 0.9138469900801739, "grad_norm": 1.1919983625411987, "learning_rate": 0.0008858285772523441, "loss": 3.5317, "step": 13450 }, { "epoch": 0.9141867101508357, "grad_norm": 1.0701299905776978, "learning_rate": 0.0008857861122435113, "loss": 3.5799, "step": 13455 }, { "epoch": 0.9145264302214975, "grad_norm": 1.7923578023910522, "learning_rate": 0.0008857436472346786, "loss": 3.8968, "step": 13460 }, { "epoch": 0.9148661502921592, "grad_norm": 1.2079137563705444, "learning_rate": 0.000885701182225846, "loss": 3.7392, "step": 13465 }, { "epoch": 0.9152058703628211, "grad_norm": 1.1915786266326904, "learning_rate": 0.0008856587172170132, "loss": 3.8488, "step": 13470 }, { "epoch": 
0.9155455904334828, "grad_norm": 1.1715049743652344, "learning_rate": 0.0008856162522081804, "loss": 3.6558, "step": 13475 }, { "epoch": 0.9158853105041446, "grad_norm": 1.2295781373977661, "learning_rate": 0.0008855737871993478, "loss": 3.5021, "step": 13480 }, { "epoch": 0.9162250305748063, "grad_norm": 1.0514143705368042, "learning_rate": 0.000885531322190515, "loss": 3.5882, "step": 13485 }, { "epoch": 0.9165647506454682, "grad_norm": 1.177862286567688, "learning_rate": 0.0008854888571816822, "loss": 3.791, "step": 13490 }, { "epoch": 0.9169044707161299, "grad_norm": 1.4430205821990967, "learning_rate": 0.0008854463921728497, "loss": 3.7391, "step": 13495 }, { "epoch": 0.9172441907867916, "grad_norm": 1.159075140953064, "learning_rate": 0.0008854039271640169, "loss": 3.5383, "step": 13500 }, { "epoch": 0.9175839108574535, "grad_norm": 1.2578719854354858, "learning_rate": 0.0008853614621551841, "loss": 3.6649, "step": 13505 }, { "epoch": 0.9179236309281152, "grad_norm": 1.0370455980300903, "learning_rate": 0.0008853189971463514, "loss": 3.9877, "step": 13510 }, { "epoch": 0.918263350998777, "grad_norm": 1.2231982946395874, "learning_rate": 0.0008852765321375187, "loss": 3.2407, "step": 13515 }, { "epoch": 0.9186030710694388, "grad_norm": 1.7158890962600708, "learning_rate": 0.0008852340671286859, "loss": 3.7143, "step": 13520 }, { "epoch": 0.9189427911401006, "grad_norm": 1.1838371753692627, "learning_rate": 0.0008851916021198532, "loss": 3.6679, "step": 13525 }, { "epoch": 0.9192825112107623, "grad_norm": 1.2598528861999512, "learning_rate": 0.0008851491371110206, "loss": 3.4098, "step": 13530 }, { "epoch": 0.9196222312814241, "grad_norm": 1.1684772968292236, "learning_rate": 0.0008851066721021879, "loss": 3.8105, "step": 13535 }, { "epoch": 0.9199619513520859, "grad_norm": 1.0002024173736572, "learning_rate": 0.0008850642070933551, "loss": 3.8736, "step": 13540 }, { "epoch": 0.9203016714227477, "grad_norm": 1.515525221824646, "learning_rate": 
0.0008850217420845224, "loss": 3.5785, "step": 13545 }, { "epoch": 0.9206413914934094, "grad_norm": 1.089027762413025, "learning_rate": 0.0008849792770756897, "loss": 3.8113, "step": 13550 }, { "epoch": 0.9209811115640713, "grad_norm": 1.3131077289581299, "learning_rate": 0.0008849368120668569, "loss": 3.6878, "step": 13555 }, { "epoch": 0.921320831634733, "grad_norm": 1.5071438550949097, "learning_rate": 0.0008848943470580241, "loss": 3.647, "step": 13560 }, { "epoch": 0.9216605517053947, "grad_norm": 1.2571130990982056, "learning_rate": 0.0008848518820491916, "loss": 3.6743, "step": 13565 }, { "epoch": 0.9220002717760565, "grad_norm": 1.1678053140640259, "learning_rate": 0.0008848094170403588, "loss": 3.688, "step": 13570 }, { "epoch": 0.9223399918467183, "grad_norm": 1.0401265621185303, "learning_rate": 0.000884766952031526, "loss": 3.7275, "step": 13575 }, { "epoch": 0.9226797119173801, "grad_norm": 1.2608497142791748, "learning_rate": 0.0008847244870226934, "loss": 3.5778, "step": 13580 }, { "epoch": 0.9230194319880418, "grad_norm": 1.6294174194335938, "learning_rate": 0.0008846820220138606, "loss": 3.3659, "step": 13585 }, { "epoch": 0.9233591520587037, "grad_norm": 1.061644196510315, "learning_rate": 0.0008846395570050278, "loss": 3.6964, "step": 13590 }, { "epoch": 0.9236988721293654, "grad_norm": 1.1823369264602661, "learning_rate": 0.0008845970919961952, "loss": 3.647, "step": 13595 }, { "epoch": 0.9240385922000272, "grad_norm": 1.1110295057296753, "learning_rate": 0.0008845546269873625, "loss": 3.565, "step": 13600 }, { "epoch": 0.924378312270689, "grad_norm": 1.2129404544830322, "learning_rate": 0.0008845121619785297, "loss": 3.8305, "step": 13605 }, { "epoch": 0.9247180323413507, "grad_norm": 0.9534480571746826, "learning_rate": 0.000884469696969697, "loss": 3.7469, "step": 13610 }, { "epoch": 0.9250577524120125, "grad_norm": 1.2030699253082275, "learning_rate": 0.0008844272319608643, "loss": 3.5616, "step": 13615 }, { "epoch": 0.9253974724826742, 
"grad_norm": 1.1624113321304321, "learning_rate": 0.0008843847669520315, "loss": 3.7355, "step": 13620 }, { "epoch": 0.9257371925533361, "grad_norm": 0.9667927622795105, "learning_rate": 0.0008843423019431988, "loss": 3.76, "step": 13625 }, { "epoch": 0.9260769126239978, "grad_norm": 0.8340460062026978, "learning_rate": 0.0008842998369343661, "loss": 3.626, "step": 13630 }, { "epoch": 0.9264166326946596, "grad_norm": 1.1484729051589966, "learning_rate": 0.0008842573719255334, "loss": 3.8412, "step": 13635 }, { "epoch": 0.9267563527653214, "grad_norm": 1.4694770574569702, "learning_rate": 0.0008842149069167007, "loss": 3.6532, "step": 13640 }, { "epoch": 0.9270960728359832, "grad_norm": 1.0489370822906494, "learning_rate": 0.000884172441907868, "loss": 3.4162, "step": 13645 }, { "epoch": 0.9274357929066449, "grad_norm": 1.3452420234680176, "learning_rate": 0.0008841299768990352, "loss": 3.8212, "step": 13650 }, { "epoch": 0.9277755129773066, "grad_norm": 1.2711772918701172, "learning_rate": 0.0008840875118902025, "loss": 3.7359, "step": 13655 }, { "epoch": 0.9281152330479685, "grad_norm": 0.9878261685371399, "learning_rate": 0.0008840450468813697, "loss": 3.794, "step": 13660 }, { "epoch": 0.9284549531186302, "grad_norm": 1.1049275398254395, "learning_rate": 0.000884002581872537, "loss": 3.6401, "step": 13665 }, { "epoch": 0.928794673189292, "grad_norm": 1.1678237915039062, "learning_rate": 0.0008839601168637044, "loss": 3.8065, "step": 13670 }, { "epoch": 0.9291343932599538, "grad_norm": 1.430268406867981, "learning_rate": 0.0008839176518548716, "loss": 3.8088, "step": 13675 }, { "epoch": 0.9294741133306156, "grad_norm": 1.2549635171890259, "learning_rate": 0.0008838751868460389, "loss": 3.7127, "step": 13680 }, { "epoch": 0.9298138334012773, "grad_norm": 1.2277075052261353, "learning_rate": 0.0008838327218372062, "loss": 3.4087, "step": 13685 }, { "epoch": 0.9301535534719392, "grad_norm": 1.1633461713790894, "learning_rate": 0.0008837902568283734, "loss": 3.5131, 
"step": 13690 }, { "epoch": 0.9304932735426009, "grad_norm": 1.398976445198059, "learning_rate": 0.0008837477918195406, "loss": 3.5956, "step": 13695 }, { "epoch": 0.9308329936132627, "grad_norm": 1.0869405269622803, "learning_rate": 0.000883705326810708, "loss": 3.537, "step": 13700 }, { "epoch": 0.9311727136839244, "grad_norm": 1.2447527647018433, "learning_rate": 0.0008836628618018753, "loss": 3.5636, "step": 13705 }, { "epoch": 0.9315124337545863, "grad_norm": 0.9864159226417542, "learning_rate": 0.0008836203967930425, "loss": 3.6319, "step": 13710 }, { "epoch": 0.931852153825248, "grad_norm": 1.1072784662246704, "learning_rate": 0.0008835779317842099, "loss": 3.8386, "step": 13715 }, { "epoch": 0.9321918738959097, "grad_norm": 1.2302870750427246, "learning_rate": 0.0008835354667753771, "loss": 3.6366, "step": 13720 }, { "epoch": 0.9325315939665716, "grad_norm": 1.32057523727417, "learning_rate": 0.0008834930017665443, "loss": 3.4021, "step": 13725 }, { "epoch": 0.9328713140372333, "grad_norm": 1.0731652975082397, "learning_rate": 0.0008834505367577117, "loss": 3.5382, "step": 13730 }, { "epoch": 0.9332110341078951, "grad_norm": 1.184198260307312, "learning_rate": 0.0008834080717488789, "loss": 3.8786, "step": 13735 }, { "epoch": 0.9335507541785568, "grad_norm": 1.1093766689300537, "learning_rate": 0.0008833656067400462, "loss": 3.6697, "step": 13740 }, { "epoch": 0.9338904742492187, "grad_norm": 1.2387014627456665, "learning_rate": 0.0008833231417312136, "loss": 3.7425, "step": 13745 }, { "epoch": 0.9342301943198804, "grad_norm": 1.25377357006073, "learning_rate": 0.0008832806767223808, "loss": 3.1989, "step": 13750 }, { "epoch": 0.9345699143905422, "grad_norm": 0.9722593426704407, "learning_rate": 0.000883238211713548, "loss": 3.8497, "step": 13755 }, { "epoch": 0.934909634461204, "grad_norm": 1.3273773193359375, "learning_rate": 0.0008831957467047153, "loss": 3.6185, "step": 13760 }, { "epoch": 0.9352493545318658, "grad_norm": 1.1958868503570557, 
"learning_rate": 0.0008831532816958826, "loss": 3.6684, "step": 13765 }, { "epoch": 0.9355890746025275, "grad_norm": 0.9380003809928894, "learning_rate": 0.0008831108166870498, "loss": 3.7783, "step": 13770 }, { "epoch": 0.9359287946731893, "grad_norm": 1.1377553939819336, "learning_rate": 0.0008830683516782172, "loss": 3.4209, "step": 13775 }, { "epoch": 0.9362685147438511, "grad_norm": 1.4113825559616089, "learning_rate": 0.0008830258866693845, "loss": 3.5765, "step": 13780 }, { "epoch": 0.9366082348145128, "grad_norm": 0.9355831146240234, "learning_rate": 0.0008829834216605517, "loss": 3.6955, "step": 13785 }, { "epoch": 0.9369479548851746, "grad_norm": 1.0445994138717651, "learning_rate": 0.000882940956651719, "loss": 3.6129, "step": 13790 }, { "epoch": 0.9372876749558364, "grad_norm": 1.3933593034744263, "learning_rate": 0.0008828984916428862, "loss": 3.7087, "step": 13795 }, { "epoch": 0.9376273950264982, "grad_norm": 1.1103403568267822, "learning_rate": 0.0008828560266340535, "loss": 3.406, "step": 13800 }, { "epoch": 0.9379671150971599, "grad_norm": 1.002160668373108, "learning_rate": 0.0008828135616252208, "loss": 3.6318, "step": 13805 }, { "epoch": 0.9383068351678218, "grad_norm": 1.2856354713439941, "learning_rate": 0.0008827710966163881, "loss": 3.3108, "step": 13810 }, { "epoch": 0.9386465552384835, "grad_norm": 1.2478870153427124, "learning_rate": 0.0008827286316075554, "loss": 3.6261, "step": 13815 }, { "epoch": 0.9389862753091452, "grad_norm": 2.202183485031128, "learning_rate": 0.0008826861665987227, "loss": 3.7187, "step": 13820 }, { "epoch": 0.939325995379807, "grad_norm": 1.4374901056289673, "learning_rate": 0.0008826437015898899, "loss": 3.3103, "step": 13825 }, { "epoch": 0.9396657154504688, "grad_norm": 1.4265103340148926, "learning_rate": 0.0008826012365810572, "loss": 3.7141, "step": 13830 }, { "epoch": 0.9400054355211306, "grad_norm": 1.216286301612854, "learning_rate": 0.0008825587715722245, "loss": 3.5394, "step": 13835 }, { "epoch": 
0.9403451555917923, "grad_norm": 1.0592319965362549, "learning_rate": 0.0008825163065633917, "loss": 3.6039, "step": 13840 }, { "epoch": 0.9406848756624542, "grad_norm": 1.493869423866272, "learning_rate": 0.000882473841554559, "loss": 3.4544, "step": 13845 }, { "epoch": 0.9410245957331159, "grad_norm": 1.0164966583251953, "learning_rate": 0.0008824313765457264, "loss": 3.7629, "step": 13850 }, { "epoch": 0.9413643158037777, "grad_norm": 0.9465974569320679, "learning_rate": 0.0008823889115368936, "loss": 3.727, "step": 13855 }, { "epoch": 0.9417040358744395, "grad_norm": 1.2534750699996948, "learning_rate": 0.0008823464465280608, "loss": 3.7027, "step": 13860 }, { "epoch": 0.9420437559451013, "grad_norm": 1.5418519973754883, "learning_rate": 0.0008823039815192282, "loss": 3.4023, "step": 13865 }, { "epoch": 0.942383476015763, "grad_norm": 1.2898017168045044, "learning_rate": 0.0008822615165103954, "loss": 3.6258, "step": 13870 }, { "epoch": 0.9427231960864247, "grad_norm": 1.4677867889404297, "learning_rate": 0.0008822190515015628, "loss": 3.6803, "step": 13875 }, { "epoch": 0.9430629161570866, "grad_norm": 1.337448239326477, "learning_rate": 0.0008821765864927301, "loss": 3.6373, "step": 13880 }, { "epoch": 0.9434026362277483, "grad_norm": 1.243034839630127, "learning_rate": 0.0008821341214838973, "loss": 3.3972, "step": 13885 }, { "epoch": 0.9437423562984101, "grad_norm": 1.099745750427246, "learning_rate": 0.0008820916564750646, "loss": 3.8262, "step": 13890 }, { "epoch": 0.9440820763690719, "grad_norm": 1.1988896131515503, "learning_rate": 0.0008820491914662319, "loss": 3.7854, "step": 13895 }, { "epoch": 0.9444217964397337, "grad_norm": 1.6837581396102905, "learning_rate": 0.0008820067264573991, "loss": 3.5802, "step": 13900 }, { "epoch": 0.9447615165103954, "grad_norm": 1.3799346685409546, "learning_rate": 0.0008819642614485664, "loss": 3.6508, "step": 13905 }, { "epoch": 0.9451012365810572, "grad_norm": 1.2317233085632324, "learning_rate": 
0.0008819217964397337, "loss": 3.8534, "step": 13910 }, { "epoch": 0.945440956651719, "grad_norm": 1.3975247144699097, "learning_rate": 0.000881879331430901, "loss": 3.6083, "step": 13915 }, { "epoch": 0.9457806767223808, "grad_norm": 1.1166316270828247, "learning_rate": 0.0008818368664220683, "loss": 3.4552, "step": 13920 }, { "epoch": 0.9461203967930425, "grad_norm": 1.341982364654541, "learning_rate": 0.0008817944014132355, "loss": 3.8102, "step": 13925 }, { "epoch": 0.9464601168637043, "grad_norm": 1.2511407136917114, "learning_rate": 0.0008817519364044028, "loss": 3.5043, "step": 13930 }, { "epoch": 0.9467998369343661, "grad_norm": 1.2485253810882568, "learning_rate": 0.0008817094713955701, "loss": 3.5323, "step": 13935 }, { "epoch": 0.9471395570050278, "grad_norm": 1.5511631965637207, "learning_rate": 0.0008816670063867373, "loss": 3.8441, "step": 13940 }, { "epoch": 0.9474792770756897, "grad_norm": 1.4241052865982056, "learning_rate": 0.0008816245413779047, "loss": 3.7155, "step": 13945 }, { "epoch": 0.9478189971463514, "grad_norm": 1.2028802633285522, "learning_rate": 0.000881582076369072, "loss": 3.6956, "step": 13950 }, { "epoch": 0.9481587172170132, "grad_norm": 1.0426777601242065, "learning_rate": 0.0008815396113602392, "loss": 3.4119, "step": 13955 }, { "epoch": 0.9484984372876749, "grad_norm": 1.0980159044265747, "learning_rate": 0.0008814971463514064, "loss": 3.5858, "step": 13960 }, { "epoch": 0.9488381573583368, "grad_norm": 1.2153681516647339, "learning_rate": 0.0008814546813425738, "loss": 3.4158, "step": 13965 }, { "epoch": 0.9491778774289985, "grad_norm": 1.8031911849975586, "learning_rate": 0.000881412216333741, "loss": 3.8891, "step": 13970 }, { "epoch": 0.9495175974996602, "grad_norm": 1.1218714714050293, "learning_rate": 0.0008813697513249082, "loss": 3.6798, "step": 13975 }, { "epoch": 0.9498573175703221, "grad_norm": 1.0135219097137451, "learning_rate": 0.0008813272863160757, "loss": 3.7909, "step": 13980 }, { "epoch": 0.9501970376409838, 
"grad_norm": 1.280370831489563, "learning_rate": 0.0008812848213072429, "loss": 3.5446, "step": 13985 }, { "epoch": 0.9505367577116456, "grad_norm": 1.0515334606170654, "learning_rate": 0.0008812423562984101, "loss": 3.6556, "step": 13990 }, { "epoch": 0.9508764777823073, "grad_norm": 1.2278505563735962, "learning_rate": 0.0008811998912895775, "loss": 3.7037, "step": 13995 }, { "epoch": 0.9512161978529692, "grad_norm": 1.2335460186004639, "learning_rate": 0.0008811574262807447, "loss": 3.4961, "step": 14000 }, { "epoch": 0.9515559179236309, "grad_norm": 1.1119612455368042, "learning_rate": 0.0008811149612719119, "loss": 3.3324, "step": 14005 }, { "epoch": 0.9518956379942927, "grad_norm": 1.1249746084213257, "learning_rate": 0.0008810724962630792, "loss": 3.5078, "step": 14010 }, { "epoch": 0.9522353580649545, "grad_norm": 1.3419711589813232, "learning_rate": 0.0008810300312542466, "loss": 3.7679, "step": 14015 }, { "epoch": 0.9525750781356163, "grad_norm": 1.195621371269226, "learning_rate": 0.0008809875662454138, "loss": 3.6958, "step": 14020 }, { "epoch": 0.952914798206278, "grad_norm": 1.1971609592437744, "learning_rate": 0.0008809451012365811, "loss": 3.7819, "step": 14025 }, { "epoch": 0.9532545182769399, "grad_norm": 1.1075963973999023, "learning_rate": 0.0008809026362277484, "loss": 3.6199, "step": 14030 }, { "epoch": 0.9535942383476016, "grad_norm": 1.0707439184188843, "learning_rate": 0.0008808601712189156, "loss": 3.6435, "step": 14035 }, { "epoch": 0.9539339584182633, "grad_norm": 1.0981413125991821, "learning_rate": 0.0008808177062100829, "loss": 3.7327, "step": 14040 }, { "epoch": 0.9542736784889251, "grad_norm": 1.543609380722046, "learning_rate": 0.0008807752412012501, "loss": 3.6762, "step": 14045 }, { "epoch": 0.9546133985595869, "grad_norm": 1.190970778465271, "learning_rate": 0.0008807327761924175, "loss": 3.8265, "step": 14050 }, { "epoch": 0.9549531186302487, "grad_norm": 1.1554696559906006, "learning_rate": 0.0008806903111835848, "loss": 
3.6334, "step": 14055 }, { "epoch": 0.9552928387009104, "grad_norm": 1.2042808532714844, "learning_rate": 0.000880647846174752, "loss": 3.5592, "step": 14060 }, { "epoch": 0.9556325587715723, "grad_norm": 1.3109101057052612, "learning_rate": 0.0008806053811659193, "loss": 3.622, "step": 14065 }, { "epoch": 0.955972278842234, "grad_norm": 1.3019050359725952, "learning_rate": 0.0008805629161570866, "loss": 3.583, "step": 14070 }, { "epoch": 0.9563119989128958, "grad_norm": 1.1812946796417236, "learning_rate": 0.0008805204511482538, "loss": 3.8246, "step": 14075 }, { "epoch": 0.9566517189835575, "grad_norm": 1.795888900756836, "learning_rate": 0.000880477986139421, "loss": 3.7612, "step": 14080 }, { "epoch": 0.9569914390542194, "grad_norm": 1.1287891864776611, "learning_rate": 0.0008804355211305885, "loss": 3.6079, "step": 14085 }, { "epoch": 0.9573311591248811, "grad_norm": 1.3274520635604858, "learning_rate": 0.0008803930561217557, "loss": 3.6443, "step": 14090 }, { "epoch": 0.9576708791955428, "grad_norm": 1.3271422386169434, "learning_rate": 0.0008803505911129229, "loss": 3.7127, "step": 14095 }, { "epoch": 0.9580105992662047, "grad_norm": 1.698102593421936, "learning_rate": 0.0008803081261040903, "loss": 3.5242, "step": 14100 }, { "epoch": 0.9583503193368664, "grad_norm": 1.3859165906906128, "learning_rate": 0.0008802656610952575, "loss": 3.4537, "step": 14105 }, { "epoch": 0.9586900394075282, "grad_norm": 1.2509762048721313, "learning_rate": 0.0008802231960864247, "loss": 3.4668, "step": 14110 }, { "epoch": 0.95902975947819, "grad_norm": 6.055736541748047, "learning_rate": 0.0008801807310775921, "loss": 3.6364, "step": 14115 }, { "epoch": 0.9593694795488518, "grad_norm": 1.2248291969299316, "learning_rate": 0.0008801382660687594, "loss": 4.0127, "step": 14120 }, { "epoch": 0.9597091996195135, "grad_norm": 1.3742849826812744, "learning_rate": 0.0008800958010599266, "loss": 3.7946, "step": 14125 }, { "epoch": 0.9600489196901753, "grad_norm": 1.3071458339691162, 
"learning_rate": 0.000880053336051094, "loss": 3.6854, "step": 14130 }, { "epoch": 0.9603886397608371, "grad_norm": 1.488822340965271, "learning_rate": 0.0008800108710422612, "loss": 3.681, "step": 14135 }, { "epoch": 0.9607283598314988, "grad_norm": 1.1472152471542358, "learning_rate": 0.0008799684060334284, "loss": 3.7819, "step": 14140 }, { "epoch": 0.9610680799021606, "grad_norm": 0.9417963624000549, "learning_rate": 0.0008799259410245957, "loss": 3.5883, "step": 14145 }, { "epoch": 0.9614077999728224, "grad_norm": 1.0449095964431763, "learning_rate": 0.000879883476015763, "loss": 3.8401, "step": 14150 }, { "epoch": 0.9617475200434842, "grad_norm": 1.3201948404312134, "learning_rate": 0.0008798410110069303, "loss": 3.6585, "step": 14155 }, { "epoch": 0.9620872401141459, "grad_norm": 1.1617003679275513, "learning_rate": 0.0008797985459980976, "loss": 3.6885, "step": 14160 }, { "epoch": 0.9624269601848077, "grad_norm": 1.1842656135559082, "learning_rate": 0.0008797560809892649, "loss": 3.7156, "step": 14165 }, { "epoch": 0.9627666802554695, "grad_norm": 1.257477045059204, "learning_rate": 0.0008797136159804321, "loss": 3.5835, "step": 14170 }, { "epoch": 0.9631064003261313, "grad_norm": 1.267675757408142, "learning_rate": 0.0008796711509715994, "loss": 3.6434, "step": 14175 }, { "epoch": 0.963446120396793, "grad_norm": 1.413139820098877, "learning_rate": 0.0008796286859627667, "loss": 3.6077, "step": 14180 }, { "epoch": 0.9637858404674549, "grad_norm": 1.2480072975158691, "learning_rate": 0.0008795862209539339, "loss": 3.8307, "step": 14185 }, { "epoch": 0.9641255605381166, "grad_norm": 1.309422254562378, "learning_rate": 0.0008795437559451013, "loss": 3.5559, "step": 14190 }, { "epoch": 0.9644652806087783, "grad_norm": 2.709667921066284, "learning_rate": 0.0008795012909362685, "loss": 3.6798, "step": 14195 }, { "epoch": 0.9648050006794402, "grad_norm": 6.114317417144775, "learning_rate": 0.0008794588259274358, "loss": 3.6044, "step": 14200 }, { "epoch": 
0.9651447207501019, "grad_norm": 1.5850796699523926, "learning_rate": 0.0008794163609186031, "loss": 3.5533, "step": 14205 }, { "epoch": 0.9654844408207637, "grad_norm": 3.501887559890747, "learning_rate": 0.0008793738959097703, "loss": 3.6766, "step": 14210 }, { "epoch": 0.9658241608914254, "grad_norm": 1.3142457008361816, "learning_rate": 0.0008793314309009377, "loss": 3.6263, "step": 14215 }, { "epoch": 0.9661638809620873, "grad_norm": 1.5193803310394287, "learning_rate": 0.0008792889658921049, "loss": 3.8125, "step": 14220 }, { "epoch": 0.966503601032749, "grad_norm": 1.3773170709609985, "learning_rate": 0.0008792465008832722, "loss": 3.6716, "step": 14225 }, { "epoch": 0.9668433211034108, "grad_norm": 1.188973069190979, "learning_rate": 0.0008792040358744396, "loss": 3.6684, "step": 14230 }, { "epoch": 0.9671830411740726, "grad_norm": 1.0379574298858643, "learning_rate": 0.0008791615708656068, "loss": 3.7082, "step": 14235 }, { "epoch": 0.9675227612447344, "grad_norm": 1.481623888015747, "learning_rate": 0.000879119105856774, "loss": 3.5328, "step": 14240 }, { "epoch": 0.9678624813153961, "grad_norm": 1.2748521566390991, "learning_rate": 0.0008790766408479413, "loss": 3.569, "step": 14245 }, { "epoch": 0.9682022013860578, "grad_norm": 1.1588187217712402, "learning_rate": 0.0008790341758391086, "loss": 3.7017, "step": 14250 }, { "epoch": 0.9685419214567197, "grad_norm": 1.0978269577026367, "learning_rate": 0.0008789917108302758, "loss": 3.7861, "step": 14255 }, { "epoch": 0.9688816415273814, "grad_norm": 1.0008482933044434, "learning_rate": 0.0008789492458214432, "loss": 3.7188, "step": 14260 }, { "epoch": 0.9692213615980432, "grad_norm": 1.0288275480270386, "learning_rate": 0.0008789067808126105, "loss": 3.5935, "step": 14265 }, { "epoch": 0.969561081668705, "grad_norm": 1.2539006471633911, "learning_rate": 0.0008788643158037777, "loss": 3.6319, "step": 14270 }, { "epoch": 0.9699008017393668, "grad_norm": 1.399878740310669, "learning_rate": 
0.000878821850794945, "loss": 3.7241, "step": 14275 }, { "epoch": 0.9702405218100285, "grad_norm": 1.6239129304885864, "learning_rate": 0.0008787793857861123, "loss": 3.5754, "step": 14280 }, { "epoch": 0.9705802418806904, "grad_norm": 1.3193047046661377, "learning_rate": 0.0008787369207772795, "loss": 3.6795, "step": 14285 }, { "epoch": 0.9709199619513521, "grad_norm": 1.8831861019134521, "learning_rate": 0.0008786944557684468, "loss": 3.6868, "step": 14290 }, { "epoch": 0.9712596820220138, "grad_norm": 1.137747049331665, "learning_rate": 0.0008786519907596141, "loss": 3.6618, "step": 14295 }, { "epoch": 0.9715994020926756, "grad_norm": 1.3897204399108887, "learning_rate": 0.0008786095257507814, "loss": 3.6164, "step": 14300 }, { "epoch": 0.9719391221633374, "grad_norm": 1.4695711135864258, "learning_rate": 0.0008785670607419487, "loss": 3.7722, "step": 14305 }, { "epoch": 0.9722788422339992, "grad_norm": 1.7313364744186401, "learning_rate": 0.0008785245957331159, "loss": 3.6662, "step": 14310 }, { "epoch": 0.9726185623046609, "grad_norm": 1.212051272392273, "learning_rate": 0.0008784821307242832, "loss": 3.4824, "step": 14315 }, { "epoch": 0.9729582823753228, "grad_norm": 1.091705560684204, "learning_rate": 0.0008784396657154505, "loss": 3.6232, "step": 14320 }, { "epoch": 0.9732980024459845, "grad_norm": 1.4533251523971558, "learning_rate": 0.0008783972007066177, "loss": 3.2554, "step": 14325 }, { "epoch": 0.9736377225166463, "grad_norm": 1.067510962486267, "learning_rate": 0.0008783547356977851, "loss": 3.7549, "step": 14330 }, { "epoch": 0.973977442587308, "grad_norm": 1.0597604513168335, "learning_rate": 0.0008783122706889524, "loss": 3.7772, "step": 14335 }, { "epoch": 0.9743171626579699, "grad_norm": 0.9514012932777405, "learning_rate": 0.0008782698056801196, "loss": 3.607, "step": 14340 }, { "epoch": 0.9746568827286316, "grad_norm": 1.1274350881576538, "learning_rate": 0.0008782273406712868, "loss": 3.7647, "step": 14345 }, { "epoch": 0.9749966027992933, 
"grad_norm": 1.273849368095398, "learning_rate": 0.0008781848756624542, "loss": 3.6511, "step": 14350 }, { "epoch": 0.9753363228699552, "grad_norm": 1.2117702960968018, "learning_rate": 0.0008781424106536214, "loss": 3.2943, "step": 14355 }, { "epoch": 0.9756760429406169, "grad_norm": 1.274061918258667, "learning_rate": 0.0008780999456447886, "loss": 3.7661, "step": 14360 }, { "epoch": 0.9760157630112787, "grad_norm": 1.100542664527893, "learning_rate": 0.0008780574806359561, "loss": 3.6329, "step": 14365 }, { "epoch": 0.9763554830819405, "grad_norm": 1.2153478860855103, "learning_rate": 0.0008780150156271233, "loss": 3.7532, "step": 14370 }, { "epoch": 0.9766952031526023, "grad_norm": 1.3136975765228271, "learning_rate": 0.0008779725506182905, "loss": 3.5171, "step": 14375 }, { "epoch": 0.977034923223264, "grad_norm": 1.210275411605835, "learning_rate": 0.0008779300856094579, "loss": 3.7229, "step": 14380 }, { "epoch": 0.9773746432939258, "grad_norm": 0.9341737627983093, "learning_rate": 0.0008778876206006251, "loss": 3.3051, "step": 14385 }, { "epoch": 0.9777143633645876, "grad_norm": 1.5256357192993164, "learning_rate": 0.0008778451555917923, "loss": 3.4641, "step": 14390 }, { "epoch": 0.9780540834352494, "grad_norm": 1.2488961219787598, "learning_rate": 0.0008778026905829596, "loss": 3.7581, "step": 14395 }, { "epoch": 0.9783938035059111, "grad_norm": 1.273672342300415, "learning_rate": 0.000877760225574127, "loss": 3.6601, "step": 14400 }, { "epoch": 0.978733523576573, "grad_norm": 1.1529314517974854, "learning_rate": 0.0008777177605652942, "loss": 3.4095, "step": 14405 }, { "epoch": 0.9790732436472347, "grad_norm": 1.1607656478881836, "learning_rate": 0.0008776752955564615, "loss": 3.4975, "step": 14410 }, { "epoch": 0.9794129637178964, "grad_norm": 1.0669053792953491, "learning_rate": 0.0008776328305476288, "loss": 3.6752, "step": 14415 }, { "epoch": 0.9797526837885582, "grad_norm": 1.2665882110595703, "learning_rate": 0.000877590365538796, "loss": 3.4071, 
"step": 14420 }, { "epoch": 0.98009240385922, "grad_norm": 1.1002538204193115, "learning_rate": 0.0008775479005299633, "loss": 3.4528, "step": 14425 }, { "epoch": 0.9804321239298818, "grad_norm": 1.425880789756775, "learning_rate": 0.0008775054355211305, "loss": 3.8156, "step": 14430 }, { "epoch": 0.9807718440005435, "grad_norm": 2.2575669288635254, "learning_rate": 0.0008774629705122979, "loss": 3.2915, "step": 14435 }, { "epoch": 0.9811115640712054, "grad_norm": 1.2759031057357788, "learning_rate": 0.0008774205055034652, "loss": 3.6947, "step": 14440 }, { "epoch": 0.9814512841418671, "grad_norm": 1.20292329788208, "learning_rate": 0.0008773780404946324, "loss": 3.7734, "step": 14445 }, { "epoch": 0.9817910042125289, "grad_norm": 1.38677179813385, "learning_rate": 0.0008773355754857997, "loss": 3.3036, "step": 14450 }, { "epoch": 0.9821307242831907, "grad_norm": 0.81452876329422, "learning_rate": 0.000877293110476967, "loss": 3.7765, "step": 14455 }, { "epoch": 0.9824704443538524, "grad_norm": 1.570940613746643, "learning_rate": 0.0008772506454681342, "loss": 3.5271, "step": 14460 }, { "epoch": 0.9828101644245142, "grad_norm": 1.052893042564392, "learning_rate": 0.0008772081804593015, "loss": 3.7058, "step": 14465 }, { "epoch": 0.9831498844951759, "grad_norm": 1.3114746809005737, "learning_rate": 0.0008771657154504689, "loss": 3.679, "step": 14470 }, { "epoch": 0.9834896045658378, "grad_norm": 1.344246745109558, "learning_rate": 0.0008771232504416361, "loss": 3.3982, "step": 14475 }, { "epoch": 0.9838293246364995, "grad_norm": 1.4735846519470215, "learning_rate": 0.0008770807854328033, "loss": 3.4866, "step": 14480 }, { "epoch": 0.9841690447071613, "grad_norm": 1.0640825033187866, "learning_rate": 0.0008770383204239707, "loss": 3.7484, "step": 14485 }, { "epoch": 0.9845087647778231, "grad_norm": 1.0853455066680908, "learning_rate": 0.0008769958554151379, "loss": 3.583, "step": 14490 }, { "epoch": 0.9848484848484849, "grad_norm": 1.7790824174880981, 
"learning_rate": 0.0008769533904063051, "loss": 3.8163, "step": 14495 }, { "epoch": 0.9851882049191466, "grad_norm": 1.4722776412963867, "learning_rate": 0.0008769109253974726, "loss": 3.6478, "step": 14500 }, { "epoch": 0.9855279249898083, "grad_norm": 1.3810869455337524, "learning_rate": 0.0008768684603886398, "loss": 3.5895, "step": 14505 }, { "epoch": 0.9858676450604702, "grad_norm": 1.6166229248046875, "learning_rate": 0.000876825995379807, "loss": 3.5227, "step": 14510 }, { "epoch": 0.9862073651311319, "grad_norm": 1.211607813835144, "learning_rate": 0.0008767835303709744, "loss": 3.8338, "step": 14515 }, { "epoch": 0.9865470852017937, "grad_norm": 1.0663131475448608, "learning_rate": 0.0008767410653621416, "loss": 3.5474, "step": 14520 }, { "epoch": 0.9868868052724555, "grad_norm": 0.9875766634941101, "learning_rate": 0.0008766986003533088, "loss": 3.4877, "step": 14525 }, { "epoch": 0.9872265253431173, "grad_norm": 1.0785655975341797, "learning_rate": 0.0008766561353444761, "loss": 3.7007, "step": 14530 }, { "epoch": 0.987566245413779, "grad_norm": 1.2814615964889526, "learning_rate": 0.0008766136703356435, "loss": 3.6322, "step": 14535 }, { "epoch": 0.9879059654844409, "grad_norm": 2.3334672451019287, "learning_rate": 0.0008765712053268107, "loss": 3.7809, "step": 14540 }, { "epoch": 0.9882456855551026, "grad_norm": 0.9619077444076538, "learning_rate": 0.000876528740317978, "loss": 3.4883, "step": 14545 }, { "epoch": 0.9885854056257644, "grad_norm": 1.517042875289917, "learning_rate": 0.0008764862753091453, "loss": 3.581, "step": 14550 }, { "epoch": 0.9889251256964261, "grad_norm": 9.343836784362793, "learning_rate": 0.0008764438103003126, "loss": 3.5723, "step": 14555 }, { "epoch": 0.989264845767088, "grad_norm": 3.0438098907470703, "learning_rate": 0.0008764013452914798, "loss": 3.5592, "step": 14560 }, { "epoch": 0.9896045658377497, "grad_norm": 1.139878749847412, "learning_rate": 0.0008763588802826471, "loss": 3.6686, "step": 14565 }, { "epoch": 
0.9899442859084114, "grad_norm": 0.9311371445655823, "learning_rate": 0.0008763164152738145, "loss": 3.4829, "step": 14570 }, { "epoch": 0.9902840059790733, "grad_norm": 2.2212915420532227, "learning_rate": 0.0008762739502649817, "loss": 3.7577, "step": 14575 }, { "epoch": 0.990623726049735, "grad_norm": 1.406194806098938, "learning_rate": 0.000876231485256149, "loss": 3.5797, "step": 14580 }, { "epoch": 0.9909634461203968, "grad_norm": 1.1515858173370361, "learning_rate": 0.0008761890202473163, "loss": 3.6269, "step": 14585 }, { "epoch": 0.9913031661910585, "grad_norm": 1.0352131128311157, "learning_rate": 0.0008761465552384835, "loss": 3.7895, "step": 14590 }, { "epoch": 0.9916428862617204, "grad_norm": 1.6122925281524658, "learning_rate": 0.0008761040902296507, "loss": 3.607, "step": 14595 }, { "epoch": 0.9919826063323821, "grad_norm": 1.0801987648010254, "learning_rate": 0.0008760616252208181, "loss": 3.692, "step": 14600 }, { "epoch": 0.9923223264030439, "grad_norm": 1.3972982168197632, "learning_rate": 0.0008760191602119854, "loss": 3.9084, "step": 14605 }, { "epoch": 0.9926620464737057, "grad_norm": 1.2603524923324585, "learning_rate": 0.0008759766952031526, "loss": 3.5638, "step": 14610 }, { "epoch": 0.9930017665443674, "grad_norm": 21.35373306274414, "learning_rate": 0.00087593423019432, "loss": 3.7341, "step": 14615 }, { "epoch": 0.9933414866150292, "grad_norm": 1.1859757900238037, "learning_rate": 0.0008758917651854872, "loss": 3.5605, "step": 14620 }, { "epoch": 0.993681206685691, "grad_norm": 1.153773307800293, "learning_rate": 0.0008758493001766544, "loss": 3.6358, "step": 14625 }, { "epoch": 0.9940209267563528, "grad_norm": 2.2649073600769043, "learning_rate": 0.0008758068351678218, "loss": 3.8308, "step": 14630 }, { "epoch": 0.9943606468270145, "grad_norm": 1.3504188060760498, "learning_rate": 0.000875764370158989, "loss": 3.5556, "step": 14635 }, { "epoch": 0.9947003668976763, "grad_norm": 1.117344617843628, "learning_rate": 0.0008757219051501563, 
"loss": 3.7892, "step": 14640 }, { "epoch": 0.9950400869683381, "grad_norm": 1.6963260173797607, "learning_rate": 0.0008756794401413236, "loss": 3.5775, "step": 14645 }, { "epoch": 0.9953798070389999, "grad_norm": 5.079668045043945, "learning_rate": 0.0008756369751324909, "loss": 3.588, "step": 14650 }, { "epoch": 0.9957195271096616, "grad_norm": 1.3253083229064941, "learning_rate": 0.0008755945101236581, "loss": 3.6551, "step": 14655 }, { "epoch": 0.9960592471803235, "grad_norm": 1.172369122505188, "learning_rate": 0.0008755520451148254, "loss": 3.6553, "step": 14660 }, { "epoch": 0.9963989672509852, "grad_norm": 1.340739130973816, "learning_rate": 0.0008755095801059927, "loss": 3.8708, "step": 14665 }, { "epoch": 0.9967386873216469, "grad_norm": 1.7406255006790161, "learning_rate": 0.0008754671150971599, "loss": 3.7522, "step": 14670 }, { "epoch": 0.9970784073923087, "grad_norm": 1.435092806816101, "learning_rate": 0.0008754246500883273, "loss": 3.7808, "step": 14675 }, { "epoch": 0.9974181274629705, "grad_norm": 1.3170384168624878, "learning_rate": 0.0008753821850794946, "loss": 3.7364, "step": 14680 }, { "epoch": 0.9977578475336323, "grad_norm": 1.2595895528793335, "learning_rate": 0.0008753397200706618, "loss": 3.5988, "step": 14685 }, { "epoch": 0.998097567604294, "grad_norm": 0.9023413062095642, "learning_rate": 0.0008752972550618291, "loss": 3.77, "step": 14690 }, { "epoch": 0.9984372876749559, "grad_norm": 1.3387391567230225, "learning_rate": 0.0008752547900529963, "loss": 3.9364, "step": 14695 }, { "epoch": 0.9987770077456176, "grad_norm": 1.2403621673583984, "learning_rate": 0.0008752123250441636, "loss": 3.8366, "step": 14700 }, { "epoch": 0.9991167278162794, "grad_norm": 1.5008667707443237, "learning_rate": 0.0008751698600353309, "loss": 3.5939, "step": 14705 }, { "epoch": 0.9994564478869412, "grad_norm": 1.4832103252410889, "learning_rate": 0.0008751273950264982, "loss": 3.768, "step": 14710 }, { "epoch": 0.999796167957603, "grad_norm": 
1.6182820796966553, "learning_rate": 0.0008750849300176655, "loss": 3.9202, "step": 14715 }, { "epoch": 1.0, "eval_bertscore": { "f1": 0.837438708059154, "precision": 0.8349307767121942, "recall": 0.8412848847909818 }, "eval_bleu_4": 0.016588739555373093, "eval_exact_match": 9.690861517588914e-05, "eval_loss": 3.5278615951538086, "eval_meteor": 0.08738420772994102, "eval_rouge": { "rouge1": 0.11336883654851658, "rouge2": 0.016344355216042498, "rougeL": 0.09790528755291529, "rougeLsum": 0.09798717278415087 }, "eval_runtime": 3211.6832, "eval_samples_per_second": 3.213, "eval_steps_per_second": 0.402, "step": 14718 }, { "epoch": 1.0001358880282647, "grad_norm": 1.3166906833648682, "learning_rate": 0.0008750424650088328, "loss": 3.6356, "step": 14720 }, { "epoch": 1.0004756080989265, "grad_norm": 2.407972812652588, "learning_rate": 0.000875, "loss": 3.5606, "step": 14725 }, { "epoch": 1.0008153281695882, "grad_norm": 1.651012897491455, "learning_rate": 0.0008749575349911672, "loss": 3.5419, "step": 14730 }, { "epoch": 1.00115504824025, "grad_norm": 1.300134301185608, "learning_rate": 0.0008749150699823346, "loss": 3.6811, "step": 14735 }, { "epoch": 1.0014947683109119, "grad_norm": 1.3720903396606445, "learning_rate": 0.0008748726049735018, "loss": 3.5771, "step": 14740 }, { "epoch": 1.0018344883815735, "grad_norm": 3.1214053630828857, "learning_rate": 0.0008748301399646691, "loss": 3.6681, "step": 14745 }, { "epoch": 1.0021742084522354, "grad_norm": 1.5596997737884521, "learning_rate": 0.0008747876749558365, "loss": 3.344, "step": 14750 }, { "epoch": 1.0025139285228972, "grad_norm": 1.4524495601654053, "learning_rate": 0.0008747452099470037, "loss": 3.4725, "step": 14755 }, { "epoch": 1.0028536485935589, "grad_norm": 1.9091004133224487, "learning_rate": 0.0008747027449381709, "loss": 3.6978, "step": 14760 }, { "epoch": 1.0031933686642207, "grad_norm": 1.2002027034759521, "learning_rate": 0.0008746602799293383, "loss": 3.6221, "step": 14765 }, { "epoch": 
1.0035330887348826, "grad_norm": 1.177920937538147, "learning_rate": 0.0008746178149205055, "loss": 3.4629, "step": 14770 }, { "epoch": 1.0038728088055442, "grad_norm": 1.4337910413742065, "learning_rate": 0.0008745753499116727, "loss": 3.725, "step": 14775 }, { "epoch": 1.004212528876206, "grad_norm": 1.5984669923782349, "learning_rate": 0.0008745328849028402, "loss": 3.5906, "step": 14780 }, { "epoch": 1.0045522489468677, "grad_norm": 1.5136847496032715, "learning_rate": 0.0008744904198940074, "loss": 3.6977, "step": 14785 }, { "epoch": 1.0048919690175295, "grad_norm": 1.5870412588119507, "learning_rate": 0.0008744479548851746, "loss": 3.7521, "step": 14790 }, { "epoch": 1.0052316890881914, "grad_norm": 1.2812384366989136, "learning_rate": 0.0008744054898763419, "loss": 3.4102, "step": 14795 }, { "epoch": 1.005571409158853, "grad_norm": 2.435668468475342, "learning_rate": 0.0008743630248675092, "loss": 3.5746, "step": 14800 }, { "epoch": 1.0059111292295149, "grad_norm": 1.6149853467941284, "learning_rate": 0.0008743205598586764, "loss": 3.5294, "step": 14805 }, { "epoch": 1.0062508493001767, "grad_norm": 1.4740389585494995, "learning_rate": 0.0008742780948498437, "loss": 3.4822, "step": 14810 }, { "epoch": 1.0065905693708384, "grad_norm": 7.994239807128906, "learning_rate": 0.0008742356298410111, "loss": 3.4471, "step": 14815 }, { "epoch": 1.0069302894415002, "grad_norm": 1.558164358139038, "learning_rate": 0.0008741931648321783, "loss": 3.9393, "step": 14820 }, { "epoch": 1.007270009512162, "grad_norm": 1.894185185432434, "learning_rate": 0.0008741506998233456, "loss": 3.6126, "step": 14825 }, { "epoch": 1.0076097295828237, "grad_norm": 1.32089102268219, "learning_rate": 0.0008741082348145128, "loss": 3.6092, "step": 14830 }, { "epoch": 1.0079494496534855, "grad_norm": 1.5490529537200928, "learning_rate": 0.0008740657698056801, "loss": 3.6779, "step": 14835 }, { "epoch": 1.0082891697241474, "grad_norm": 1.324292540550232, "learning_rate": 0.0008740233047968474, 
"loss": 3.581, "step": 14840 }, { "epoch": 1.008628889794809, "grad_norm": 1.3331700563430786, "learning_rate": 0.0008739808397880146, "loss": 3.9231, "step": 14845 }, { "epoch": 1.0089686098654709, "grad_norm": 1.7364434003829956, "learning_rate": 0.000873938374779182, "loss": 3.8105, "step": 14850 }, { "epoch": 1.0093083299361327, "grad_norm": 1.0766868591308594, "learning_rate": 0.0008738959097703493, "loss": 3.8543, "step": 14855 }, { "epoch": 1.0096480500067944, "grad_norm": 1.3061331510543823, "learning_rate": 0.0008738534447615165, "loss": 3.6126, "step": 14860 }, { "epoch": 1.0099877700774562, "grad_norm": 1.2416530847549438, "learning_rate": 0.0008738109797526838, "loss": 3.5008, "step": 14865 }, { "epoch": 1.0103274901481178, "grad_norm": 1.5737037658691406, "learning_rate": 0.0008737685147438511, "loss": 3.4916, "step": 14870 }, { "epoch": 1.0106672102187797, "grad_norm": 1.3917335271835327, "learning_rate": 0.0008737260497350183, "loss": 3.5534, "step": 14875 }, { "epoch": 1.0110069302894416, "grad_norm": 1.6035170555114746, "learning_rate": 0.0008736835847261855, "loss": 3.5344, "step": 14880 }, { "epoch": 1.0113466503601032, "grad_norm": 1.236450433731079, "learning_rate": 0.000873641119717353, "loss": 3.7939, "step": 14885 }, { "epoch": 1.011686370430765, "grad_norm": 1.7211267948150635, "learning_rate": 0.0008735986547085202, "loss": 3.8375, "step": 14890 }, { "epoch": 1.012026090501427, "grad_norm": 1.3280143737792969, "learning_rate": 0.0008735561896996874, "loss": 3.5213, "step": 14895 }, { "epoch": 1.0123658105720885, "grad_norm": 1.5298066139221191, "learning_rate": 0.0008735137246908548, "loss": 3.4243, "step": 14900 }, { "epoch": 1.0127055306427504, "grad_norm": 1.1495654582977295, "learning_rate": 0.000873471259682022, "loss": 3.9095, "step": 14905 }, { "epoch": 1.0130452507134122, "grad_norm": 1.0119924545288086, "learning_rate": 0.0008734287946731893, "loss": 4.0227, "step": 14910 }, { "epoch": 1.0133849707840739, "grad_norm": 
1.5937973260879517, "learning_rate": 0.0008733863296643566, "loss": 3.4719, "step": 14915 }, { "epoch": 1.0137246908547357, "grad_norm": 1.22808837890625, "learning_rate": 0.0008733438646555239, "loss": 3.403, "step": 14920 }, { "epoch": 1.0140644109253976, "grad_norm": 1.1804845333099365, "learning_rate": 0.0008733013996466912, "loss": 3.5694, "step": 14925 }, { "epoch": 1.0144041309960592, "grad_norm": 1.5564923286437988, "learning_rate": 0.0008732589346378584, "loss": 3.4388, "step": 14930 }, { "epoch": 1.014743851066721, "grad_norm": 15.211454391479492, "learning_rate": 0.0008732164696290257, "loss": 3.424, "step": 14935 }, { "epoch": 1.015083571137383, "grad_norm": 1.2948219776153564, "learning_rate": 0.000873174004620193, "loss": 3.3409, "step": 14940 }, { "epoch": 1.0154232912080445, "grad_norm": 1.2691371440887451, "learning_rate": 0.0008731315396113602, "loss": 3.8283, "step": 14945 }, { "epoch": 1.0157630112787064, "grad_norm": 1.279168725013733, "learning_rate": 0.0008730890746025275, "loss": 3.7181, "step": 14950 }, { "epoch": 1.016102731349368, "grad_norm": 1.3405994176864624, "learning_rate": 0.0008730466095936949, "loss": 3.6131, "step": 14955 }, { "epoch": 1.0164424514200299, "grad_norm": 1.3651719093322754, "learning_rate": 0.0008730041445848621, "loss": 3.6006, "step": 14960 }, { "epoch": 1.0167821714906917, "grad_norm": 1.479517936706543, "learning_rate": 0.0008729616795760294, "loss": 3.5148, "step": 14965 }, { "epoch": 1.0171218915613534, "grad_norm": 1.2256523370742798, "learning_rate": 0.0008729192145671967, "loss": 3.4089, "step": 14970 }, { "epoch": 1.0174616116320152, "grad_norm": 1.1900955438613892, "learning_rate": 0.0008728767495583639, "loss": 3.5806, "step": 14975 }, { "epoch": 1.017801331702677, "grad_norm": 1.1721118688583374, "learning_rate": 0.0008728342845495311, "loss": 3.5569, "step": 14980 }, { "epoch": 1.0181410517733387, "grad_norm": 1.325750470161438, "learning_rate": 0.0008727918195406986, "loss": 3.825, "step": 14985 }, { 
"epoch": 1.0184807718440005, "grad_norm": 1.8097035884857178, "learning_rate": 0.0008727493545318658, "loss": 3.6227, "step": 14990 }, { "epoch": 1.0188204919146624, "grad_norm": 1.6901124715805054, "learning_rate": 0.000872706889523033, "loss": 3.614, "step": 14995 }, { "epoch": 1.019160211985324, "grad_norm": 1.0712623596191406, "learning_rate": 0.0008726644245142004, "loss": 3.582, "step": 15000 }, { "epoch": 1.0194999320559859, "grad_norm": 1.3260493278503418, "learning_rate": 0.0008726219595053676, "loss": 3.4277, "step": 15005 }, { "epoch": 1.0198396521266477, "grad_norm": 1.1333271265029907, "learning_rate": 0.0008725794944965348, "loss": 3.6246, "step": 15010 }, { "epoch": 1.0201793721973094, "grad_norm": 1.0332101583480835, "learning_rate": 0.0008725370294877022, "loss": 3.6416, "step": 15015 }, { "epoch": 1.0205190922679712, "grad_norm": 1.4848788976669312, "learning_rate": 0.0008724945644788695, "loss": 3.2488, "step": 15020 }, { "epoch": 1.020858812338633, "grad_norm": 1.4440929889678955, "learning_rate": 0.0008724520994700367, "loss": 3.616, "step": 15025 }, { "epoch": 1.0211985324092947, "grad_norm": 1.341843843460083, "learning_rate": 0.000872409634461204, "loss": 3.3153, "step": 15030 }, { "epoch": 1.0215382524799566, "grad_norm": 1.3730677366256714, "learning_rate": 0.0008723671694523713, "loss": 3.8306, "step": 15035 }, { "epoch": 1.0218779725506182, "grad_norm": 1.5234850645065308, "learning_rate": 0.0008723247044435385, "loss": 3.5946, "step": 15040 }, { "epoch": 1.02221769262128, "grad_norm": 1.4540719985961914, "learning_rate": 0.0008722822394347058, "loss": 3.6404, "step": 15045 }, { "epoch": 1.022557412691942, "grad_norm": 1.6587408781051636, "learning_rate": 0.0008722397744258731, "loss": 3.6693, "step": 15050 }, { "epoch": 1.0228971327626035, "grad_norm": 1.34171462059021, "learning_rate": 0.0008721973094170404, "loss": 3.4888, "step": 15055 }, { "epoch": 1.0232368528332654, "grad_norm": 2.8799009323120117, "learning_rate": 
0.0008721548444082077, "loss": 3.6239, "step": 15060 }, { "epoch": 1.0235765729039272, "grad_norm": 1.4606562852859497, "learning_rate": 0.000872112379399375, "loss": 3.5486, "step": 15065 }, { "epoch": 1.0239162929745889, "grad_norm": 3.6609835624694824, "learning_rate": 0.0008720699143905422, "loss": 3.5001, "step": 15070 }, { "epoch": 1.0242560130452507, "grad_norm": 1.5922707319259644, "learning_rate": 0.0008720274493817095, "loss": 3.6113, "step": 15075 }, { "epoch": 1.0245957331159126, "grad_norm": 1.3093618154525757, "learning_rate": 0.0008719849843728767, "loss": 3.8284, "step": 15080 }, { "epoch": 1.0249354531865742, "grad_norm": 1.3063725233078003, "learning_rate": 0.000871942519364044, "loss": 3.4986, "step": 15085 }, { "epoch": 1.025275173257236, "grad_norm": 1.248901605606079, "learning_rate": 0.0008719000543552114, "loss": 3.6678, "step": 15090 }, { "epoch": 1.025614893327898, "grad_norm": 1.1654719114303589, "learning_rate": 0.0008718575893463786, "loss": 3.8566, "step": 15095 }, { "epoch": 1.0259546133985595, "grad_norm": 2.2384543418884277, "learning_rate": 0.0008718151243375459, "loss": 3.6253, "step": 15100 }, { "epoch": 1.0262943334692214, "grad_norm": 1.391823649406433, "learning_rate": 0.0008717726593287132, "loss": 3.5801, "step": 15105 }, { "epoch": 1.0266340535398832, "grad_norm": 1.3916505575180054, "learning_rate": 0.0008717301943198804, "loss": 3.5874, "step": 15110 }, { "epoch": 1.0269737736105449, "grad_norm": 1.8890552520751953, "learning_rate": 0.0008716877293110476, "loss": 3.5671, "step": 15115 }, { "epoch": 1.0273134936812067, "grad_norm": 1.1581677198410034, "learning_rate": 0.000871645264302215, "loss": 3.677, "step": 15120 }, { "epoch": 1.0276532137518684, "grad_norm": 2.1645398139953613, "learning_rate": 0.0008716027992933823, "loss": 3.5462, "step": 15125 }, { "epoch": 1.0279929338225302, "grad_norm": 1.320906639099121, "learning_rate": 0.0008715603342845495, "loss": 3.7533, "step": 15130 }, { "epoch": 1.028332653893192, 
"grad_norm": 1.3158750534057617, "learning_rate": 0.0008715178692757169, "loss": 3.6893, "step": 15135 }, { "epoch": 1.0286723739638537, "grad_norm": 1.096139907836914, "learning_rate": 0.0008714754042668841, "loss": 3.7257, "step": 15140 }, { "epoch": 1.0290120940345155, "grad_norm": 1.3727426528930664, "learning_rate": 0.0008714329392580513, "loss": 3.5008, "step": 15145 }, { "epoch": 1.0293518141051774, "grad_norm": 1.2930922508239746, "learning_rate": 0.0008713904742492187, "loss": 3.7087, "step": 15150 }, { "epoch": 1.029691534175839, "grad_norm": 2.057786464691162, "learning_rate": 0.0008713480092403859, "loss": 3.5784, "step": 15155 }, { "epoch": 1.0300312542465009, "grad_norm": 1.1611210107803345, "learning_rate": 0.0008713055442315532, "loss": 3.759, "step": 15160 }, { "epoch": 1.0303709743171627, "grad_norm": 2.484990119934082, "learning_rate": 0.0008712630792227206, "loss": 3.51, "step": 15165 }, { "epoch": 1.0307106943878244, "grad_norm": 1.1853916645050049, "learning_rate": 0.0008712206142138878, "loss": 3.7286, "step": 15170 }, { "epoch": 1.0310504144584862, "grad_norm": 1.531678557395935, "learning_rate": 0.000871178149205055, "loss": 3.3814, "step": 15175 }, { "epoch": 1.031390134529148, "grad_norm": 1.5252596139907837, "learning_rate": 0.0008711356841962223, "loss": 3.7545, "step": 15180 }, { "epoch": 1.0317298545998097, "grad_norm": 1.0779949426651, "learning_rate": 0.0008710932191873896, "loss": 3.6877, "step": 15185 }, { "epoch": 1.0320695746704716, "grad_norm": 2.4831910133361816, "learning_rate": 0.0008710507541785568, "loss": 3.7399, "step": 15190 }, { "epoch": 1.0324092947411334, "grad_norm": 1.9290739297866821, "learning_rate": 0.0008710082891697242, "loss": 3.6876, "step": 15195 }, { "epoch": 1.032749014811795, "grad_norm": 5.424341201782227, "learning_rate": 0.0008709658241608915, "loss": 3.5363, "step": 15200 }, { "epoch": 1.033088734882457, "grad_norm": 1.3451040983200073, "learning_rate": 0.0008709233591520587, "loss": 3.6877, "step": 
15205 }, { "epoch": 1.0334284549531185, "grad_norm": 1.6080034971237183, "learning_rate": 0.000870880894143226, "loss": 3.6678, "step": 15210 }, { "epoch": 1.0337681750237804, "grad_norm": 0.9906778931617737, "learning_rate": 0.0008708384291343932, "loss": 3.6636, "step": 15215 }, { "epoch": 1.0341078950944422, "grad_norm": 1.1581716537475586, "learning_rate": 0.0008707959641255605, "loss": 3.7793, "step": 15220 }, { "epoch": 1.0344476151651039, "grad_norm": 1.561163067817688, "learning_rate": 0.0008707534991167278, "loss": 3.3141, "step": 15225 }, { "epoch": 1.0347873352357657, "grad_norm": 1.267944097518921, "learning_rate": 0.0008707110341078951, "loss": 3.584, "step": 15230 }, { "epoch": 1.0351270553064276, "grad_norm": 1.2771589756011963, "learning_rate": 0.0008706685690990624, "loss": 3.8554, "step": 15235 }, { "epoch": 1.0354667753770892, "grad_norm": 1.6444543600082397, "learning_rate": 0.0008706261040902297, "loss": 3.5067, "step": 15240 }, { "epoch": 1.035806495447751, "grad_norm": 1.7610076665878296, "learning_rate": 0.0008705836390813969, "loss": 3.8659, "step": 15245 }, { "epoch": 1.036146215518413, "grad_norm": 1.020967960357666, "learning_rate": 0.0008705411740725643, "loss": 3.698, "step": 15250 }, { "epoch": 1.0364859355890745, "grad_norm": 1.3311740159988403, "learning_rate": 0.0008704987090637315, "loss": 3.6432, "step": 15255 }, { "epoch": 1.0368256556597364, "grad_norm": 1.3312512636184692, "learning_rate": 0.0008704562440548987, "loss": 3.7733, "step": 15260 }, { "epoch": 1.0371653757303982, "grad_norm": 1.0355918407440186, "learning_rate": 0.0008704137790460662, "loss": 3.5981, "step": 15265 }, { "epoch": 1.0375050958010599, "grad_norm": 1.255499243736267, "learning_rate": 0.0008703713140372334, "loss": 3.5465, "step": 15270 }, { "epoch": 1.0378448158717217, "grad_norm": 1.4623594284057617, "learning_rate": 0.0008703288490284006, "loss": 3.332, "step": 15275 }, { "epoch": 1.0381845359423836, "grad_norm": 1.4163949489593506, "learning_rate": 
0.0008702863840195679, "loss": 3.6394, "step": 15280 }, { "epoch": 1.0385242560130452, "grad_norm": 1.2504994869232178, "learning_rate": 0.0008702439190107352, "loss": 3.6482, "step": 15285 }, { "epoch": 1.038863976083707, "grad_norm": 0.9823108315467834, "learning_rate": 0.0008702014540019024, "loss": 3.9034, "step": 15290 }, { "epoch": 1.0392036961543687, "grad_norm": 1.4433362483978271, "learning_rate": 0.0008701589889930697, "loss": 3.6011, "step": 15295 }, { "epoch": 1.0395434162250305, "grad_norm": 1.1907895803451538, "learning_rate": 0.0008701165239842371, "loss": 3.6322, "step": 15300 }, { "epoch": 1.0398831362956924, "grad_norm": 1.3925589323043823, "learning_rate": 0.0008700740589754043, "loss": 3.6677, "step": 15305 }, { "epoch": 1.040222856366354, "grad_norm": 1.6668939590454102, "learning_rate": 0.0008700315939665716, "loss": 3.2425, "step": 15310 }, { "epoch": 1.0405625764370159, "grad_norm": 1.6501684188842773, "learning_rate": 0.0008699891289577389, "loss": 3.7327, "step": 15315 }, { "epoch": 1.0409022965076777, "grad_norm": 1.5886242389678955, "learning_rate": 0.0008699466639489061, "loss": 3.5144, "step": 15320 }, { "epoch": 1.0412420165783394, "grad_norm": 1.2832921743392944, "learning_rate": 0.0008699041989400734, "loss": 3.5636, "step": 15325 }, { "epoch": 1.0415817366490012, "grad_norm": 1.2375811338424683, "learning_rate": 0.0008698617339312406, "loss": 3.6169, "step": 15330 }, { "epoch": 1.041921456719663, "grad_norm": 1.038804292678833, "learning_rate": 0.000869819268922408, "loss": 3.7181, "step": 15335 }, { "epoch": 1.0422611767903247, "grad_norm": 1.3739694356918335, "learning_rate": 0.0008697768039135753, "loss": 3.8732, "step": 15340 }, { "epoch": 1.0426008968609866, "grad_norm": 14.195100784301758, "learning_rate": 0.0008697343389047425, "loss": 3.5518, "step": 15345 }, { "epoch": 1.0429406169316484, "grad_norm": 1.2708122730255127, "learning_rate": 0.0008696918738959098, "loss": 3.502, "step": 15350 }, { "epoch": 1.04328033700231, 
"grad_norm": 1.1813251972198486, "learning_rate": 0.0008696494088870771, "loss": 3.3686, "step": 15355 }, { "epoch": 1.043620057072972, "grad_norm": 1.096824288368225, "learning_rate": 0.0008696069438782443, "loss": 3.6313, "step": 15360 }, { "epoch": 1.0439597771436337, "grad_norm": 1.5721787214279175, "learning_rate": 0.0008695644788694115, "loss": 3.342, "step": 15365 }, { "epoch": 1.0442994972142954, "grad_norm": 1.314038872718811, "learning_rate": 0.000869522013860579, "loss": 3.7374, "step": 15370 }, { "epoch": 1.0446392172849572, "grad_norm": 1.274539828300476, "learning_rate": 0.0008694795488517462, "loss": 3.5686, "step": 15375 }, { "epoch": 1.0449789373556189, "grad_norm": 1.093459963798523, "learning_rate": 0.0008694370838429134, "loss": 3.4378, "step": 15380 }, { "epoch": 1.0453186574262807, "grad_norm": 1.7719863653182983, "learning_rate": 0.0008693946188340808, "loss": 3.3942, "step": 15385 }, { "epoch": 1.0456583774969426, "grad_norm": 1.3025120496749878, "learning_rate": 0.000869352153825248, "loss": 3.4835, "step": 15390 }, { "epoch": 1.0459980975676042, "grad_norm": 1.0753166675567627, "learning_rate": 0.0008693096888164152, "loss": 3.3972, "step": 15395 }, { "epoch": 1.046337817638266, "grad_norm": 1.8935846090316772, "learning_rate": 0.0008692672238075826, "loss": 3.4534, "step": 15400 }, { "epoch": 1.046677537708928, "grad_norm": 1.9131640195846558, "learning_rate": 0.0008692247587987499, "loss": 3.4893, "step": 15405 }, { "epoch": 1.0470172577795895, "grad_norm": 2.1055479049682617, "learning_rate": 0.0008691822937899171, "loss": 3.5467, "step": 15410 }, { "epoch": 1.0473569778502514, "grad_norm": 1.6371665000915527, "learning_rate": 0.0008691398287810845, "loss": 3.9009, "step": 15415 }, { "epoch": 1.0476966979209132, "grad_norm": 1.4199814796447754, "learning_rate": 0.0008690973637722517, "loss": 3.4489, "step": 15420 }, { "epoch": 1.0480364179915749, "grad_norm": 1.023337483406067, "learning_rate": 0.0008690548987634189, "loss": 3.5666, 
"step": 15425 }, { "epoch": 1.0483761380622367, "grad_norm": 1.3902463912963867, "learning_rate": 0.0008690124337545862, "loss": 3.3717, "step": 15430 }, { "epoch": 1.0487158581328986, "grad_norm": 3.273625135421753, "learning_rate": 0.0008689699687457535, "loss": 3.546, "step": 15435 }, { "epoch": 1.0490555782035602, "grad_norm": 0.951773464679718, "learning_rate": 0.0008689275037369208, "loss": 3.5762, "step": 15440 }, { "epoch": 1.049395298274222, "grad_norm": 1.368361234664917, "learning_rate": 0.0008688850387280881, "loss": 3.4448, "step": 15445 }, { "epoch": 1.049735018344884, "grad_norm": 1.172600507736206, "learning_rate": 0.0008688425737192554, "loss": 3.7455, "step": 15450 }, { "epoch": 1.0500747384155455, "grad_norm": 1.189997673034668, "learning_rate": 0.0008688001087104226, "loss": 3.7265, "step": 15455 }, { "epoch": 1.0504144584862074, "grad_norm": 1.7896533012390137, "learning_rate": 0.0008687576437015899, "loss": 3.4207, "step": 15460 }, { "epoch": 1.050754178556869, "grad_norm": 1.3993327617645264, "learning_rate": 0.0008687151786927571, "loss": 3.8486, "step": 15465 }, { "epoch": 1.0510938986275309, "grad_norm": 1.3236621618270874, "learning_rate": 0.0008686727136839244, "loss": 3.7223, "step": 15470 }, { "epoch": 1.0514336186981927, "grad_norm": 1.2024935483932495, "learning_rate": 0.0008686302486750918, "loss": 3.4758, "step": 15475 }, { "epoch": 1.0517733387688544, "grad_norm": 1.3431999683380127, "learning_rate": 0.000868587783666259, "loss": 3.7051, "step": 15480 }, { "epoch": 1.0521130588395162, "grad_norm": 1.2925662994384766, "learning_rate": 0.0008685453186574263, "loss": 3.7655, "step": 15485 }, { "epoch": 1.052452778910178, "grad_norm": 1.0601786375045776, "learning_rate": 0.0008685028536485936, "loss": 3.7782, "step": 15490 }, { "epoch": 1.0527924989808397, "grad_norm": 1.375856637954712, "learning_rate": 0.0008684603886397608, "loss": 3.5199, "step": 15495 }, { "epoch": 1.0531322190515016, "grad_norm": 1.1196837425231934, 
"learning_rate": 0.000868417923630928, "loss": 3.3628, "step": 15500 }, { "epoch": 1.0534719391221634, "grad_norm": 1.4383820295333862, "learning_rate": 0.0008683754586220954, "loss": 3.7053, "step": 15505 }, { "epoch": 1.053811659192825, "grad_norm": 1.1105955839157104, "learning_rate": 0.0008683329936132627, "loss": 3.9135, "step": 15510 }, { "epoch": 1.054151379263487, "grad_norm": 1.4628151655197144, "learning_rate": 0.0008682905286044299, "loss": 3.4967, "step": 15515 }, { "epoch": 1.0544910993341488, "grad_norm": 1.2942028045654297, "learning_rate": 0.0008682480635955973, "loss": 3.7228, "step": 15520 }, { "epoch": 1.0548308194048104, "grad_norm": 1.4453632831573486, "learning_rate": 0.0008682055985867645, "loss": 3.6384, "step": 15525 }, { "epoch": 1.0551705394754722, "grad_norm": 1.0019110441207886, "learning_rate": 0.0008681631335779317, "loss": 3.5728, "step": 15530 }, { "epoch": 1.055510259546134, "grad_norm": 1.5541558265686035, "learning_rate": 0.0008681206685690991, "loss": 3.772, "step": 15535 }, { "epoch": 1.0558499796167957, "grad_norm": 1.551676869392395, "learning_rate": 0.0008680782035602663, "loss": 3.7105, "step": 15540 }, { "epoch": 1.0561896996874576, "grad_norm": 1.1594219207763672, "learning_rate": 0.0008680357385514336, "loss": 3.7637, "step": 15545 }, { "epoch": 1.0565294197581192, "grad_norm": 2.510342597961426, "learning_rate": 0.000867993273542601, "loss": 3.6991, "step": 15550 }, { "epoch": 1.056869139828781, "grad_norm": NaN, "learning_rate": 0.0008679593015355347, "loss": 3.4513, "step": 15555 }, { "epoch": 1.057208859899443, "grad_norm": 1.518423080444336, "learning_rate": 0.000867916836526702, "loss": 3.2923, "step": 15560 }, { "epoch": 1.0575485799701045, "grad_norm": 1.4465004205703735, "learning_rate": 0.0008678743715178693, "loss": 3.7527, "step": 15565 }, { "epoch": 1.0578883000407664, "grad_norm": 1.5817221403121948, "learning_rate": 0.0008678319065090366, "loss": 3.716, "step": 15570 }, { "epoch": 1.0582280201114282, 
"grad_norm": 1.3652759790420532, "learning_rate": 0.0008677894415002039, "loss": 3.3865, "step": 15575 }, { "epoch": 1.0585677401820899, "grad_norm": 2.205536127090454, "learning_rate": 0.0008677469764913711, "loss": 3.647, "step": 15580 }, { "epoch": 1.0589074602527517, "grad_norm": 1.4381959438323975, "learning_rate": 0.0008677045114825384, "loss": 3.7661, "step": 15585 }, { "epoch": 1.0592471803234136, "grad_norm": 1.3128823041915894, "learning_rate": 0.0008676620464737057, "loss": 3.7825, "step": 15590 }, { "epoch": 1.0595869003940752, "grad_norm": 1.3330471515655518, "learning_rate": 0.0008676195814648729, "loss": 3.7119, "step": 15595 }, { "epoch": 1.059926620464737, "grad_norm": 1.106520414352417, "learning_rate": 0.0008675771164560403, "loss": 3.5783, "step": 15600 }, { "epoch": 1.060266340535399, "grad_norm": 1.1200309991836548, "learning_rate": 0.0008675346514472076, "loss": 3.4527, "step": 15605 }, { "epoch": 1.0606060606060606, "grad_norm": 1.511953353881836, "learning_rate": 0.0008674921864383748, "loss": 3.6532, "step": 15610 }, { "epoch": 1.0609457806767224, "grad_norm": 1.2332046031951904, "learning_rate": 0.000867449721429542, "loss": 3.7038, "step": 15615 }, { "epoch": 1.0612855007473843, "grad_norm": 1.4154295921325684, "learning_rate": 0.0008674072564207094, "loss": 3.9907, "step": 15620 }, { "epoch": 1.061625220818046, "grad_norm": 1.8465453386306763, "learning_rate": 0.0008673647914118766, "loss": 3.5824, "step": 15625 }, { "epoch": 1.0619649408887077, "grad_norm": 1.1233859062194824, "learning_rate": 0.0008673223264030438, "loss": 3.7404, "step": 15630 }, { "epoch": 1.0623046609593694, "grad_norm": 1.4832333326339722, "learning_rate": 0.0008672798613942113, "loss": 3.798, "step": 15635 }, { "epoch": 1.0626443810300312, "grad_norm": 1.5609742403030396, "learning_rate": 0.0008672373963853785, "loss": 3.4913, "step": 15640 }, { "epoch": 1.062984101100693, "grad_norm": 1.457556962966919, "learning_rate": 0.0008671949313765457, "loss": 3.5484, 
"step": 15645 }, { "epoch": 1.0633238211713547, "grad_norm": 1.2404595613479614, "learning_rate": 0.000867152466367713, "loss": 3.6063, "step": 15650 }, { "epoch": 1.0636635412420166, "grad_norm": 1.4806907176971436, "learning_rate": 0.0008671100013588803, "loss": 3.6182, "step": 15655 }, { "epoch": 1.0640032613126784, "grad_norm": 1.4391734600067139, "learning_rate": 0.0008670675363500475, "loss": 3.6468, "step": 15660 }, { "epoch": 1.06434298138334, "grad_norm": 1.128070592880249, "learning_rate": 0.0008670250713412148, "loss": 3.5484, "step": 15665 }, { "epoch": 1.064682701454002, "grad_norm": 3.9504432678222656, "learning_rate": 0.0008669826063323822, "loss": 3.4663, "step": 15670 }, { "epoch": 1.0650224215246638, "grad_norm": 4.502615451812744, "learning_rate": 0.0008669401413235494, "loss": 3.6099, "step": 15675 }, { "epoch": 1.0653621415953254, "grad_norm": 1.4080593585968018, "learning_rate": 0.0008668976763147167, "loss": 3.8415, "step": 15680 }, { "epoch": 1.0657018616659872, "grad_norm": 1.5537890195846558, "learning_rate": 0.000866855211305884, "loss": 3.3625, "step": 15685 }, { "epoch": 1.066041581736649, "grad_norm": 1.398476004600525, "learning_rate": 0.0008668127462970512, "loss": 3.7901, "step": 15690 }, { "epoch": 1.0663813018073107, "grad_norm": 1.4627693891525269, "learning_rate": 0.0008667702812882185, "loss": 3.6718, "step": 15695 }, { "epoch": 1.0667210218779726, "grad_norm": 1.3914203643798828, "learning_rate": 0.0008667278162793857, "loss": 3.6299, "step": 15700 }, { "epoch": 1.0670607419486344, "grad_norm": 1.2847344875335693, "learning_rate": 0.0008666853512705531, "loss": 3.2, "step": 15705 }, { "epoch": 1.067400462019296, "grad_norm": 1.4275717735290527, "learning_rate": 0.0008666428862617204, "loss": 3.5689, "step": 15710 }, { "epoch": 1.067740182089958, "grad_norm": 1.2578989267349243, "learning_rate": 0.0008666004212528876, "loss": 3.4923, "step": 15715 }, { "epoch": 1.0680799021606195, "grad_norm": 1.2118395566940308, 
"learning_rate": 0.0008665579562440549, "loss": 3.6136, "step": 15720 }, { "epoch": 1.0684196222312814, "grad_norm": 1.332883596420288, "learning_rate": 0.0008665154912352222, "loss": 3.6599, "step": 15725 }, { "epoch": 1.0687593423019432, "grad_norm": 1.1688578128814697, "learning_rate": 0.0008664730262263894, "loss": 3.5637, "step": 15730 }, { "epoch": 1.0690990623726049, "grad_norm": 1.1457005739212036, "learning_rate": 0.0008664305612175566, "loss": 3.7917, "step": 15735 }, { "epoch": 1.0694387824432667, "grad_norm": 2.1675636768341064, "learning_rate": 0.0008663880962087241, "loss": 3.694, "step": 15740 }, { "epoch": 1.0697785025139286, "grad_norm": 1.2380911111831665, "learning_rate": 0.0008663456311998913, "loss": 3.4849, "step": 15745 }, { "epoch": 1.0701182225845902, "grad_norm": 1.2705168724060059, "learning_rate": 0.0008663031661910585, "loss": 3.3034, "step": 15750 }, { "epoch": 1.070457942655252, "grad_norm": 1.2101551294326782, "learning_rate": 0.0008662607011822259, "loss": 3.8648, "step": 15755 }, { "epoch": 1.070797662725914, "grad_norm": 1.2906736135482788, "learning_rate": 0.0008662182361733931, "loss": 3.5767, "step": 15760 }, { "epoch": 1.0711373827965756, "grad_norm": 1.8099288940429688, "learning_rate": 0.0008661757711645603, "loss": 3.7182, "step": 15765 }, { "epoch": 1.0714771028672374, "grad_norm": 1.1318553686141968, "learning_rate": 0.0008661333061557277, "loss": 3.3963, "step": 15770 }, { "epoch": 1.0718168229378993, "grad_norm": 3.621483325958252, "learning_rate": 0.000866090841146895, "loss": 3.6965, "step": 15775 }, { "epoch": 1.072156543008561, "grad_norm": 1.4853880405426025, "learning_rate": 0.0008660483761380622, "loss": 3.6268, "step": 15780 }, { "epoch": 1.0724962630792227, "grad_norm": 1.100446343421936, "learning_rate": 0.0008660059111292296, "loss": 3.4572, "step": 15785 }, { "epoch": 1.0728359831498846, "grad_norm": 1.6955772638320923, "learning_rate": 0.0008659634461203968, "loss": 3.592, "step": 15790 }, { "epoch": 
1.0731757032205462, "grad_norm": 1.369293451309204, "learning_rate": 0.0008659209811115641, "loss": 3.8484, "step": 15795 }, { "epoch": 1.073515423291208, "grad_norm": 1.9072332382202148, "learning_rate": 0.0008658785161027313, "loss": 3.5931, "step": 15800 }, { "epoch": 1.07385514336187, "grad_norm": 1.1086595058441162, "learning_rate": 0.0008658360510938986, "loss": 3.6736, "step": 15805 }, { "epoch": 1.0741948634325316, "grad_norm": 1.4474271535873413, "learning_rate": 0.000865793586085066, "loss": 3.482, "step": 15810 }, { "epoch": 1.0745345835031934, "grad_norm": 1.390909194946289, "learning_rate": 0.0008657511210762332, "loss": 3.6915, "step": 15815 }, { "epoch": 1.074874303573855, "grad_norm": 1.1870497465133667, "learning_rate": 0.0008657086560674005, "loss": 3.6699, "step": 15820 }, { "epoch": 1.075214023644517, "grad_norm": 1.4328322410583496, "learning_rate": 0.0008656661910585678, "loss": 3.6581, "step": 15825 }, { "epoch": 1.0755537437151788, "grad_norm": 1.2380818128585815, "learning_rate": 0.000865623726049735, "loss": 3.6605, "step": 15830 }, { "epoch": 1.0758934637858404, "grad_norm": 1.166750431060791, "learning_rate": 0.0008655812610409023, "loss": 3.3702, "step": 15835 }, { "epoch": 1.0762331838565022, "grad_norm": 1.1447505950927734, "learning_rate": 0.0008655387960320697, "loss": 3.5835, "step": 15840 }, { "epoch": 1.076572903927164, "grad_norm": 1.330103874206543, "learning_rate": 0.0008654963310232369, "loss": 3.1664, "step": 15845 }, { "epoch": 1.0769126239978257, "grad_norm": 1.3198381662368774, "learning_rate": 0.0008654538660144041, "loss": 3.3853, "step": 15850 }, { "epoch": 1.0772523440684876, "grad_norm": 1.1655906438827515, "learning_rate": 0.0008654114010055715, "loss": 3.6384, "step": 15855 }, { "epoch": 1.0775920641391494, "grad_norm": 1.3951127529144287, "learning_rate": 0.0008653689359967387, "loss": 3.9068, "step": 15860 }, { "epoch": 1.077931784209811, "grad_norm": 3.870925188064575, "learning_rate": 0.0008653264709879059, 
"loss": 3.3326, "step": 15865 }, { "epoch": 1.078271504280473, "grad_norm": 1.2279119491577148, "learning_rate": 0.0008652840059790733, "loss": 3.6588, "step": 15870 }, { "epoch": 1.0786112243511348, "grad_norm": 1.130321741104126, "learning_rate": 0.0008652415409702406, "loss": 3.4561, "step": 15875 }, { "epoch": 1.0789509444217964, "grad_norm": 1.528591513633728, "learning_rate": 0.0008651990759614078, "loss": 3.4839, "step": 15880 }, { "epoch": 1.0792906644924583, "grad_norm": 1.3384541273117065, "learning_rate": 0.0008651566109525752, "loss": 3.7235, "step": 15885 }, { "epoch": 1.0796303845631199, "grad_norm": 1.1053028106689453, "learning_rate": 0.0008651141459437424, "loss": 3.5209, "step": 15890 }, { "epoch": 1.0799701046337817, "grad_norm": 1.3913891315460205, "learning_rate": 0.0008650716809349096, "loss": 3.7448, "step": 15895 }, { "epoch": 1.0803098247044436, "grad_norm": 1.370138168334961, "learning_rate": 0.000865029215926077, "loss": 3.24, "step": 15900 }, { "epoch": 1.0806495447751052, "grad_norm": 1.1376445293426514, "learning_rate": 0.0008649867509172442, "loss": 3.8331, "step": 15905 }, { "epoch": 1.080989264845767, "grad_norm": 1.6780116558074951, "learning_rate": 0.0008649442859084115, "loss": 3.8472, "step": 15910 }, { "epoch": 1.081328984916429, "grad_norm": 7.153387546539307, "learning_rate": 0.0008649018208995788, "loss": 3.667, "step": 15915 }, { "epoch": 1.0816687049870906, "grad_norm": 1.6884760856628418, "learning_rate": 0.0008648593558907461, "loss": 3.5605, "step": 15920 }, { "epoch": 1.0820084250577524, "grad_norm": 1.2234952449798584, "learning_rate": 0.0008648168908819133, "loss": 3.5849, "step": 15925 }, { "epoch": 1.0823481451284143, "grad_norm": 1.1735016107559204, "learning_rate": 0.0008647744258730806, "loss": 3.5864, "step": 15930 }, { "epoch": 1.082687865199076, "grad_norm": 1.1600301265716553, "learning_rate": 0.0008647319608642479, "loss": 3.5312, "step": 15935 }, { "epoch": 1.0830275852697377, "grad_norm": 
1.5533771514892578, "learning_rate": 0.0008646894958554151, "loss": 3.6743, "step": 15940 }, { "epoch": 1.0833673053403996, "grad_norm": 1.490462064743042, "learning_rate": 0.0008646470308465825, "loss": 3.6634, "step": 15945 }, { "epoch": 1.0837070254110612, "grad_norm": 1.1175315380096436, "learning_rate": 0.0008646045658377497, "loss": 3.6253, "step": 15950 }, { "epoch": 1.084046745481723, "grad_norm": 1.0968096256256104, "learning_rate": 0.000864562100828917, "loss": 3.6683, "step": 15955 }, { "epoch": 1.084386465552385, "grad_norm": 1.1549400091171265, "learning_rate": 0.0008645196358200843, "loss": 3.4977, "step": 15960 }, { "epoch": 1.0847261856230466, "grad_norm": 1.2320348024368286, "learning_rate": 0.0008644771708112515, "loss": 3.6392, "step": 15965 }, { "epoch": 1.0850659056937084, "grad_norm": 1.1596134901046753, "learning_rate": 0.0008644347058024188, "loss": 3.5831, "step": 15970 }, { "epoch": 1.0854056257643703, "grad_norm": 1.1805295944213867, "learning_rate": 0.0008643922407935861, "loss": 3.6791, "step": 15975 }, { "epoch": 1.085745345835032, "grad_norm": 1.2430083751678467, "learning_rate": 0.0008643497757847534, "loss": 3.7414, "step": 15980 }, { "epoch": 1.0860850659056938, "grad_norm": 1.4862091541290283, "learning_rate": 0.0008643073107759207, "loss": 3.6031, "step": 15985 }, { "epoch": 1.0864247859763554, "grad_norm": 1.1923848390579224, "learning_rate": 0.000864264845767088, "loss": 3.5363, "step": 15990 }, { "epoch": 1.0867645060470172, "grad_norm": 5.226635456085205, "learning_rate": 0.0008642223807582552, "loss": 3.7103, "step": 15995 }, { "epoch": 1.087104226117679, "grad_norm": 1.4608962535858154, "learning_rate": 0.0008641799157494224, "loss": 3.6353, "step": 16000 }, { "epoch": 1.0874439461883407, "grad_norm": 1.338848352432251, "learning_rate": 0.0008641374507405898, "loss": 3.6535, "step": 16005 }, { "epoch": 1.0877836662590026, "grad_norm": 2.92295503616333, "learning_rate": 0.000864094985731757, "loss": 3.7745, "step": 16010 }, 
{ "epoch": 1.0881233863296644, "grad_norm": 1.0953961610794067, "learning_rate": 0.0008640525207229243, "loss": 3.5271, "step": 16015 }, { "epoch": 1.088463106400326, "grad_norm": 3.594329833984375, "learning_rate": 0.0008640100557140917, "loss": 3.6155, "step": 16020 }, { "epoch": 1.088802826470988, "grad_norm": 1.2066094875335693, "learning_rate": 0.0008639675907052589, "loss": 3.598, "step": 16025 }, { "epoch": 1.0891425465416498, "grad_norm": 1.3712784051895142, "learning_rate": 0.0008639251256964261, "loss": 3.6353, "step": 16030 }, { "epoch": 1.0894822666123114, "grad_norm": 5.278666973114014, "learning_rate": 0.0008638826606875935, "loss": 3.6316, "step": 16035 }, { "epoch": 1.0898219866829733, "grad_norm": 2.127222776412964, "learning_rate": 0.0008638401956787607, "loss": 3.459, "step": 16040 }, { "epoch": 1.090161706753635, "grad_norm": 1.9536371231079102, "learning_rate": 0.0008637977306699279, "loss": 3.5995, "step": 16045 }, { "epoch": 1.0905014268242967, "grad_norm": 0.9262590408325195, "learning_rate": 0.0008637552656610953, "loss": 3.8751, "step": 16050 }, { "epoch": 1.0908411468949586, "grad_norm": 1.0503114461898804, "learning_rate": 0.0008637128006522626, "loss": 3.536, "step": 16055 }, { "epoch": 1.0911808669656202, "grad_norm": 1.4404999017715454, "learning_rate": 0.0008636703356434298, "loss": 3.3131, "step": 16060 }, { "epoch": 1.091520587036282, "grad_norm": 2.063206434249878, "learning_rate": 0.0008636278706345971, "loss": 3.5913, "step": 16065 }, { "epoch": 1.091860307106944, "grad_norm": 6.4407501220703125, "learning_rate": 0.0008635854056257644, "loss": 3.4767, "step": 16070 }, { "epoch": 1.0922000271776056, "grad_norm": 1.231980562210083, "learning_rate": 0.0008635429406169316, "loss": 3.652, "step": 16075 }, { "epoch": 1.0925397472482674, "grad_norm": 1.2253528833389282, "learning_rate": 0.0008635004756080989, "loss": 3.6071, "step": 16080 }, { "epoch": 1.0928794673189293, "grad_norm": 1.7641711235046387, "learning_rate": 
0.0008634580105992663, "loss": 3.53, "step": 16085 }, { "epoch": 1.093219187389591, "grad_norm": 1.3326908349990845, "learning_rate": 0.0008634155455904335, "loss": 3.3678, "step": 16090 }, { "epoch": 1.0935589074602527, "grad_norm": 1.3443076610565186, "learning_rate": 0.0008633730805816008, "loss": 3.7372, "step": 16095 }, { "epoch": 1.0938986275309146, "grad_norm": 1.8044079542160034, "learning_rate": 0.000863330615572768, "loss": 3.7332, "step": 16100 }, { "epoch": 1.0942383476015762, "grad_norm": 1.3721859455108643, "learning_rate": 0.0008632881505639353, "loss": 3.4507, "step": 16105 }, { "epoch": 1.094578067672238, "grad_norm": 1.2875850200653076, "learning_rate": 0.0008632456855551026, "loss": 3.5675, "step": 16110 }, { "epoch": 1.0949177877429, "grad_norm": 1.2600064277648926, "learning_rate": 0.0008632032205462698, "loss": 3.6684, "step": 16115 }, { "epoch": 1.0952575078135616, "grad_norm": 1.1113725900650024, "learning_rate": 0.0008631607555374372, "loss": 3.6491, "step": 16120 }, { "epoch": 1.0955972278842234, "grad_norm": 1.5576823949813843, "learning_rate": 0.0008631182905286045, "loss": 3.5078, "step": 16125 }, { "epoch": 1.0959369479548853, "grad_norm": 1.1119595766067505, "learning_rate": 0.0008630758255197717, "loss": 3.8223, "step": 16130 }, { "epoch": 1.096276668025547, "grad_norm": 1.3690545558929443, "learning_rate": 0.0008630333605109391, "loss": 3.405, "step": 16135 }, { "epoch": 1.0966163880962088, "grad_norm": 0.9735977053642273, "learning_rate": 0.0008629908955021063, "loss": 3.7191, "step": 16140 }, { "epoch": 1.0969561081668706, "grad_norm": 1.140775442123413, "learning_rate": 0.0008629484304932735, "loss": 3.5076, "step": 16145 }, { "epoch": 1.0972958282375322, "grad_norm": 1.1062344312667847, "learning_rate": 0.0008629059654844408, "loss": 3.5888, "step": 16150 }, { "epoch": 1.097635548308194, "grad_norm": 1.3452571630477905, "learning_rate": 0.0008628635004756082, "loss": 3.608, "step": 16155 }, { "epoch": 1.0979752683788557, 
"grad_norm": 1.111832857131958, "learning_rate": 0.0008628210354667754, "loss": 3.5816, "step": 16160 }, { "epoch": 1.0983149884495176, "grad_norm": 1.4363921880722046, "learning_rate": 0.0008627785704579427, "loss": 3.5034, "step": 16165 }, { "epoch": 1.0986547085201794, "grad_norm": 1.4682121276855469, "learning_rate": 0.00086273610544911, "loss": 3.6645, "step": 16170 }, { "epoch": 1.098994428590841, "grad_norm": 1.5447646379470825, "learning_rate": 0.0008626936404402772, "loss": 3.6651, "step": 16175 }, { "epoch": 1.099334148661503, "grad_norm": 1.5355583429336548, "learning_rate": 0.0008626511754314445, "loss": 3.5989, "step": 16180 }, { "epoch": 1.0996738687321648, "grad_norm": 1.0782443284988403, "learning_rate": 0.0008626087104226117, "loss": 3.6863, "step": 16185 }, { "epoch": 1.1000135888028264, "grad_norm": 1.3578532934188843, "learning_rate": 0.0008625662454137791, "loss": 3.3701, "step": 16190 }, { "epoch": 1.1003533088734883, "grad_norm": 1.1494064331054688, "learning_rate": 0.0008625237804049464, "loss": 3.642, "step": 16195 }, { "epoch": 1.10069302894415, "grad_norm": 1.1303586959838867, "learning_rate": 0.0008624813153961136, "loss": 3.5964, "step": 16200 }, { "epoch": 1.1010327490148117, "grad_norm": 1.0804824829101562, "learning_rate": 0.0008624388503872809, "loss": 3.6242, "step": 16205 }, { "epoch": 1.1013724690854736, "grad_norm": 1.0955919027328491, "learning_rate": 0.0008623963853784482, "loss": 3.3524, "step": 16210 }, { "epoch": 1.1017121891561354, "grad_norm": 1.2906938791275024, "learning_rate": 0.0008623539203696154, "loss": 3.3459, "step": 16215 }, { "epoch": 1.102051909226797, "grad_norm": 1.1019179821014404, "learning_rate": 0.0008623114553607827, "loss": 3.6823, "step": 16220 }, { "epoch": 1.102391629297459, "grad_norm": 1.1827945709228516, "learning_rate": 0.0008622689903519501, "loss": 3.778, "step": 16225 }, { "epoch": 1.1027313493681206, "grad_norm": 1.2362476587295532, "learning_rate": 0.0008622265253431173, "loss": 3.6249, 
"step": 16230 }, { "epoch": 1.1030710694387824, "grad_norm": 1.4839417934417725, "learning_rate": 0.0008621840603342845, "loss": 3.7265, "step": 16235 }, { "epoch": 1.1034107895094443, "grad_norm": 1.0424081087112427, "learning_rate": 0.0008621415953254519, "loss": 3.7034, "step": 16240 }, { "epoch": 1.103750509580106, "grad_norm": 1.1570982933044434, "learning_rate": 0.0008620991303166191, "loss": 3.8103, "step": 16245 }, { "epoch": 1.1040902296507678, "grad_norm": 1.1573641300201416, "learning_rate": 0.0008620566653077863, "loss": 3.6894, "step": 16250 }, { "epoch": 1.1044299497214296, "grad_norm": 1.3842713832855225, "learning_rate": 0.0008620142002989537, "loss": 3.6289, "step": 16255 }, { "epoch": 1.1047696697920912, "grad_norm": 1.1315820217132568, "learning_rate": 0.000861971735290121, "loss": 3.4509, "step": 16260 }, { "epoch": 1.105109389862753, "grad_norm": 1.2949697971343994, "learning_rate": 0.0008619292702812882, "loss": 3.5017, "step": 16265 }, { "epoch": 1.105449109933415, "grad_norm": 1.9738582372665405, "learning_rate": 0.0008618868052724556, "loss": 3.5135, "step": 16270 }, { "epoch": 1.1057888300040766, "grad_norm": 1.2038909196853638, "learning_rate": 0.0008618443402636228, "loss": 3.7076, "step": 16275 }, { "epoch": 1.1061285500747384, "grad_norm": 1.373559594154358, "learning_rate": 0.00086180187525479, "loss": 3.5226, "step": 16280 }, { "epoch": 1.1064682701454003, "grad_norm": 1.5003254413604736, "learning_rate": 0.0008617594102459573, "loss": 3.6173, "step": 16285 }, { "epoch": 1.106807990216062, "grad_norm": 1.28163743019104, "learning_rate": 0.0008617169452371246, "loss": 3.5108, "step": 16290 }, { "epoch": 1.1071477102867238, "grad_norm": 1.2721233367919922, "learning_rate": 0.0008616744802282919, "loss": 3.2399, "step": 16295 }, { "epoch": 1.1074874303573856, "grad_norm": 1.4851006269454956, "learning_rate": 0.0008616320152194592, "loss": 3.312, "step": 16300 }, { "epoch": 1.1078271504280472, "grad_norm": 1.2409238815307617, 
"learning_rate": 0.0008615895502106265, "loss": 3.5071, "step": 16305 }, { "epoch": 1.108166870498709, "grad_norm": 1.155383586883545, "learning_rate": 0.0008615470852017937, "loss": 3.7936, "step": 16310 }, { "epoch": 1.108506590569371, "grad_norm": 1.1806951761245728, "learning_rate": 0.000861504620192961, "loss": 3.4939, "step": 16315 }, { "epoch": 1.1088463106400326, "grad_norm": 1.0730822086334229, "learning_rate": 0.0008614621551841283, "loss": 3.6875, "step": 16320 }, { "epoch": 1.1091860307106944, "grad_norm": 1.0504974126815796, "learning_rate": 0.0008614196901752955, "loss": 3.6655, "step": 16325 }, { "epoch": 1.109525750781356, "grad_norm": 2.8416080474853516, "learning_rate": 0.0008613772251664629, "loss": 3.5925, "step": 16330 }, { "epoch": 1.109865470852018, "grad_norm": 0.9314908385276794, "learning_rate": 0.0008613347601576302, "loss": 3.7303, "step": 16335 }, { "epoch": 1.1102051909226798, "grad_norm": 1.1632658243179321, "learning_rate": 0.0008612922951487974, "loss": 3.4978, "step": 16340 }, { "epoch": 1.1105449109933414, "grad_norm": 1.2813061475753784, "learning_rate": 0.0008612498301399647, "loss": 3.7908, "step": 16345 }, { "epoch": 1.1108846310640033, "grad_norm": 1.0905195474624634, "learning_rate": 0.0008612073651311319, "loss": 3.6692, "step": 16350 }, { "epoch": 1.1112243511346651, "grad_norm": 1.5750612020492554, "learning_rate": 0.0008611649001222992, "loss": 3.6943, "step": 16355 }, { "epoch": 1.1115640712053267, "grad_norm": 1.2152886390686035, "learning_rate": 0.0008611224351134666, "loss": 3.4527, "step": 16360 }, { "epoch": 1.1119037912759886, "grad_norm": 1.008733868598938, "learning_rate": 0.0008610799701046338, "loss": 3.6132, "step": 16365 }, { "epoch": 1.1122435113466504, "grad_norm": 1.5974698066711426, "learning_rate": 0.0008610375050958011, "loss": 3.5144, "step": 16370 }, { "epoch": 1.112583231417312, "grad_norm": 1.1068345308303833, "learning_rate": 0.0008609950400869684, "loss": 3.3409, "step": 16375 }, { "epoch": 
1.112922951487974, "grad_norm": 1.6680008172988892, "learning_rate": 0.0008609525750781356, "loss": 3.6859, "step": 16380 }, { "epoch": 1.1132626715586358, "grad_norm": 1.8286552429199219, "learning_rate": 0.0008609101100693028, "loss": 3.7192, "step": 16385 }, { "epoch": 1.1136023916292974, "grad_norm": 1.2198536396026611, "learning_rate": 0.0008608676450604702, "loss": 3.8348, "step": 16390 }, { "epoch": 1.1139421116999593, "grad_norm": 1.0993266105651855, "learning_rate": 0.0008608251800516375, "loss": 3.8015, "step": 16395 }, { "epoch": 1.114281831770621, "grad_norm": 1.1322425603866577, "learning_rate": 0.0008607827150428047, "loss": 3.4912, "step": 16400 }, { "epoch": 1.1146215518412828, "grad_norm": 1.3754218816757202, "learning_rate": 0.0008607402500339721, "loss": 3.5602, "step": 16405 }, { "epoch": 1.1149612719119446, "grad_norm": 1.2162518501281738, "learning_rate": 0.0008606977850251393, "loss": 3.6762, "step": 16410 }, { "epoch": 1.1153009919826062, "grad_norm": 1.2927137613296509, "learning_rate": 0.0008606553200163065, "loss": 3.6262, "step": 16415 }, { "epoch": 1.115640712053268, "grad_norm": 1.2274760007858276, "learning_rate": 0.0008606128550074739, "loss": 3.7712, "step": 16420 }, { "epoch": 1.11598043212393, "grad_norm": 1.1131126880645752, "learning_rate": 0.0008605703899986411, "loss": 3.447, "step": 16425 }, { "epoch": 1.1163201521945916, "grad_norm": 1.32160222530365, "learning_rate": 0.0008605279249898084, "loss": 3.7176, "step": 16430 }, { "epoch": 1.1166598722652534, "grad_norm": 1.4992352724075317, "learning_rate": 0.0008604854599809758, "loss": 3.447, "step": 16435 }, { "epoch": 1.1169995923359153, "grad_norm": 1.2355464696884155, "learning_rate": 0.000860442994972143, "loss": 3.6018, "step": 16440 }, { "epoch": 1.117339312406577, "grad_norm": 1.4298770427703857, "learning_rate": 0.0008604005299633102, "loss": 3.6076, "step": 16445 }, { "epoch": 1.1176790324772388, "grad_norm": 1.8030023574829102, "learning_rate": 0.0008603580649544775, 
"loss": 3.9672, "step": 16450 }, { "epoch": 1.1180187525479006, "grad_norm": 1.1993069648742676, "learning_rate": 0.0008603155999456448, "loss": 3.6483, "step": 16455 }, { "epoch": 1.1183584726185622, "grad_norm": 1.9725743532180786, "learning_rate": 0.000860273134936812, "loss": 3.6638, "step": 16460 }, { "epoch": 1.118698192689224, "grad_norm": 1.2965764999389648, "learning_rate": 0.0008602306699279794, "loss": 3.8426, "step": 16465 }, { "epoch": 1.119037912759886, "grad_norm": 1.6041958332061768, "learning_rate": 0.0008601882049191467, "loss": 3.3309, "step": 16470 }, { "epoch": 1.1193776328305476, "grad_norm": 0.9035895466804504, "learning_rate": 0.000860145739910314, "loss": 3.671, "step": 16475 }, { "epoch": 1.1197173529012094, "grad_norm": 1.1888939142227173, "learning_rate": 0.0008601032749014812, "loss": 3.6519, "step": 16480 }, { "epoch": 1.1200570729718713, "grad_norm": 1.2379236221313477, "learning_rate": 0.0008600608098926484, "loss": 3.5871, "step": 16485 }, { "epoch": 1.120396793042533, "grad_norm": 1.1729705333709717, "learning_rate": 0.0008600183448838158, "loss": 3.5661, "step": 16490 }, { "epoch": 1.1207365131131948, "grad_norm": 1.286995768547058, "learning_rate": 0.000859975879874983, "loss": 3.6561, "step": 16495 }, { "epoch": 1.1210762331838564, "grad_norm": 1.4253073930740356, "learning_rate": 0.0008599334148661503, "loss": 3.6679, "step": 16500 }, { "epoch": 1.1214159532545183, "grad_norm": 1.073899269104004, "learning_rate": 0.0008598909498573177, "loss": 3.6081, "step": 16505 }, { "epoch": 1.1217556733251801, "grad_norm": 1.1028568744659424, "learning_rate": 0.0008598484848484849, "loss": 3.5163, "step": 16510 }, { "epoch": 1.1220953933958417, "grad_norm": 1.1157504320144653, "learning_rate": 0.0008598060198396521, "loss": 3.575, "step": 16515 }, { "epoch": 1.1224351134665036, "grad_norm": 1.3553597927093506, "learning_rate": 0.0008597635548308195, "loss": 3.7038, "step": 16520 }, { "epoch": 1.1227748335371655, "grad_norm": 
1.0763546228408813, "learning_rate": 0.0008597210898219867, "loss": 3.4669, "step": 16525 }, { "epoch": 1.123114553607827, "grad_norm": 1.2051626443862915, "learning_rate": 0.0008596786248131539, "loss": 3.5447, "step": 16530 }, { "epoch": 1.123454273678489, "grad_norm": 1.2731778621673584, "learning_rate": 0.0008596361598043214, "loss": 3.5919, "step": 16535 }, { "epoch": 1.1237939937491508, "grad_norm": 1.3489327430725098, "learning_rate": 0.0008595936947954886, "loss": 3.8444, "step": 16540 }, { "epoch": 1.1241337138198124, "grad_norm": 1.1822887659072876, "learning_rate": 0.0008595512297866558, "loss": 3.6285, "step": 16545 }, { "epoch": 1.1244734338904743, "grad_norm": 1.2485300302505493, "learning_rate": 0.0008595087647778231, "loss": 3.5354, "step": 16550 }, { "epoch": 1.1248131539611361, "grad_norm": 1.3406535387039185, "learning_rate": 0.0008594662997689904, "loss": 3.6173, "step": 16555 }, { "epoch": 1.1251528740317978, "grad_norm": 1.2481826543807983, "learning_rate": 0.0008594238347601576, "loss": 3.6584, "step": 16560 }, { "epoch": 1.1254925941024596, "grad_norm": 1.5988812446594238, "learning_rate": 0.0008593813697513249, "loss": 3.7565, "step": 16565 }, { "epoch": 1.1258323141731212, "grad_norm": 1.2674334049224854, "learning_rate": 0.0008593389047424923, "loss": 3.5893, "step": 16570 }, { "epoch": 1.126172034243783, "grad_norm": 1.326209306716919, "learning_rate": 0.0008592964397336595, "loss": 3.7527, "step": 16575 }, { "epoch": 1.126511754314445, "grad_norm": 1.2260745763778687, "learning_rate": 0.0008592539747248268, "loss": 3.7572, "step": 16580 }, { "epoch": 1.1268514743851066, "grad_norm": 1.1088247299194336, "learning_rate": 0.000859211509715994, "loss": 3.7232, "step": 16585 }, { "epoch": 1.1271911944557684, "grad_norm": 1.2931889295578003, "learning_rate": 0.0008591690447071613, "loss": 3.5183, "step": 16590 }, { "epoch": 1.1275309145264303, "grad_norm": 1.2101763486862183, "learning_rate": 0.0008591265796983286, "loss": 3.8765, "step": 
16595 }, { "epoch": 1.127870634597092, "grad_norm": 4.549984931945801, "learning_rate": 0.0008590841146894958, "loss": 3.576, "step": 16600 }, { "epoch": 1.1282103546677538, "grad_norm": 1.4858015775680542, "learning_rate": 0.0008590416496806632, "loss": 3.5059, "step": 16605 }, { "epoch": 1.1285500747384156, "grad_norm": 1.2190178632736206, "learning_rate": 0.0008589991846718305, "loss": 3.6081, "step": 16610 }, { "epoch": 1.1288897948090773, "grad_norm": 1.2045350074768066, "learning_rate": 0.0008589567196629977, "loss": 3.6297, "step": 16615 }, { "epoch": 1.129229514879739, "grad_norm": 1.356902837753296, "learning_rate": 0.000858914254654165, "loss": 3.5117, "step": 16620 }, { "epoch": 1.129569234950401, "grad_norm": 1.8279441595077515, "learning_rate": 0.0008588717896453323, "loss": 3.6165, "step": 16625 }, { "epoch": 1.1299089550210626, "grad_norm": 1.5112848281860352, "learning_rate": 0.0008588293246364995, "loss": 3.6322, "step": 16630 }, { "epoch": 1.1302486750917244, "grad_norm": 1.279726505279541, "learning_rate": 0.0008587868596276667, "loss": 3.6975, "step": 16635 }, { "epoch": 1.1305883951623863, "grad_norm": 1.3135143518447876, "learning_rate": 0.0008587443946188342, "loss": 3.6399, "step": 16640 }, { "epoch": 1.130928115233048, "grad_norm": 1.2947150468826294, "learning_rate": 0.0008587019296100014, "loss": 3.5305, "step": 16645 }, { "epoch": 1.1312678353037098, "grad_norm": 1.3417495489120483, "learning_rate": 0.0008586594646011686, "loss": 3.8636, "step": 16650 }, { "epoch": 1.1316075553743716, "grad_norm": 1.1342848539352417, "learning_rate": 0.000858616999592336, "loss": 3.4343, "step": 16655 }, { "epoch": 1.1319472754450333, "grad_norm": 1.274341106414795, "learning_rate": 0.0008585745345835032, "loss": 3.6171, "step": 16660 }, { "epoch": 1.1322869955156951, "grad_norm": 1.0023139715194702, "learning_rate": 0.0008585320695746704, "loss": 3.619, "step": 16665 }, { "epoch": 1.1326267155863567, "grad_norm": 1.3439583778381348, "learning_rate": 
0.0008584896045658378, "loss": 3.6635, "step": 16670 }, { "epoch": 1.1329664356570186, "grad_norm": 1.2051395177841187, "learning_rate": 0.0008584471395570051, "loss": 3.6478, "step": 16675 }, { "epoch": 1.1333061557276805, "grad_norm": 1.3420886993408203, "learning_rate": 0.0008584046745481723, "loss": 3.5626, "step": 16680 }, { "epoch": 1.133645875798342, "grad_norm": 1.1503498554229736, "learning_rate": 0.0008583622095393396, "loss": 3.6272, "step": 16685 }, { "epoch": 1.133985595869004, "grad_norm": 1.5594539642333984, "learning_rate": 0.0008583197445305069, "loss": 3.7092, "step": 16690 }, { "epoch": 1.1343253159396658, "grad_norm": 1.482405185699463, "learning_rate": 0.0008582772795216741, "loss": 3.5786, "step": 16695 }, { "epoch": 1.1346650360103274, "grad_norm": 1.2472866773605347, "learning_rate": 0.0008582348145128414, "loss": 3.3409, "step": 16700 }, { "epoch": 1.1350047560809893, "grad_norm": 1.1303613185882568, "learning_rate": 0.0008581923495040087, "loss": 3.6866, "step": 16705 }, { "epoch": 1.1353444761516511, "grad_norm": 1.4806160926818848, "learning_rate": 0.000858149884495176, "loss": 3.6717, "step": 16710 }, { "epoch": 1.1356841962223128, "grad_norm": 1.2180570363998413, "learning_rate": 0.0008581074194863433, "loss": 3.5855, "step": 16715 }, { "epoch": 1.1360239162929746, "grad_norm": 1.167659878730774, "learning_rate": 0.0008580649544775106, "loss": 3.7711, "step": 16720 }, { "epoch": 1.1363636363636362, "grad_norm": 1.5085915327072144, "learning_rate": 0.0008580224894686778, "loss": 3.6349, "step": 16725 }, { "epoch": 1.136703356434298, "grad_norm": 1.2037054300308228, "learning_rate": 0.0008579800244598451, "loss": 3.7524, "step": 16730 }, { "epoch": 1.13704307650496, "grad_norm": 1.4167718887329102, "learning_rate": 0.0008579375594510123, "loss": 3.8126, "step": 16735 }, { "epoch": 1.1373827965756216, "grad_norm": 1.374959111213684, "learning_rate": 0.0008578950944421796, "loss": 3.5027, "step": 16740 }, { "epoch": 1.1377225166462834, 
"grad_norm": 1.4773199558258057, "learning_rate": 0.000857852629433347, "loss": 3.6005, "step": 16745 }, { "epoch": 1.1380622367169453, "grad_norm": 1.5651538372039795, "learning_rate": 0.0008578101644245142, "loss": 3.6802, "step": 16750 }, { "epoch": 1.138401956787607, "grad_norm": 1.678668737411499, "learning_rate": 0.0008577676994156815, "loss": 3.8322, "step": 16755 }, { "epoch": 1.1387416768582688, "grad_norm": 1.7387276887893677, "learning_rate": 0.0008577252344068488, "loss": 3.5175, "step": 16760 }, { "epoch": 1.1390813969289306, "grad_norm": 1.086539387702942, "learning_rate": 0.000857682769398016, "loss": 3.847, "step": 16765 }, { "epoch": 1.1394211169995923, "grad_norm": 1.7579964399337769, "learning_rate": 0.0008576403043891832, "loss": 3.6205, "step": 16770 }, { "epoch": 1.139760837070254, "grad_norm": 1.4294012784957886, "learning_rate": 0.0008575978393803506, "loss": 3.5869, "step": 16775 }, { "epoch": 1.140100557140916, "grad_norm": 1.1242291927337646, "learning_rate": 0.0008575553743715179, "loss": 3.5405, "step": 16780 }, { "epoch": 1.1404402772115776, "grad_norm": 1.3755601644515991, "learning_rate": 0.0008575129093626851, "loss": 3.6974, "step": 16785 }, { "epoch": 1.1407799972822394, "grad_norm": 1.0659431219100952, "learning_rate": 0.0008574704443538525, "loss": 3.7664, "step": 16790 }, { "epoch": 1.1411197173529013, "grad_norm": 1.4112814664840698, "learning_rate": 0.0008574279793450197, "loss": 3.3869, "step": 16795 }, { "epoch": 1.141459437423563, "grad_norm": 1.3488727807998657, "learning_rate": 0.0008573855143361869, "loss": 3.6952, "step": 16800 }, { "epoch": 1.1417991574942248, "grad_norm": 1.510749101638794, "learning_rate": 0.0008573430493273543, "loss": 3.7153, "step": 16805 }, { "epoch": 1.1421388775648866, "grad_norm": 1.3948503732681274, "learning_rate": 0.0008573005843185215, "loss": 3.6734, "step": 16810 }, { "epoch": 1.1424785976355483, "grad_norm": 1.7410659790039062, "learning_rate": 0.0008572581193096889, "loss": 3.5988, 
"step": 16815 }, { "epoch": 1.1428183177062101, "grad_norm": 1.2357791662216187, "learning_rate": 0.0008572156543008562, "loss": 3.456, "step": 16820 }, { "epoch": 1.143158037776872, "grad_norm": 2.8299803733825684, "learning_rate": 0.0008571731892920234, "loss": 3.4734, "step": 16825 }, { "epoch": 1.1434977578475336, "grad_norm": 1.339350700378418, "learning_rate": 0.0008571307242831907, "loss": 3.5917, "step": 16830 }, { "epoch": 1.1438374779181955, "grad_norm": 1.235294222831726, "learning_rate": 0.0008570882592743579, "loss": 3.7647, "step": 16835 }, { "epoch": 1.144177197988857, "grad_norm": 1.3450340032577515, "learning_rate": 0.0008570457942655252, "loss": 3.7245, "step": 16840 }, { "epoch": 1.144516918059519, "grad_norm": 1.054034948348999, "learning_rate": 0.0008570033292566925, "loss": 3.6091, "step": 16845 }, { "epoch": 1.1448566381301808, "grad_norm": 1.360019564628601, "learning_rate": 0.0008569608642478598, "loss": 3.7175, "step": 16850 }, { "epoch": 1.1451963582008424, "grad_norm": 1.572566032409668, "learning_rate": 0.0008569183992390271, "loss": 3.4641, "step": 16855 }, { "epoch": 1.1455360782715043, "grad_norm": 1.0934844017028809, "learning_rate": 0.0008568759342301944, "loss": 3.7176, "step": 16860 }, { "epoch": 1.1458757983421661, "grad_norm": 1.2917137145996094, "learning_rate": 0.0008568334692213616, "loss": 3.4106, "step": 16865 }, { "epoch": 1.1462155184128278, "grad_norm": 1.2593477964401245, "learning_rate": 0.0008567910042125288, "loss": 3.5125, "step": 16870 }, { "epoch": 1.1465552384834896, "grad_norm": 1.349924087524414, "learning_rate": 0.0008567485392036962, "loss": 3.8436, "step": 16875 }, { "epoch": 1.1468949585541515, "grad_norm": 1.2391284704208374, "learning_rate": 0.0008567060741948634, "loss": 3.5998, "step": 16880 }, { "epoch": 1.147234678624813, "grad_norm": 1.227146863937378, "learning_rate": 0.0008566636091860307, "loss": 3.645, "step": 16885 }, { "epoch": 1.147574398695475, "grad_norm": 1.3156874179840088, 
"learning_rate": 0.0008566211441771981, "loss": 3.7275, "step": 16890 }, { "epoch": 1.1479141187661366, "grad_norm": 1.3288040161132812, "learning_rate": 0.0008565786791683653, "loss": 3.6177, "step": 16895 }, { "epoch": 1.1482538388367984, "grad_norm": 1.1806517839431763, "learning_rate": 0.0008565362141595325, "loss": 3.4833, "step": 16900 }, { "epoch": 1.1485935589074603, "grad_norm": 1.030890941619873, "learning_rate": 0.0008564937491506999, "loss": 3.6869, "step": 16905 }, { "epoch": 1.148933278978122, "grad_norm": 1.283888339996338, "learning_rate": 0.0008564512841418671, "loss": 3.4813, "step": 16910 }, { "epoch": 1.1492729990487838, "grad_norm": 1.2337411642074585, "learning_rate": 0.0008564088191330343, "loss": 3.5688, "step": 16915 }, { "epoch": 1.1496127191194456, "grad_norm": 1.3437323570251465, "learning_rate": 0.0008563663541242018, "loss": 3.4779, "step": 16920 }, { "epoch": 1.1499524391901073, "grad_norm": 1.3314036130905151, "learning_rate": 0.000856323889115369, "loss": 3.5798, "step": 16925 }, { "epoch": 1.150292159260769, "grad_norm": 1.5582525730133057, "learning_rate": 0.0008562814241065362, "loss": 3.6198, "step": 16930 }, { "epoch": 1.150631879331431, "grad_norm": 1.5290462970733643, "learning_rate": 0.0008562389590977035, "loss": 3.5075, "step": 16935 }, { "epoch": 1.1509715994020926, "grad_norm": 1.0443838834762573, "learning_rate": 0.0008561964940888708, "loss": 3.7317, "step": 16940 }, { "epoch": 1.1513113194727544, "grad_norm": 1.1194489002227783, "learning_rate": 0.000856154029080038, "loss": 3.4554, "step": 16945 }, { "epoch": 1.1516510395434163, "grad_norm": 1.2235826253890991, "learning_rate": 0.0008561115640712054, "loss": 3.7114, "step": 16950 }, { "epoch": 1.151990759614078, "grad_norm": 1.2731930017471313, "learning_rate": 0.0008560690990623727, "loss": 3.3417, "step": 16955 }, { "epoch": 1.1523304796847398, "grad_norm": 1.2122819423675537, "learning_rate": 0.0008560266340535399, "loss": 3.393, "step": 16960 }, { "epoch": 
1.1526701997554016, "grad_norm": 5.1374430656433105, "learning_rate": 0.0008559841690447072, "loss": 3.5691, "step": 16965 }, { "epoch": 1.1530099198260633, "grad_norm": 2.1504838466644287, "learning_rate": 0.0008559417040358744, "loss": 3.5148, "step": 16970 }, { "epoch": 1.1533496398967251, "grad_norm": 1.1921825408935547, "learning_rate": 0.0008558992390270417, "loss": 3.5141, "step": 16975 }, { "epoch": 1.153689359967387, "grad_norm": 1.484427809715271, "learning_rate": 0.000855856774018209, "loss": 3.607, "step": 16980 }, { "epoch": 1.1540290800380486, "grad_norm": 1.4880266189575195, "learning_rate": 0.0008558143090093763, "loss": 3.7074, "step": 16985 }, { "epoch": 1.1543688001087105, "grad_norm": 1.6345655918121338, "learning_rate": 0.0008557718440005436, "loss": 3.4455, "step": 16990 }, { "epoch": 1.1547085201793723, "grad_norm": 1.2612953186035156, "learning_rate": 0.0008557293789917109, "loss": 3.6351, "step": 16995 }, { "epoch": 1.155048240250034, "grad_norm": 1.368190884590149, "learning_rate": 0.0008556869139828781, "loss": 3.5899, "step": 17000 }, { "epoch": 1.1553879603206958, "grad_norm": 1.4008674621582031, "learning_rate": 0.0008556444489740454, "loss": 3.6163, "step": 17005 }, { "epoch": 1.1557276803913574, "grad_norm": 1.1329236030578613, "learning_rate": 0.0008556019839652127, "loss": 3.6296, "step": 17010 }, { "epoch": 1.1560674004620193, "grad_norm": 1.1590012311935425, "learning_rate": 0.0008555595189563799, "loss": 3.7182, "step": 17015 }, { "epoch": 1.1564071205326811, "grad_norm": 1.030753254890442, "learning_rate": 0.0008555170539475473, "loss": 3.6383, "step": 17020 }, { "epoch": 1.1567468406033428, "grad_norm": 1.5792328119277954, "learning_rate": 0.0008554745889387146, "loss": 3.7922, "step": 17025 }, { "epoch": 1.1570865606740046, "grad_norm": 1.4246777296066284, "learning_rate": 0.0008554321239298818, "loss": 3.5352, "step": 17030 }, { "epoch": 1.1574262807446665, "grad_norm": 1.4504257440567017, "learning_rate": 
0.000855389658921049, "loss": 3.5261, "step": 17035 }, { "epoch": 1.157766000815328, "grad_norm": 2.2604565620422363, "learning_rate": 0.0008553471939122164, "loss": 3.3965, "step": 17040 }, { "epoch": 1.15810572088599, "grad_norm": 1.0536093711853027, "learning_rate": 0.0008553047289033836, "loss": 3.8707, "step": 17045 }, { "epoch": 1.1584454409566518, "grad_norm": 1.3165159225463867, "learning_rate": 0.0008552622638945508, "loss": 3.3375, "step": 17050 }, { "epoch": 1.1587851610273134, "grad_norm": 1.0613596439361572, "learning_rate": 0.0008552197988857183, "loss": 3.6332, "step": 17055 }, { "epoch": 1.1591248810979753, "grad_norm": 1.8559569120407104, "learning_rate": 0.0008551773338768855, "loss": 3.5653, "step": 17060 }, { "epoch": 1.159464601168637, "grad_norm": 2.131669044494629, "learning_rate": 0.0008551348688680527, "loss": 3.6989, "step": 17065 }, { "epoch": 1.1598043212392988, "grad_norm": 1.1471368074417114, "learning_rate": 0.00085509240385922, "loss": 3.5552, "step": 17070 }, { "epoch": 1.1601440413099606, "grad_norm": 1.3780031204223633, "learning_rate": 0.0008550499388503873, "loss": 3.6826, "step": 17075 }, { "epoch": 1.1604837613806223, "grad_norm": 1.8772066831588745, "learning_rate": 0.0008550074738415545, "loss": 3.3697, "step": 17080 }, { "epoch": 1.1608234814512841, "grad_norm": 1.2428606748580933, "learning_rate": 0.0008549650088327218, "loss": 3.5364, "step": 17085 }, { "epoch": 1.161163201521946, "grad_norm": 2.005060911178589, "learning_rate": 0.0008549225438238892, "loss": 3.5514, "step": 17090 }, { "epoch": 1.1615029215926076, "grad_norm": 1.3154884576797485, "learning_rate": 0.0008548800788150564, "loss": 3.6019, "step": 17095 }, { "epoch": 1.1618426416632694, "grad_norm": 1.1726747751235962, "learning_rate": 0.0008548376138062237, "loss": 3.558, "step": 17100 }, { "epoch": 1.1621823617339313, "grad_norm": 1.187117576599121, "learning_rate": 0.000854795148797391, "loss": 3.6023, "step": 17105 }, { "epoch": 1.162522081804593, 
"grad_norm": 1.101328730583191, "learning_rate": 0.0008547526837885582, "loss": 3.4726, "step": 17110 }, { "epoch": 1.1628618018752548, "grad_norm": 1.2584171295166016, "learning_rate": 0.0008547102187797255, "loss": 3.4044, "step": 17115 }, { "epoch": 1.1632015219459166, "grad_norm": 1.2021925449371338, "learning_rate": 0.0008546677537708927, "loss": 3.6101, "step": 17120 }, { "epoch": 1.1635412420165783, "grad_norm": 1.2013700008392334, "learning_rate": 0.0008546252887620601, "loss": 3.5135, "step": 17125 }, { "epoch": 1.1638809620872401, "grad_norm": 1.877311110496521, "learning_rate": 0.0008545828237532274, "loss": 3.9286, "step": 17130 }, { "epoch": 1.164220682157902, "grad_norm": 1.5733816623687744, "learning_rate": 0.0008545403587443946, "loss": 3.3814, "step": 17135 }, { "epoch": 1.1645604022285636, "grad_norm": 1.255810022354126, "learning_rate": 0.0008544978937355619, "loss": 3.7308, "step": 17140 }, { "epoch": 1.1649001222992255, "grad_norm": 1.5478020906448364, "learning_rate": 0.0008544554287267292, "loss": 3.6525, "step": 17145 }, { "epoch": 1.1652398423698873, "grad_norm": 1.7834136486053467, "learning_rate": 0.0008544129637178964, "loss": 3.6604, "step": 17150 }, { "epoch": 1.165579562440549, "grad_norm": 1.0972856283187866, "learning_rate": 0.0008543704987090638, "loss": 3.6499, "step": 17155 }, { "epoch": 1.1659192825112108, "grad_norm": 1.1420916318893433, "learning_rate": 0.0008543280337002311, "loss": 3.8311, "step": 17160 }, { "epoch": 1.1662590025818727, "grad_norm": 1.329188346862793, "learning_rate": 0.0008542855686913983, "loss": 3.6361, "step": 17165 }, { "epoch": 1.1665987226525343, "grad_norm": 0.9906181693077087, "learning_rate": 0.0008542431036825657, "loss": 3.4825, "step": 17170 }, { "epoch": 1.1669384427231961, "grad_norm": 1.5416910648345947, "learning_rate": 0.0008542006386737329, "loss": 3.8075, "step": 17175 }, { "epoch": 1.1672781627938578, "grad_norm": 1.3668458461761475, "learning_rate": 0.0008541581736649001, "loss": 3.807, 
"step": 17180 }, { "epoch": 1.1676178828645196, "grad_norm": 1.1941957473754883, "learning_rate": 0.0008541157086560674, "loss": 3.6949, "step": 17185 }, { "epoch": 1.1679576029351815, "grad_norm": 1.643873929977417, "learning_rate": 0.0008540732436472347, "loss": 3.3861, "step": 17190 }, { "epoch": 1.168297323005843, "grad_norm": 1.4003697633743286, "learning_rate": 0.000854030778638402, "loss": 3.5145, "step": 17195 }, { "epoch": 1.168637043076505, "grad_norm": 1.3523937463760376, "learning_rate": 0.0008539883136295693, "loss": 3.8688, "step": 17200 }, { "epoch": 1.1689767631471668, "grad_norm": 1.0581203699111938, "learning_rate": 0.0008539458486207366, "loss": 3.5618, "step": 17205 }, { "epoch": 1.1693164832178284, "grad_norm": 1.2886035442352295, "learning_rate": 0.0008539033836119038, "loss": 3.6134, "step": 17210 }, { "epoch": 1.1696562032884903, "grad_norm": 1.2518532276153564, "learning_rate": 0.0008538609186030711, "loss": 3.5588, "step": 17215 }, { "epoch": 1.1699959233591521, "grad_norm": 2.056565523147583, "learning_rate": 0.0008538184535942383, "loss": 3.6325, "step": 17220 }, { "epoch": 1.1703356434298138, "grad_norm": 1.1777377128601074, "learning_rate": 0.0008537759885854056, "loss": 3.519, "step": 17225 }, { "epoch": 1.1706753635004756, "grad_norm": 1.3716936111450195, "learning_rate": 0.000853733523576573, "loss": 3.5591, "step": 17230 }, { "epoch": 1.1710150835711373, "grad_norm": 1.4075703620910645, "learning_rate": 0.0008536910585677402, "loss": 3.301, "step": 17235 }, { "epoch": 1.1713548036417991, "grad_norm": 1.6518900394439697, "learning_rate": 0.0008536485935589075, "loss": 3.347, "step": 17240 }, { "epoch": 1.171694523712461, "grad_norm": 1.3088793754577637, "learning_rate": 0.0008536061285500748, "loss": 3.5689, "step": 17245 }, { "epoch": 1.1720342437831226, "grad_norm": 1.438757300376892, "learning_rate": 0.000853563663541242, "loss": 3.5779, "step": 17250 }, { "epoch": 1.1723739638537845, "grad_norm": 1.4259088039398193, 
"learning_rate": 0.0008535211985324093, "loss": 3.7573, "step": 17255 }, { "epoch": 1.1727136839244463, "grad_norm": 2.0238494873046875, "learning_rate": 0.0008534787335235766, "loss": 3.6332, "step": 17260 }, { "epoch": 1.173053403995108, "grad_norm": 1.197591781616211, "learning_rate": 0.0008534362685147439, "loss": 3.601, "step": 17265 }, { "epoch": 1.1733931240657698, "grad_norm": 1.2946574687957764, "learning_rate": 0.0008533938035059111, "loss": 3.6112, "step": 17270 }, { "epoch": 1.1737328441364316, "grad_norm": 1.256157398223877, "learning_rate": 0.0008533513384970785, "loss": 3.4189, "step": 17275 }, { "epoch": 1.1740725642070933, "grad_norm": 1.4745858907699585, "learning_rate": 0.0008533088734882457, "loss": 3.8259, "step": 17280 }, { "epoch": 1.1744122842777551, "grad_norm": 1.3893141746520996, "learning_rate": 0.0008532664084794129, "loss": 3.528, "step": 17285 }, { "epoch": 1.174752004348417, "grad_norm": 1.2321279048919678, "learning_rate": 0.0008532239434705803, "loss": 3.6218, "step": 17290 }, { "epoch": 1.1750917244190786, "grad_norm": 1.3681789636611938, "learning_rate": 0.0008531814784617475, "loss": 3.733, "step": 17295 }, { "epoch": 1.1754314444897405, "grad_norm": 1.3041305541992188, "learning_rate": 0.0008531390134529148, "loss": 3.7128, "step": 17300 }, { "epoch": 1.1757711645604023, "grad_norm": 1.3904788494110107, "learning_rate": 0.0008530965484440822, "loss": 3.7668, "step": 17305 }, { "epoch": 1.176110884631064, "grad_norm": 2.139915943145752, "learning_rate": 0.0008530540834352494, "loss": 3.7738, "step": 17310 }, { "epoch": 1.1764506047017258, "grad_norm": 1.1908096075057983, "learning_rate": 0.0008530116184264166, "loss": 3.6077, "step": 17315 }, { "epoch": 1.1767903247723877, "grad_norm": 1.1178627014160156, "learning_rate": 0.000852969153417584, "loss": 3.833, "step": 17320 }, { "epoch": 1.1771300448430493, "grad_norm": 1.402048110961914, "learning_rate": 0.0008529266884087512, "loss": 3.6413, "step": 17325 }, { "epoch": 
1.1774697649137111, "grad_norm": 1.0867230892181396, "learning_rate": 0.0008528842233999184, "loss": 3.8608, "step": 17330 }, { "epoch": 1.177809484984373, "grad_norm": 0.9738664627075195, "learning_rate": 0.0008528417583910858, "loss": 3.7724, "step": 17335 }, { "epoch": 1.1781492050550346, "grad_norm": 1.5712816715240479, "learning_rate": 0.0008527992933822531, "loss": 3.6681, "step": 17340 }, { "epoch": 1.1784889251256965, "grad_norm": 1.8806418180465698, "learning_rate": 0.0008527568283734203, "loss": 3.5572, "step": 17345 }, { "epoch": 1.178828645196358, "grad_norm": 1.1907312870025635, "learning_rate": 0.0008527143633645876, "loss": 3.4788, "step": 17350 }, { "epoch": 1.17916836526702, "grad_norm": 1.2965304851531982, "learning_rate": 0.0008526718983557549, "loss": 3.5021, "step": 17355 }, { "epoch": 1.1795080853376818, "grad_norm": 2.1944098472595215, "learning_rate": 0.0008526294333469221, "loss": 3.7969, "step": 17360 }, { "epoch": 1.1798478054083434, "grad_norm": 2.607063055038452, "learning_rate": 0.0008525869683380894, "loss": 3.3119, "step": 17365 }, { "epoch": 1.1801875254790053, "grad_norm": 1.127215027809143, "learning_rate": 0.0008525445033292567, "loss": 3.5232, "step": 17370 }, { "epoch": 1.1805272455496671, "grad_norm": 1.3176013231277466, "learning_rate": 0.000852502038320424, "loss": 3.6217, "step": 17375 }, { "epoch": 1.1808669656203288, "grad_norm": 1.2782303094863892, "learning_rate": 0.0008524595733115913, "loss": 3.6303, "step": 17380 }, { "epoch": 1.1812066856909906, "grad_norm": 1.7452164888381958, "learning_rate": 0.0008524171083027585, "loss": 3.3513, "step": 17385 }, { "epoch": 1.1815464057616525, "grad_norm": 1.5649539232254028, "learning_rate": 0.0008523746432939258, "loss": 3.5265, "step": 17390 }, { "epoch": 1.1818861258323141, "grad_norm": 1.4418041706085205, "learning_rate": 0.0008523321782850931, "loss": 3.4342, "step": 17395 }, { "epoch": 1.182225845902976, "grad_norm": 4.921403884887695, "learning_rate": 
0.0008522897132762603, "loss": 3.4338, "step": 17400 }, { "epoch": 1.1825655659736376, "grad_norm": 1.4002809524536133, "learning_rate": 0.0008522472482674277, "loss": 3.3983, "step": 17405 }, { "epoch": 1.1829052860442995, "grad_norm": 1.278860092163086, "learning_rate": 0.000852204783258595, "loss": 3.7291, "step": 17410 }, { "epoch": 1.1832450061149613, "grad_norm": 1.2295200824737549, "learning_rate": 0.0008521623182497622, "loss": 3.4707, "step": 17415 }, { "epoch": 1.183584726185623, "grad_norm": 1.3361661434173584, "learning_rate": 0.0008521198532409294, "loss": 3.5371, "step": 17420 }, { "epoch": 1.1839244462562848, "grad_norm": 1.130141019821167, "learning_rate": 0.0008520773882320968, "loss": 3.5017, "step": 17425 }, { "epoch": 1.1842641663269466, "grad_norm": 1.1493678092956543, "learning_rate": 0.000852034923223264, "loss": 3.7797, "step": 17430 }, { "epoch": 1.1846038863976083, "grad_norm": 1.0802040100097656, "learning_rate": 0.0008519924582144312, "loss": 3.8915, "step": 17435 }, { "epoch": 1.1849436064682701, "grad_norm": 1.3339653015136719, "learning_rate": 0.0008519499932055987, "loss": 3.5262, "step": 17440 }, { "epoch": 1.185283326538932, "grad_norm": 1.3667069673538208, "learning_rate": 0.0008519160211985324, "loss": 3.4229, "step": 17445 }, { "epoch": 1.1856230466095936, "grad_norm": 1.4327776432037354, "learning_rate": 0.0008518735561896997, "loss": 3.6388, "step": 17450 }, { "epoch": 1.1859627666802555, "grad_norm": 1.426360845565796, "learning_rate": 0.0008518310911808669, "loss": 3.7803, "step": 17455 }, { "epoch": 1.1863024867509173, "grad_norm": 1.1326602697372437, "learning_rate": 0.0008517886261720343, "loss": 3.662, "step": 17460 }, { "epoch": 1.186642206821579, "grad_norm": 1.4912965297698975, "learning_rate": 0.0008517461611632016, "loss": 3.5919, "step": 17465 }, { "epoch": 1.1869819268922408, "grad_norm": 2.075784921646118, "learning_rate": 0.0008517036961543688, "loss": 3.7542, "step": 17470 }, { "epoch": 1.1873216469629027, 
"grad_norm": 1.4299319982528687, "learning_rate": 0.0008516612311455361, "loss": 3.2126, "step": 17475 }, { "epoch": 1.1876613670335643, "grad_norm": 1.5401322841644287, "learning_rate": 0.0008516187661367034, "loss": 3.6764, "step": 17480 }, { "epoch": 1.1880010871042261, "grad_norm": 1.4392273426055908, "learning_rate": 0.0008515763011278706, "loss": 3.6894, "step": 17485 }, { "epoch": 1.188340807174888, "grad_norm": 1.3259761333465576, "learning_rate": 0.0008515338361190379, "loss": 3.285, "step": 17490 }, { "epoch": 1.1886805272455496, "grad_norm": 1.313845157623291, "learning_rate": 0.0008514913711102053, "loss": 3.4525, "step": 17495 }, { "epoch": 1.1890202473162115, "grad_norm": 1.2591824531555176, "learning_rate": 0.0008514489061013725, "loss": 3.5248, "step": 17500 }, { "epoch": 1.1893599673868733, "grad_norm": 1.5509825944900513, "learning_rate": 0.0008514064410925397, "loss": 3.479, "step": 17505 }, { "epoch": 1.189699687457535, "grad_norm": 1.0799860954284668, "learning_rate": 0.0008513639760837071, "loss": 3.5528, "step": 17510 }, { "epoch": 1.1900394075281968, "grad_norm": 1.4442540407180786, "learning_rate": 0.0008513215110748743, "loss": 3.8483, "step": 17515 }, { "epoch": 1.1903791275988584, "grad_norm": 1.3773220777511597, "learning_rate": 0.0008512790460660415, "loss": 3.4923, "step": 17520 }, { "epoch": 1.1907188476695203, "grad_norm": 1.8486692905426025, "learning_rate": 0.0008512365810572089, "loss": 3.5826, "step": 17525 }, { "epoch": 1.1910585677401822, "grad_norm": 1.1105751991271973, "learning_rate": 0.0008511941160483762, "loss": 3.5986, "step": 17530 }, { "epoch": 1.1913982878108438, "grad_norm": 1.2687098979949951, "learning_rate": 0.0008511516510395434, "loss": 3.7254, "step": 17535 }, { "epoch": 1.1917380078815056, "grad_norm": 1.3694097995758057, "learning_rate": 0.0008511091860307108, "loss": 3.4587, "step": 17540 }, { "epoch": 1.1920777279521675, "grad_norm": 1.3436557054519653, "learning_rate": 0.000851066721021878, "loss": 
3.5668, "step": 17545 }, { "epoch": 1.1924174480228291, "grad_norm": 1.3346526622772217, "learning_rate": 0.0008510242560130452, "loss": 3.6773, "step": 17550 }, { "epoch": 1.192757168093491, "grad_norm": 1.702106237411499, "learning_rate": 0.0008509817910042125, "loss": 3.5624, "step": 17555 }, { "epoch": 1.1930968881641528, "grad_norm": 1.3941550254821777, "learning_rate": 0.0008509393259953798, "loss": 3.6193, "step": 17560 }, { "epoch": 1.1934366082348145, "grad_norm": 1.3299963474273682, "learning_rate": 0.0008508968609865471, "loss": 3.3447, "step": 17565 }, { "epoch": 1.1937763283054763, "grad_norm": 1.3931523561477661, "learning_rate": 0.0008508543959777144, "loss": 3.6434, "step": 17570 }, { "epoch": 1.194116048376138, "grad_norm": 1.6396822929382324, "learning_rate": 0.0008508119309688817, "loss": 3.3645, "step": 17575 }, { "epoch": 1.1944557684467998, "grad_norm": 1.1086560487747192, "learning_rate": 0.0008507694659600489, "loss": 3.5845, "step": 17580 }, { "epoch": 1.1947954885174616, "grad_norm": 1.4738422632217407, "learning_rate": 0.0008507270009512162, "loss": 3.7321, "step": 17585 }, { "epoch": 1.1951352085881233, "grad_norm": 1.4207996129989624, "learning_rate": 0.0008506845359423835, "loss": 3.7699, "step": 17590 }, { "epoch": 1.1954749286587851, "grad_norm": 5.002362251281738, "learning_rate": 0.0008506420709335507, "loss": 3.7915, "step": 17595 }, { "epoch": 1.195814648729447, "grad_norm": 1.0703197717666626, "learning_rate": 0.0008505996059247181, "loss": 3.6249, "step": 17600 }, { "epoch": 1.1961543688001086, "grad_norm": 1.3623998165130615, "learning_rate": 0.0008505571409158853, "loss": 3.4799, "step": 17605 }, { "epoch": 1.1964940888707705, "grad_norm": 1.42000412940979, "learning_rate": 0.0008505146759070526, "loss": 3.1443, "step": 17610 }, { "epoch": 1.1968338089414323, "grad_norm": 1.537500023841858, "learning_rate": 0.0008504722108982199, "loss": 3.6457, "step": 17615 }, { "epoch": 1.197173529012094, "grad_norm": 1.42108154296875, 
"learning_rate": 0.0008504297458893871, "loss": 3.5641, "step": 17620 }, { "epoch": 1.1975132490827558, "grad_norm": 3.4872069358825684, "learning_rate": 0.0008503872808805544, "loss": 3.3897, "step": 17625 }, { "epoch": 1.1978529691534177, "grad_norm": 1.2634514570236206, "learning_rate": 0.0008503448158717217, "loss": 3.5519, "step": 17630 }, { "epoch": 1.1981926892240793, "grad_norm": 1.0648272037506104, "learning_rate": 0.000850302350862889, "loss": 3.4505, "step": 17635 }, { "epoch": 1.1985324092947411, "grad_norm": 2.9119250774383545, "learning_rate": 0.0008502598858540563, "loss": 3.7864, "step": 17640 }, { "epoch": 1.198872129365403, "grad_norm": 1.3478792905807495, "learning_rate": 0.0008502174208452236, "loss": 3.5139, "step": 17645 }, { "epoch": 1.1992118494360646, "grad_norm": 4.490004539489746, "learning_rate": 0.0008501749558363908, "loss": 3.5598, "step": 17650 }, { "epoch": 1.1995515695067265, "grad_norm": 1.422042965888977, "learning_rate": 0.000850132490827558, "loss": 3.6949, "step": 17655 }, { "epoch": 1.1998912895773883, "grad_norm": 1.3151881694793701, "learning_rate": 0.0008500900258187254, "loss": 3.6997, "step": 17660 }, { "epoch": 1.20023100964805, "grad_norm": 1.3358696699142456, "learning_rate": 0.0008500475608098926, "loss": 3.6558, "step": 17665 }, { "epoch": 1.2005707297187118, "grad_norm": 1.1203299760818481, "learning_rate": 0.0008500050958010599, "loss": 3.5273, "step": 17670 }, { "epoch": 1.2009104497893737, "grad_norm": 1.2939271926879883, "learning_rate": 0.0008499626307922273, "loss": 3.54, "step": 17675 }, { "epoch": 1.2012501698600353, "grad_norm": 1.1111971139907837, "learning_rate": 0.0008499201657833945, "loss": 3.3232, "step": 17680 }, { "epoch": 1.2015898899306972, "grad_norm": 4.7715535163879395, "learning_rate": 0.0008498777007745617, "loss": 3.7278, "step": 17685 }, { "epoch": 1.2019296100013588, "grad_norm": 1.2151548862457275, "learning_rate": 0.000849835235765729, "loss": 3.8153, "step": 17690 }, { "epoch": 
1.2022693300720206, "grad_norm": 1.1532493829727173, "learning_rate": 0.0008497927707568963, "loss": 3.4832, "step": 17695 }, { "epoch": 1.2026090501426825, "grad_norm": 0.9607582688331604, "learning_rate": 0.0008497503057480636, "loss": 3.327, "step": 17700 }, { "epoch": 1.2029487702133441, "grad_norm": 1.9059737920761108, "learning_rate": 0.000849707840739231, "loss": 3.3817, "step": 17705 }, { "epoch": 1.203288490284006, "grad_norm": 2.336454391479492, "learning_rate": 0.0008496653757303982, "loss": 3.9697, "step": 17710 }, { "epoch": 1.2036282103546678, "grad_norm": 1.284204363822937, "learning_rate": 0.0008496229107215655, "loss": 3.3052, "step": 17715 }, { "epoch": 1.2039679304253295, "grad_norm": 2.075328826904297, "learning_rate": 0.0008495804457127327, "loss": 3.7334, "step": 17720 }, { "epoch": 1.2043076504959913, "grad_norm": 1.613434076309204, "learning_rate": 0.0008495379807039, "loss": 3.7504, "step": 17725 }, { "epoch": 1.2046473705666532, "grad_norm": 1.3634446859359741, "learning_rate": 0.0008494955156950673, "loss": 3.6236, "step": 17730 }, { "epoch": 1.2049870906373148, "grad_norm": 1.809268593788147, "learning_rate": 0.0008494530506862345, "loss": 3.5747, "step": 17735 }, { "epoch": 1.2053268107079766, "grad_norm": 1.0879569053649902, "learning_rate": 0.0008494105856774019, "loss": 3.4562, "step": 17740 }, { "epoch": 1.2056665307786383, "grad_norm": 3.1985225677490234, "learning_rate": 0.0008493681206685692, "loss": 3.6389, "step": 17745 }, { "epoch": 1.2060062508493001, "grad_norm": 1.4660992622375488, "learning_rate": 0.0008493256556597364, "loss": 3.751, "step": 17750 }, { "epoch": 1.206345970919962, "grad_norm": 1.4847209453582764, "learning_rate": 0.0008492831906509036, "loss": 3.3598, "step": 17755 }, { "epoch": 1.2066856909906236, "grad_norm": 1.2877333164215088, "learning_rate": 0.000849240725642071, "loss": 3.6625, "step": 17760 }, { "epoch": 1.2070254110612855, "grad_norm": 1.1049795150756836, "learning_rate": 0.0008491982606332382, 
"loss": 3.7143, "step": 17765 }, { "epoch": 1.2073651311319473, "grad_norm": 1.2189003229141235, "learning_rate": 0.0008491557956244054, "loss": 3.4396, "step": 17770 }, { "epoch": 1.207704851202609, "grad_norm": 1.0921012163162231, "learning_rate": 0.0008491133306155729, "loss": 3.7891, "step": 17775 }, { "epoch": 1.2080445712732708, "grad_norm": 1.462216854095459, "learning_rate": 0.0008490708656067401, "loss": 3.603, "step": 17780 }, { "epoch": 1.2083842913439327, "grad_norm": 1.655122995376587, "learning_rate": 0.0008490284005979073, "loss": 3.5127, "step": 17785 }, { "epoch": 1.2087240114145943, "grad_norm": 1.3106335401535034, "learning_rate": 0.0008489859355890747, "loss": 3.9236, "step": 17790 }, { "epoch": 1.2090637314852561, "grad_norm": 1.13233482837677, "learning_rate": 0.0008489434705802419, "loss": 3.6706, "step": 17795 }, { "epoch": 1.209403451555918, "grad_norm": 1.335641622543335, "learning_rate": 0.0008489010055714091, "loss": 3.7451, "step": 17800 }, { "epoch": 1.2097431716265796, "grad_norm": 1.332876443862915, "learning_rate": 0.0008488585405625765, "loss": 3.3516, "step": 17805 }, { "epoch": 1.2100828916972415, "grad_norm": 1.6946650743484497, "learning_rate": 0.0008488160755537438, "loss": 3.5075, "step": 17810 }, { "epoch": 1.2104226117679033, "grad_norm": 1.7229321002960205, "learning_rate": 0.000848773610544911, "loss": 3.6967, "step": 17815 }, { "epoch": 1.210762331838565, "grad_norm": 1.1054545640945435, "learning_rate": 0.0008487311455360783, "loss": 3.6615, "step": 17820 }, { "epoch": 1.2111020519092268, "grad_norm": 1.7070062160491943, "learning_rate": 0.0008486886805272456, "loss": 3.6791, "step": 17825 }, { "epoch": 1.2114417719798887, "grad_norm": 0.9867748618125916, "learning_rate": 0.0008486462155184128, "loss": 3.7103, "step": 17830 }, { "epoch": 1.2117814920505503, "grad_norm": 1.3707433938980103, "learning_rate": 0.0008486037505095801, "loss": 3.626, "step": 17835 }, { "epoch": 1.2121212121212122, "grad_norm": 
1.2696044445037842, "learning_rate": 0.0008485612855007475, "loss": 3.7764, "step": 17840 }, { "epoch": 1.212460932191874, "grad_norm": 1.1882160902023315, "learning_rate": 0.0008485188204919147, "loss": 3.6996, "step": 17845 }, { "epoch": 1.2128006522625356, "grad_norm": 1.9996521472930908, "learning_rate": 0.000848476355483082, "loss": 3.6224, "step": 17850 }, { "epoch": 1.2131403723331975, "grad_norm": 1.7840602397918701, "learning_rate": 0.0008484338904742492, "loss": 3.765, "step": 17855 }, { "epoch": 1.2134800924038591, "grad_norm": 1.097930669784546, "learning_rate": 0.0008483914254654165, "loss": 3.5665, "step": 17860 }, { "epoch": 1.213819812474521, "grad_norm": 1.1922889947891235, "learning_rate": 0.0008483489604565838, "loss": 3.8199, "step": 17865 }, { "epoch": 1.2141595325451828, "grad_norm": 1.6462266445159912, "learning_rate": 0.000848306495447751, "loss": 3.4744, "step": 17870 }, { "epoch": 1.2144992526158445, "grad_norm": 1.196407675743103, "learning_rate": 0.0008482640304389184, "loss": 3.8655, "step": 17875 }, { "epoch": 1.2148389726865063, "grad_norm": 1.3269468545913696, "learning_rate": 0.0008482215654300857, "loss": 3.7308, "step": 17880 }, { "epoch": 1.2151786927571682, "grad_norm": 1.6022000312805176, "learning_rate": 0.0008481791004212529, "loss": 3.3831, "step": 17885 }, { "epoch": 1.2155184128278298, "grad_norm": 0.9736699461936951, "learning_rate": 0.0008481366354124201, "loss": 3.693, "step": 17890 }, { "epoch": 1.2158581328984917, "grad_norm": 1.1100139617919922, "learning_rate": 0.0008480941704035875, "loss": 3.8194, "step": 17895 }, { "epoch": 1.2161978529691535, "grad_norm": 1.117911696434021, "learning_rate": 0.0008480517053947547, "loss": 3.7945, "step": 17900 }, { "epoch": 1.2165375730398151, "grad_norm": 1.5778332948684692, "learning_rate": 0.0008480092403859219, "loss": 3.6767, "step": 17905 }, { "epoch": 1.216877293110477, "grad_norm": 1.2781437635421753, "learning_rate": 0.0008479667753770894, "loss": 3.5242, "step": 17910 
}, { "epoch": 1.2172170131811386, "grad_norm": 1.5785930156707764, "learning_rate": 0.0008479243103682566, "loss": 3.5717, "step": 17915 }, { "epoch": 1.2175567332518005, "grad_norm": 1.2541922330856323, "learning_rate": 0.0008478818453594238, "loss": 3.7126, "step": 17920 }, { "epoch": 1.2178964533224623, "grad_norm": 1.7932292222976685, "learning_rate": 0.0008478393803505912, "loss": 3.7177, "step": 17925 }, { "epoch": 1.218236173393124, "grad_norm": 1.5561612844467163, "learning_rate": 0.0008477969153417584, "loss": 3.444, "step": 17930 }, { "epoch": 1.2185758934637858, "grad_norm": 1.1820042133331299, "learning_rate": 0.0008477629433346923, "loss": 3.7639, "step": 17935 }, { "epoch": 1.2189156135344477, "grad_norm": 1.5302547216415405, "learning_rate": 0.0008477204783258595, "loss": 3.4101, "step": 17940 }, { "epoch": 1.2192553336051093, "grad_norm": 1.1133216619491577, "learning_rate": 0.0008476780133170268, "loss": 3.9144, "step": 17945 }, { "epoch": 1.2195950536757711, "grad_norm": 1.7016228437423706, "learning_rate": 0.0008476355483081941, "loss": 3.5697, "step": 17950 }, { "epoch": 1.219934773746433, "grad_norm": 2.0059921741485596, "learning_rate": 0.0008475930832993613, "loss": 3.542, "step": 17955 }, { "epoch": 1.2202744938170946, "grad_norm": 1.214593768119812, "learning_rate": 0.0008475506182905286, "loss": 3.6837, "step": 17960 }, { "epoch": 1.2206142138877565, "grad_norm": 1.252801775932312, "learning_rate": 0.000847508153281696, "loss": 3.6391, "step": 17965 }, { "epoch": 1.2209539339584183, "grad_norm": 1.5442100763320923, "learning_rate": 0.0008474656882728632, "loss": 3.6595, "step": 17970 }, { "epoch": 1.22129365402908, "grad_norm": 1.3177729845046997, "learning_rate": 0.0008474232232640305, "loss": 3.6462, "step": 17975 }, { "epoch": 1.2216333740997418, "grad_norm": 0.9997592568397522, "learning_rate": 0.0008473807582551978, "loss": 3.6082, "step": 17980 }, { "epoch": 1.2219730941704037, "grad_norm": 1.3159356117248535, "learning_rate": 
0.000847338293246365, "loss": 3.433, "step": 17985 }, { "epoch": 1.2223128142410653, "grad_norm": 1.363865613937378, "learning_rate": 0.0008472958282375322, "loss": 3.7204, "step": 17990 }, { "epoch": 1.2226525343117272, "grad_norm": 1.208786129951477, "learning_rate": 0.0008472533632286996, "loss": 3.7226, "step": 17995 }, { "epoch": 1.222992254382389, "grad_norm": 1.8888460397720337, "learning_rate": 0.0008472108982198669, "loss": 3.5346, "step": 18000 }, { "epoch": 1.2233319744530506, "grad_norm": 1.2429465055465698, "learning_rate": 0.0008471684332110341, "loss": 3.5399, "step": 18005 }, { "epoch": 1.2236716945237125, "grad_norm": 1.3827271461486816, "learning_rate": 0.0008471259682022015, "loss": 3.6845, "step": 18010 }, { "epoch": 1.2240114145943743, "grad_norm": 1.0488187074661255, "learning_rate": 0.0008470835031933687, "loss": 3.4885, "step": 18015 }, { "epoch": 1.224351134665036, "grad_norm": 1.0642189979553223, "learning_rate": 0.0008470410381845359, "loss": 3.845, "step": 18020 }, { "epoch": 1.2246908547356978, "grad_norm": 1.1225789785385132, "learning_rate": 0.0008469985731757033, "loss": 3.5503, "step": 18025 }, { "epoch": 1.2250305748063597, "grad_norm": 1.3311715126037598, "learning_rate": 0.0008469561081668705, "loss": 3.3833, "step": 18030 }, { "epoch": 1.2253702948770213, "grad_norm": 1.1571400165557861, "learning_rate": 0.0008469136431580378, "loss": 3.9116, "step": 18035 }, { "epoch": 1.2257100149476832, "grad_norm": 2.3590850830078125, "learning_rate": 0.0008468711781492051, "loss": 3.7766, "step": 18040 }, { "epoch": 1.2260497350183448, "grad_norm": 1.2237871885299683, "learning_rate": 0.0008468287131403724, "loss": 3.7668, "step": 18045 }, { "epoch": 1.2263894550890067, "grad_norm": 1.1359655857086182, "learning_rate": 0.0008467862481315396, "loss": 3.4883, "step": 18050 }, { "epoch": 1.2267291751596685, "grad_norm": 1.4269251823425293, "learning_rate": 0.0008467437831227069, "loss": 3.6644, "step": 18055 }, { "epoch": 1.2270688952303301, 
"grad_norm": 1.438506841659546, "learning_rate": 0.0008467013181138742, "loss": 3.3234, "step": 18060 }, { "epoch": 1.227408615300992, "grad_norm": 1.2240567207336426, "learning_rate": 0.0008466588531050414, "loss": 3.547, "step": 18065 }, { "epoch": 1.2277483353716538, "grad_norm": 1.2921143770217896, "learning_rate": 0.0008466163880962088, "loss": 3.7465, "step": 18070 }, { "epoch": 1.2280880554423155, "grad_norm": 1.112519383430481, "learning_rate": 0.0008465739230873761, "loss": 3.7497, "step": 18075 }, { "epoch": 1.2284277755129773, "grad_norm": 1.416303038597107, "learning_rate": 0.0008465314580785433, "loss": 3.4424, "step": 18080 }, { "epoch": 1.228767495583639, "grad_norm": 1.1632537841796875, "learning_rate": 0.0008464889930697106, "loss": 3.5532, "step": 18085 }, { "epoch": 1.2291072156543008, "grad_norm": 1.5655796527862549, "learning_rate": 0.0008464465280608778, "loss": 3.6606, "step": 18090 }, { "epoch": 1.2294469357249627, "grad_norm": 1.304542064666748, "learning_rate": 0.0008464040630520451, "loss": 3.623, "step": 18095 }, { "epoch": 1.2297866557956243, "grad_norm": 1.0754669904708862, "learning_rate": 0.0008463615980432124, "loss": 3.8449, "step": 18100 }, { "epoch": 1.2301263758662861, "grad_norm": 1.0942376852035522, "learning_rate": 0.0008463191330343797, "loss": 3.7459, "step": 18105 }, { "epoch": 1.230466095936948, "grad_norm": 1.0985232591629028, "learning_rate": 0.000846276668025547, "loss": 3.5541, "step": 18110 }, { "epoch": 1.2308058160076096, "grad_norm": 1.1378705501556396, "learning_rate": 0.0008462342030167143, "loss": 3.6709, "step": 18115 }, { "epoch": 1.2311455360782715, "grad_norm": 1.2582639455795288, "learning_rate": 0.0008461917380078815, "loss": 3.8657, "step": 18120 }, { "epoch": 1.2314852561489333, "grad_norm": 1.2281392812728882, "learning_rate": 0.0008461492729990487, "loss": 3.4921, "step": 18125 }, { "epoch": 1.231824976219595, "grad_norm": 1.2719323635101318, "learning_rate": 0.0008461068079902161, "loss": 3.534, 
"step": 18130 }, { "epoch": 1.2321646962902568, "grad_norm": 1.2360328435897827, "learning_rate": 0.0008460643429813833, "loss": 3.66, "step": 18135 }, { "epoch": 1.2325044163609187, "grad_norm": 1.2661629915237427, "learning_rate": 0.0008460218779725506, "loss": 3.4652, "step": 18140 }, { "epoch": 1.2328441364315803, "grad_norm": 1.8124936819076538, "learning_rate": 0.000845979412963718, "loss": 3.5965, "step": 18145 }, { "epoch": 1.2331838565022422, "grad_norm": 1.1610101461410522, "learning_rate": 0.0008459369479548852, "loss": 3.6529, "step": 18150 }, { "epoch": 1.233523576572904, "grad_norm": 1.091636061668396, "learning_rate": 0.0008458944829460524, "loss": 3.4832, "step": 18155 }, { "epoch": 1.2338632966435656, "grad_norm": 1.264228105545044, "learning_rate": 0.0008458520179372198, "loss": 3.5681, "step": 18160 }, { "epoch": 1.2342030167142275, "grad_norm": 2.40230131149292, "learning_rate": 0.000845809552928387, "loss": 3.3574, "step": 18165 }, { "epoch": 1.2345427367848893, "grad_norm": 1.4689041376113892, "learning_rate": 0.0008457670879195542, "loss": 3.5758, "step": 18170 }, { "epoch": 1.234882456855551, "grad_norm": 1.2972742319107056, "learning_rate": 0.0008457246229107217, "loss": 3.5413, "step": 18175 }, { "epoch": 1.2352221769262128, "grad_norm": 1.5239436626434326, "learning_rate": 0.0008456821579018889, "loss": 3.6168, "step": 18180 }, { "epoch": 1.2355618969968747, "grad_norm": 1.3663471937179565, "learning_rate": 0.0008456396928930561, "loss": 3.6564, "step": 18185 }, { "epoch": 1.2359016170675363, "grad_norm": 1.2094841003417969, "learning_rate": 0.0008455972278842234, "loss": 3.5125, "step": 18190 }, { "epoch": 1.2362413371381982, "grad_norm": 1.1542764902114868, "learning_rate": 0.0008455547628753907, "loss": 3.6537, "step": 18195 }, { "epoch": 1.23658105720886, "grad_norm": 1.1457371711730957, "learning_rate": 0.0008455122978665579, "loss": 3.606, "step": 18200 }, { "epoch": 1.2369207772795217, "grad_norm": 1.3004145622253418, 
"learning_rate": 0.0008454698328577252, "loss": 3.4436, "step": 18205 }, { "epoch": 1.2372604973501835, "grad_norm": 1.4451955556869507, "learning_rate": 0.0008454273678488926, "loss": 3.5909, "step": 18210 }, { "epoch": 1.2376002174208451, "grad_norm": 1.134179711341858, "learning_rate": 0.0008453849028400598, "loss": 3.4785, "step": 18215 }, { "epoch": 1.237939937491507, "grad_norm": 1.2516230344772339, "learning_rate": 0.0008453424378312271, "loss": 3.445, "step": 18220 }, { "epoch": 1.2382796575621688, "grad_norm": 1.3505141735076904, "learning_rate": 0.0008452999728223943, "loss": 3.8621, "step": 18225 }, { "epoch": 1.2386193776328305, "grad_norm": 1.2745293378829956, "learning_rate": 0.0008452575078135616, "loss": 3.5879, "step": 18230 }, { "epoch": 1.2389590977034923, "grad_norm": 1.0283074378967285, "learning_rate": 0.0008452150428047289, "loss": 3.6688, "step": 18235 }, { "epoch": 1.2392988177741542, "grad_norm": 0.89608234167099, "learning_rate": 0.0008451725777958961, "loss": 3.5658, "step": 18240 }, { "epoch": 1.2396385378448158, "grad_norm": 1.6191368103027344, "learning_rate": 0.0008451301127870636, "loss": 3.5193, "step": 18245 }, { "epoch": 1.2399782579154777, "grad_norm": 1.1134147644042969, "learning_rate": 0.0008450876477782308, "loss": 3.592, "step": 18250 }, { "epoch": 1.2403179779861393, "grad_norm": 1.2762707471847534, "learning_rate": 0.000845045182769398, "loss": 3.5854, "step": 18255 }, { "epoch": 1.2406576980568012, "grad_norm": 1.2949641942977905, "learning_rate": 0.0008450027177605654, "loss": 3.3825, "step": 18260 }, { "epoch": 1.240997418127463, "grad_norm": 1.2334554195404053, "learning_rate": 0.0008449602527517326, "loss": 3.852, "step": 18265 }, { "epoch": 1.2413371381981246, "grad_norm": 1.2301818132400513, "learning_rate": 0.0008449177877428998, "loss": 3.2939, "step": 18270 }, { "epoch": 1.2416768582687865, "grad_norm": 1.2449437379837036, "learning_rate": 0.0008448753227340671, "loss": 3.4834, "step": 18275 }, { "epoch": 
1.2420165783394483, "grad_norm": 1.2354741096496582, "learning_rate": 0.0008448328577252345, "loss": 3.7342, "step": 18280 }, { "epoch": 1.24235629841011, "grad_norm": 2.951914072036743, "learning_rate": 0.0008447903927164017, "loss": 3.9464, "step": 18285 }, { "epoch": 1.2426960184807718, "grad_norm": 1.3757359981536865, "learning_rate": 0.000844747927707569, "loss": 3.6466, "step": 18290 }, { "epoch": 1.2430357385514337, "grad_norm": 1.309857964515686, "learning_rate": 0.0008447054626987363, "loss": 3.4382, "step": 18295 }, { "epoch": 1.2433754586220953, "grad_norm": 1.3772966861724854, "learning_rate": 0.0008446629976899035, "loss": 3.6951, "step": 18300 }, { "epoch": 1.2437151786927572, "grad_norm": 1.1081253290176392, "learning_rate": 0.0008446205326810708, "loss": 3.7064, "step": 18305 }, { "epoch": 1.244054898763419, "grad_norm": 1.3303786516189575, "learning_rate": 0.0008445780676722381, "loss": 3.7993, "step": 18310 }, { "epoch": 1.2443946188340806, "grad_norm": 1.2683378458023071, "learning_rate": 0.0008445356026634054, "loss": 3.8356, "step": 18315 }, { "epoch": 1.2447343389047425, "grad_norm": 1.5764076709747314, "learning_rate": 0.0008444931376545727, "loss": 3.6898, "step": 18320 }, { "epoch": 1.2450740589754044, "grad_norm": 1.2019211053848267, "learning_rate": 0.00084445067264574, "loss": 3.4743, "step": 18325 }, { "epoch": 1.245413779046066, "grad_norm": 1.2276870012283325, "learning_rate": 0.0008444082076369072, "loss": 3.4749, "step": 18330 }, { "epoch": 1.2457534991167278, "grad_norm": 1.6247549057006836, "learning_rate": 0.0008443657426280745, "loss": 3.5647, "step": 18335 }, { "epoch": 1.2460932191873897, "grad_norm": 1.3381541967391968, "learning_rate": 0.0008443232776192417, "loss": 3.5076, "step": 18340 }, { "epoch": 1.2464329392580513, "grad_norm": 1.3183022737503052, "learning_rate": 0.000844280812610409, "loss": 3.5889, "step": 18345 }, { "epoch": 1.2467726593287132, "grad_norm": 1.3166673183441162, "learning_rate": 
0.0008442383476015764, "loss": 3.4773, "step": 18350 }, { "epoch": 1.247112379399375, "grad_norm": 1.2163852453231812, "learning_rate": 0.0008441958825927436, "loss": 3.8082, "step": 18355 }, { "epoch": 1.2474520994700367, "grad_norm": 1.0016759634017944, "learning_rate": 0.0008441534175839109, "loss": 3.7381, "step": 18360 }, { "epoch": 1.2477918195406985, "grad_norm": 0.8812482953071594, "learning_rate": 0.0008441109525750782, "loss": 3.632, "step": 18365 }, { "epoch": 1.2481315396113604, "grad_norm": 1.244808554649353, "learning_rate": 0.0008440684875662454, "loss": 3.4139, "step": 18370 }, { "epoch": 1.248471259682022, "grad_norm": 1.1459875106811523, "learning_rate": 0.0008440260225574126, "loss": 3.7816, "step": 18375 }, { "epoch": 1.2488109797526838, "grad_norm": 1.141732931137085, "learning_rate": 0.00084398355754858, "loss": 3.2999, "step": 18380 }, { "epoch": 1.2491506998233455, "grad_norm": 1.261666178703308, "learning_rate": 0.0008439410925397473, "loss": 3.8025, "step": 18385 }, { "epoch": 1.2494904198940073, "grad_norm": 1.3878034353256226, "learning_rate": 0.0008438986275309145, "loss": 3.4443, "step": 18390 }, { "epoch": 1.2498301399646692, "grad_norm": 1.0726803541183472, "learning_rate": 0.0008438561625220819, "loss": 3.5499, "step": 18395 }, { "epoch": 1.2501698600353308, "grad_norm": 1.3602955341339111, "learning_rate": 0.0008438136975132491, "loss": 3.6753, "step": 18400 }, { "epoch": 1.2505095801059927, "grad_norm": 1.2333754301071167, "learning_rate": 0.0008437712325044163, "loss": 3.7568, "step": 18405 }, { "epoch": 1.2508493001766543, "grad_norm": 1.135968565940857, "learning_rate": 0.0008437287674955837, "loss": 3.7104, "step": 18410 }, { "epoch": 1.2511890202473162, "grad_norm": 1.117478609085083, "learning_rate": 0.0008436863024867509, "loss": 3.333, "step": 18415 }, { "epoch": 1.251528740317978, "grad_norm": 1.3783246278762817, "learning_rate": 0.0008436438374779182, "loss": 3.4559, "step": 18420 }, { "epoch": 1.2518684603886396, 
"grad_norm": 1.3132699728012085, "learning_rate": 0.0008436013724690856, "loss": 3.5801, "step": 18425 }, { "epoch": 1.2522081804593015, "grad_norm": 1.4450520277023315, "learning_rate": 0.0008435589074602528, "loss": 3.4163, "step": 18430 }, { "epoch": 1.2525479005299633, "grad_norm": 1.943768858909607, "learning_rate": 0.00084351644245142, "loss": 3.8069, "step": 18435 }, { "epoch": 1.252887620600625, "grad_norm": 1.252806305885315, "learning_rate": 0.0008434739774425873, "loss": 3.5904, "step": 18440 }, { "epoch": 1.2532273406712868, "grad_norm": 1.3756346702575684, "learning_rate": 0.0008434315124337546, "loss": 3.5365, "step": 18445 }, { "epoch": 1.2535670607419487, "grad_norm": 1.0584518909454346, "learning_rate": 0.0008433890474249218, "loss": 3.786, "step": 18450 }, { "epoch": 1.2539067808126103, "grad_norm": 1.0271183252334595, "learning_rate": 0.0008433465824160892, "loss": 3.469, "step": 18455 }, { "epoch": 1.2542465008832722, "grad_norm": 1.5075637102127075, "learning_rate": 0.0008433041174072565, "loss": 3.7842, "step": 18460 }, { "epoch": 1.254586220953934, "grad_norm": 1.4979978799819946, "learning_rate": 0.0008432616523984237, "loss": 3.5963, "step": 18465 }, { "epoch": 1.2549259410245956, "grad_norm": 1.2925225496292114, "learning_rate": 0.000843219187389591, "loss": 3.3005, "step": 18470 }, { "epoch": 1.2552656610952575, "grad_norm": 1.4481220245361328, "learning_rate": 0.0008431767223807582, "loss": 3.5439, "step": 18475 }, { "epoch": 1.2556053811659194, "grad_norm": 1.3466545343399048, "learning_rate": 0.0008431342573719255, "loss": 3.54, "step": 18480 }, { "epoch": 1.255945101236581, "grad_norm": 3.9368386268615723, "learning_rate": 0.0008430917923630928, "loss": 3.3674, "step": 18485 }, { "epoch": 1.2562848213072428, "grad_norm": 1.503429651260376, "learning_rate": 0.0008430493273542601, "loss": 3.6606, "step": 18490 }, { "epoch": 1.2566245413779047, "grad_norm": 1.31855046749115, "learning_rate": 0.0008430068623454274, "loss": 3.7071, "step": 
18495 }, { "epoch": 1.2569642614485663, "grad_norm": 1.311144232749939, "learning_rate": 0.0008429643973365947, "loss": 3.4766, "step": 18500 }, { "epoch": 1.2573039815192282, "grad_norm": 1.470073938369751, "learning_rate": 0.0008429219323277619, "loss": 3.5344, "step": 18505 }, { "epoch": 1.25764370158989, "grad_norm": 1.2018152475357056, "learning_rate": 0.0008428794673189292, "loss": 3.5811, "step": 18510 }, { "epoch": 1.2579834216605517, "grad_norm": 1.134752631187439, "learning_rate": 0.0008428370023100965, "loss": 3.7799, "step": 18515 }, { "epoch": 1.2583231417312135, "grad_norm": 1.5649850368499756, "learning_rate": 0.0008427945373012637, "loss": 3.6586, "step": 18520 }, { "epoch": 1.2586628618018754, "grad_norm": 1.0771222114562988, "learning_rate": 0.000842752072292431, "loss": 3.8355, "step": 18525 }, { "epoch": 1.259002581872537, "grad_norm": 1.073067307472229, "learning_rate": 0.0008427096072835984, "loss": 3.6299, "step": 18530 }, { "epoch": 1.2593423019431988, "grad_norm": 1.1727499961853027, "learning_rate": 0.0008426671422747656, "loss": 3.8853, "step": 18535 }, { "epoch": 1.2596820220138607, "grad_norm": 1.2022695541381836, "learning_rate": 0.0008426246772659328, "loss": 3.6469, "step": 18540 }, { "epoch": 1.2600217420845223, "grad_norm": 1.3026893138885498, "learning_rate": 0.0008425822122571002, "loss": 3.6797, "step": 18545 }, { "epoch": 1.2603614621551842, "grad_norm": 1.0649961233139038, "learning_rate": 0.0008425397472482674, "loss": 3.857, "step": 18550 }, { "epoch": 1.260701182225846, "grad_norm": 1.4060900211334229, "learning_rate": 0.0008424972822394346, "loss": 3.4482, "step": 18555 }, { "epoch": 1.2610409022965077, "grad_norm": 0.965934693813324, "learning_rate": 0.0008424548172306021, "loss": 3.9652, "step": 18560 }, { "epoch": 1.2613806223671695, "grad_norm": 1.604931116104126, "learning_rate": 0.0008424123522217693, "loss": 3.6724, "step": 18565 }, { "epoch": 1.2617203424378312, "grad_norm": 1.1461617946624756, "learning_rate": 
0.0008423698872129365, "loss": 3.7757, "step": 18570 }, { "epoch": 1.262060062508493, "grad_norm": 1.2330150604248047, "learning_rate": 0.0008423274222041038, "loss": 3.5139, "step": 18575 }, { "epoch": 1.2623997825791546, "grad_norm": 1.3365023136138916, "learning_rate": 0.0008422849571952711, "loss": 3.5193, "step": 18580 }, { "epoch": 1.2627395026498165, "grad_norm": 2.3599705696105957, "learning_rate": 0.0008422424921864384, "loss": 3.7645, "step": 18585 }, { "epoch": 1.2630792227204783, "grad_norm": 1.2732008695602417, "learning_rate": 0.0008422000271776057, "loss": 3.8512, "step": 18590 }, { "epoch": 1.26341894279114, "grad_norm": 1.1204981803894043, "learning_rate": 0.000842157562168773, "loss": 3.6008, "step": 18595 }, { "epoch": 1.2637586628618018, "grad_norm": 1.1497430801391602, "learning_rate": 0.0008421150971599403, "loss": 3.71, "step": 18600 }, { "epoch": 1.2640983829324637, "grad_norm": 1.200781226158142, "learning_rate": 0.0008420726321511075, "loss": 3.5601, "step": 18605 }, { "epoch": 1.2644381030031253, "grad_norm": 1.07355797290802, "learning_rate": 0.0008420301671422748, "loss": 3.3939, "step": 18610 }, { "epoch": 1.2647778230737872, "grad_norm": 1.243895173072815, "learning_rate": 0.0008419877021334421, "loss": 3.7534, "step": 18615 }, { "epoch": 1.265117543144449, "grad_norm": 1.1861521005630493, "learning_rate": 0.0008419452371246093, "loss": 3.8083, "step": 18620 }, { "epoch": 1.2654572632151107, "grad_norm": 1.3417288064956665, "learning_rate": 0.0008419027721157766, "loss": 3.4241, "step": 18625 }, { "epoch": 1.2657969832857725, "grad_norm": 1.3953220844268799, "learning_rate": 0.000841860307106944, "loss": 3.3977, "step": 18630 }, { "epoch": 1.2661367033564344, "grad_norm": 1.3239978551864624, "learning_rate": 0.0008418178420981112, "loss": 3.8798, "step": 18635 }, { "epoch": 1.266476423427096, "grad_norm": 1.0379469394683838, "learning_rate": 0.0008417753770892784, "loss": 3.3244, "step": 18640 }, { "epoch": 1.2668161434977578, 
"grad_norm": 1.2763946056365967, "learning_rate": 0.0008417329120804458, "loss": 3.3727, "step": 18645 }, { "epoch": 1.2671558635684197, "grad_norm": 1.2024154663085938, "learning_rate": 0.000841690447071613, "loss": 3.6437, "step": 18650 }, { "epoch": 1.2674955836390813, "grad_norm": 1.0084351301193237, "learning_rate": 0.0008416479820627802, "loss": 3.8529, "step": 18655 }, { "epoch": 1.2678353037097432, "grad_norm": 1.3357852697372437, "learning_rate": 0.0008416055170539477, "loss": 3.7428, "step": 18660 }, { "epoch": 1.268175023780405, "grad_norm": 1.2628535032272339, "learning_rate": 0.0008415630520451149, "loss": 3.7008, "step": 18665 }, { "epoch": 1.2685147438510667, "grad_norm": 1.836710810661316, "learning_rate": 0.0008415205870362821, "loss": 3.5481, "step": 18670 }, { "epoch": 1.2688544639217285, "grad_norm": 1.245273232460022, "learning_rate": 0.0008414781220274494, "loss": 3.471, "step": 18675 }, { "epoch": 1.2691941839923904, "grad_norm": 1.326507806777954, "learning_rate": 0.0008414356570186167, "loss": 3.4746, "step": 18680 }, { "epoch": 1.269533904063052, "grad_norm": 1.7641167640686035, "learning_rate": 0.0008413931920097839, "loss": 3.8829, "step": 18685 }, { "epoch": 1.2698736241337139, "grad_norm": 1.3110277652740479, "learning_rate": 0.0008413507270009512, "loss": 3.8758, "step": 18690 }, { "epoch": 1.2702133442043757, "grad_norm": 1.4599426984786987, "learning_rate": 0.0008413082619921186, "loss": 3.6385, "step": 18695 }, { "epoch": 1.2705530642750373, "grad_norm": 1.3787654638290405, "learning_rate": 0.0008412657969832858, "loss": 3.8272, "step": 18700 }, { "epoch": 1.2708927843456992, "grad_norm": 0.9574824571609497, "learning_rate": 0.0008412233319744531, "loss": 3.6042, "step": 18705 }, { "epoch": 1.271232504416361, "grad_norm": 1.2862833738327026, "learning_rate": 0.0008411808669656204, "loss": 3.5925, "step": 18710 }, { "epoch": 1.2715722244870227, "grad_norm": 1.4267783164978027, "learning_rate": 0.0008411384019567876, "loss": 3.5234, 
"step": 18715 }, { "epoch": 1.2719119445576845, "grad_norm": 1.3637120723724365, "learning_rate": 0.0008410959369479549, "loss": 3.3995, "step": 18720 }, { "epoch": 1.2722516646283464, "grad_norm": 1.2051292657852173, "learning_rate": 0.0008410534719391221, "loss": 3.6513, "step": 18725 }, { "epoch": 1.272591384699008, "grad_norm": 1.9058992862701416, "learning_rate": 0.0008410110069302895, "loss": 3.4875, "step": 18730 }, { "epoch": 1.2729311047696699, "grad_norm": 1.0711513757705688, "learning_rate": 0.0008409685419214568, "loss": 3.5594, "step": 18735 }, { "epoch": 1.2732708248403315, "grad_norm": 1.085658073425293, "learning_rate": 0.000840926076912624, "loss": 3.4019, "step": 18740 }, { "epoch": 1.2736105449109933, "grad_norm": 1.3057383298873901, "learning_rate": 0.0008408836119037913, "loss": 3.4998, "step": 18745 }, { "epoch": 1.273950264981655, "grad_norm": 1.2308964729309082, "learning_rate": 0.0008408411468949586, "loss": 3.6894, "step": 18750 }, { "epoch": 1.2742899850523168, "grad_norm": 1.6223058700561523, "learning_rate": 0.0008407986818861258, "loss": 3.6677, "step": 18755 }, { "epoch": 1.2746297051229787, "grad_norm": 1.0945336818695068, "learning_rate": 0.000840756216877293, "loss": 3.5487, "step": 18760 }, { "epoch": 1.2749694251936403, "grad_norm": 1.314217448234558, "learning_rate": 0.0008407137518684605, "loss": 3.4235, "step": 18765 }, { "epoch": 1.2753091452643022, "grad_norm": 1.2455315589904785, "learning_rate": 0.0008406712868596277, "loss": 3.4878, "step": 18770 }, { "epoch": 1.275648865334964, "grad_norm": 1.2105947732925415, "learning_rate": 0.0008406288218507949, "loss": 3.6134, "step": 18775 }, { "epoch": 1.2759885854056257, "grad_norm": 1.163164496421814, "learning_rate": 0.0008405863568419623, "loss": 3.8731, "step": 18780 }, { "epoch": 1.2763283054762875, "grad_norm": 1.5137381553649902, "learning_rate": 0.0008405438918331295, "loss": 3.7647, "step": 18785 }, { "epoch": 1.2766680255469494, "grad_norm": 1.3631110191345215, 
"learning_rate": 0.0008405014268242967, "loss": 3.8584, "step": 18790 }, { "epoch": 1.277007745617611, "grad_norm": 1.0030488967895508, "learning_rate": 0.0008404589618154641, "loss": 3.6555, "step": 18795 }, { "epoch": 1.2773474656882728, "grad_norm": 1.536506175994873, "learning_rate": 0.0008404164968066314, "loss": 3.534, "step": 18800 }, { "epoch": 1.2776871857589347, "grad_norm": 1.1013647317886353, "learning_rate": 0.0008403740317977986, "loss": 3.5645, "step": 18805 }, { "epoch": 1.2780269058295963, "grad_norm": 1.6968632936477661, "learning_rate": 0.000840331566788966, "loss": 3.6754, "step": 18810 }, { "epoch": 1.2783666259002582, "grad_norm": 1.532374382019043, "learning_rate": 0.0008402891017801332, "loss": 3.5938, "step": 18815 }, { "epoch": 1.27870634597092, "grad_norm": 1.354682207107544, "learning_rate": 0.0008402466367713004, "loss": 3.7004, "step": 18820 }, { "epoch": 1.2790460660415817, "grad_norm": 1.0517202615737915, "learning_rate": 0.0008402041717624677, "loss": 3.67, "step": 18825 }, { "epoch": 1.2793857861122435, "grad_norm": 1.3198026418685913, "learning_rate": 0.000840161706753635, "loss": 3.513, "step": 18830 }, { "epoch": 1.2797255061829054, "grad_norm": 1.3555957078933716, "learning_rate": 0.0008401192417448023, "loss": 3.8477, "step": 18835 }, { "epoch": 1.280065226253567, "grad_norm": 1.3124542236328125, "learning_rate": 0.0008400767767359696, "loss": 3.7344, "step": 18840 }, { "epoch": 1.2804049463242289, "grad_norm": 1.276386022567749, "learning_rate": 0.0008400343117271369, "loss": 3.5738, "step": 18845 }, { "epoch": 1.2807446663948907, "grad_norm": 1.3496525287628174, "learning_rate": 0.0008399918467183041, "loss": 3.5419, "step": 18850 }, { "epoch": 1.2810843864655523, "grad_norm": 1.095122218132019, "learning_rate": 0.0008399493817094714, "loss": 3.6081, "step": 18855 }, { "epoch": 1.2814241065362142, "grad_norm": 1.287148118019104, "learning_rate": 0.0008399069167006386, "loss": 3.583, "step": 18860 }, { "epoch": 
1.281763826606876, "grad_norm": 1.0459359884262085, "learning_rate": 0.0008398644516918059, "loss": 3.4702, "step": 18865 }, { "epoch": 1.2821035466775377, "grad_norm": 1.0876961946487427, "learning_rate": 0.0008398219866829733, "loss": 3.6989, "step": 18870 }, { "epoch": 1.2824432667481995, "grad_norm": 1.0869956016540527, "learning_rate": 0.0008397795216741405, "loss": 3.6068, "step": 18875 }, { "epoch": 1.2827829868188614, "grad_norm": 1.16436767578125, "learning_rate": 0.0008397370566653078, "loss": 3.4278, "step": 18880 }, { "epoch": 1.283122706889523, "grad_norm": 1.4257891178131104, "learning_rate": 0.0008396945916564751, "loss": 3.653, "step": 18885 }, { "epoch": 1.2834624269601849, "grad_norm": 1.227944254875183, "learning_rate": 0.0008396521266476423, "loss": 3.3044, "step": 18890 }, { "epoch": 1.2838021470308467, "grad_norm": 1.2273110151290894, "learning_rate": 0.0008396096616388096, "loss": 3.6365, "step": 18895 }, { "epoch": 1.2841418671015083, "grad_norm": 1.0747796297073364, "learning_rate": 0.0008395671966299769, "loss": 3.5689, "step": 18900 }, { "epoch": 1.2844815871721702, "grad_norm": 1.452441930770874, "learning_rate": 0.0008395247316211442, "loss": 3.6051, "step": 18905 }, { "epoch": 1.2848213072428318, "grad_norm": 1.0574228763580322, "learning_rate": 0.0008394822666123114, "loss": 3.8437, "step": 18910 }, { "epoch": 1.2851610273134937, "grad_norm": 1.2854061126708984, "learning_rate": 0.0008394398016034788, "loss": 3.1516, "step": 18915 }, { "epoch": 1.2855007473841553, "grad_norm": 1.1556800603866577, "learning_rate": 0.000839397336594646, "loss": 3.5225, "step": 18920 }, { "epoch": 1.2858404674548172, "grad_norm": 1.219071865081787, "learning_rate": 0.0008393548715858133, "loss": 3.5264, "step": 18925 }, { "epoch": 1.286180187525479, "grad_norm": 1.1415479183197021, "learning_rate": 0.0008393124065769806, "loss": 3.6123, "step": 18930 }, { "epoch": 1.2865199075961407, "grad_norm": 1.2823930978775024, "learning_rate": 
0.0008392699415681478, "loss": 3.4032, "step": 18935 }, { "epoch": 1.2868596276668025, "grad_norm": 1.804850697517395, "learning_rate": 0.0008392274765593152, "loss": 3.7209, "step": 18940 }, { "epoch": 1.2871993477374644, "grad_norm": 1.3106937408447266, "learning_rate": 0.0008391850115504825, "loss": 3.6031, "step": 18945 }, { "epoch": 1.287539067808126, "grad_norm": 1.2342469692230225, "learning_rate": 0.0008391425465416497, "loss": 3.8036, "step": 18950 }, { "epoch": 1.2878787878787878, "grad_norm": 1.0163406133651733, "learning_rate": 0.000839100081532817, "loss": 3.7323, "step": 18955 }, { "epoch": 1.2882185079494497, "grad_norm": 1.456870198249817, "learning_rate": 0.0008390576165239842, "loss": 3.4199, "step": 18960 }, { "epoch": 1.2885582280201113, "grad_norm": 1.2609314918518066, "learning_rate": 0.0008390151515151515, "loss": 3.4269, "step": 18965 }, { "epoch": 1.2888979480907732, "grad_norm": 1.360349178314209, "learning_rate": 0.0008389726865063188, "loss": 3.7603, "step": 18970 }, { "epoch": 1.289237668161435, "grad_norm": 1.1221859455108643, "learning_rate": 0.0008389302214974861, "loss": 3.7033, "step": 18975 }, { "epoch": 1.2895773882320967, "grad_norm": 1.1888115406036377, "learning_rate": 0.0008388877564886534, "loss": 3.6356, "step": 18980 }, { "epoch": 1.2899171083027585, "grad_norm": 1.1065130233764648, "learning_rate": 0.0008388452914798207, "loss": 3.6514, "step": 18985 }, { "epoch": 1.2902568283734204, "grad_norm": 1.109170913696289, "learning_rate": 0.0008388028264709879, "loss": 3.5411, "step": 18990 }, { "epoch": 1.290596548444082, "grad_norm": 1.6627119779586792, "learning_rate": 0.0008387603614621552, "loss": 4.031, "step": 18995 }, { "epoch": 1.2909362685147439, "grad_norm": 1.1079338788986206, "learning_rate": 0.0008387178964533225, "loss": 3.4841, "step": 19000 }, { "epoch": 1.2912759885854057, "grad_norm": 1.52591073513031, "learning_rate": 0.0008386754314444897, "loss": 3.5383, "step": 19005 }, { "epoch": 1.2916157086560673, 
"grad_norm": 1.1798909902572632, "learning_rate": 0.000838632966435657, "loss": 3.6379, "step": 19010 }, { "epoch": 1.2919554287267292, "grad_norm": 1.6142429113388062, "learning_rate": 0.0008385905014268244, "loss": 3.6946, "step": 19015 }, { "epoch": 1.292295148797391, "grad_norm": 1.379896879196167, "learning_rate": 0.0008385480364179916, "loss": 3.673, "step": 19020 }, { "epoch": 1.2926348688680527, "grad_norm": 1.0776244401931763, "learning_rate": 0.0008385055714091588, "loss": 3.6752, "step": 19025 }, { "epoch": 1.2929745889387145, "grad_norm": 1.4874484539031982, "learning_rate": 0.0008384631064003262, "loss": 3.7019, "step": 19030 }, { "epoch": 1.2933143090093764, "grad_norm": 1.2301335334777832, "learning_rate": 0.0008384206413914934, "loss": 3.9022, "step": 19035 }, { "epoch": 1.293654029080038, "grad_norm": 1.233774185180664, "learning_rate": 0.0008383781763826606, "loss": 3.6201, "step": 19040 }, { "epoch": 1.2939937491506999, "grad_norm": 1.2515853643417358, "learning_rate": 0.0008383357113738281, "loss": 3.7302, "step": 19045 }, { "epoch": 1.2943334692213617, "grad_norm": 1.1632437705993652, "learning_rate": 0.0008382932463649953, "loss": 3.2711, "step": 19050 }, { "epoch": 1.2946731892920234, "grad_norm": 1.075851559638977, "learning_rate": 0.0008382507813561625, "loss": 3.6385, "step": 19055 }, { "epoch": 1.2950129093626852, "grad_norm": 1.2854485511779785, "learning_rate": 0.0008382083163473299, "loss": 3.752, "step": 19060 }, { "epoch": 1.295352629433347, "grad_norm": 1.1464661359786987, "learning_rate": 0.0008381658513384971, "loss": 3.3989, "step": 19065 }, { "epoch": 1.2956923495040087, "grad_norm": 1.2975401878356934, "learning_rate": 0.0008381233863296643, "loss": 3.4305, "step": 19070 }, { "epoch": 1.2960320695746705, "grad_norm": 1.6293593645095825, "learning_rate": 0.0008380809213208316, "loss": 3.4963, "step": 19075 }, { "epoch": 1.2963717896453322, "grad_norm": 1.204626202583313, "learning_rate": 0.000838038456311999, "loss": 3.8476, 
"step": 19080 }, { "epoch": 1.296711509715994, "grad_norm": 1.2476937770843506, "learning_rate": 0.0008379959913031662, "loss": 3.8258, "step": 19085 }, { "epoch": 1.2970512297866557, "grad_norm": 1.1547240018844604, "learning_rate": 0.0008379535262943335, "loss": 3.6935, "step": 19090 }, { "epoch": 1.2973909498573175, "grad_norm": 1.5296577215194702, "learning_rate": 0.0008379110612855008, "loss": 3.3926, "step": 19095 }, { "epoch": 1.2977306699279794, "grad_norm": 1.6178522109985352, "learning_rate": 0.000837868596276668, "loss": 3.5532, "step": 19100 }, { "epoch": 1.298070389998641, "grad_norm": 1.2508690357208252, "learning_rate": 0.0008378261312678353, "loss": 3.5387, "step": 19105 }, { "epoch": 1.2984101100693028, "grad_norm": 1.8305140733718872, "learning_rate": 0.0008377836662590025, "loss": 3.5699, "step": 19110 }, { "epoch": 1.2987498301399647, "grad_norm": 1.6345778703689575, "learning_rate": 0.0008377412012501699, "loss": 3.493, "step": 19115 }, { "epoch": 1.2990895502106263, "grad_norm": 1.1643576622009277, "learning_rate": 0.0008376987362413372, "loss": 3.7461, "step": 19120 }, { "epoch": 1.2994292702812882, "grad_norm": 1.3844034671783447, "learning_rate": 0.0008376562712325044, "loss": 3.5943, "step": 19125 }, { "epoch": 1.29976899035195, "grad_norm": 1.3651047945022583, "learning_rate": 0.0008376138062236717, "loss": 3.6841, "step": 19130 }, { "epoch": 1.3001087104226117, "grad_norm": 1.4430747032165527, "learning_rate": 0.000837571341214839, "loss": 3.5653, "step": 19135 }, { "epoch": 1.3004484304932735, "grad_norm": 1.3255887031555176, "learning_rate": 0.0008375288762060062, "loss": 3.4311, "step": 19140 }, { "epoch": 1.3007881505639354, "grad_norm": 1.5606446266174316, "learning_rate": 0.0008374864111971734, "loss": 3.6791, "step": 19145 }, { "epoch": 1.301127870634597, "grad_norm": 1.1517529487609863, "learning_rate": 0.0008374439461883409, "loss": 3.7448, "step": 19150 }, { "epoch": 1.3014675907052589, "grad_norm": 1.1089235544204712, 
"learning_rate": 0.0008374014811795081, "loss": 3.4626, "step": 19155 }, { "epoch": 1.3018073107759207, "grad_norm": 1.3832072019577026, "learning_rate": 0.0008373590161706753, "loss": 3.6475, "step": 19160 }, { "epoch": 1.3021470308465823, "grad_norm": 1.3433473110198975, "learning_rate": 0.0008373165511618427, "loss": 3.5078, "step": 19165 }, { "epoch": 1.3024867509172442, "grad_norm": 1.9998682737350464, "learning_rate": 0.0008372740861530099, "loss": 3.9605, "step": 19170 }, { "epoch": 1.302826470987906, "grad_norm": 1.2684246301651, "learning_rate": 0.0008372316211441771, "loss": 3.4787, "step": 19175 }, { "epoch": 1.3031661910585677, "grad_norm": 1.48158597946167, "learning_rate": 0.0008371891561353446, "loss": 3.5049, "step": 19180 }, { "epoch": 1.3035059111292295, "grad_norm": 1.1329306364059448, "learning_rate": 0.0008371466911265118, "loss": 3.4732, "step": 19185 }, { "epoch": 1.3038456311998914, "grad_norm": 1.0431114435195923, "learning_rate": 0.000837104226117679, "loss": 3.9093, "step": 19190 }, { "epoch": 1.304185351270553, "grad_norm": 1.120313048362732, "learning_rate": 0.0008370617611088464, "loss": 3.5674, "step": 19195 }, { "epoch": 1.3045250713412149, "grad_norm": 1.271508812904358, "learning_rate": 0.0008370192961000136, "loss": 3.8341, "step": 19200 }, { "epoch": 1.3048647914118767, "grad_norm": 2.2571301460266113, "learning_rate": 0.0008369768310911808, "loss": 3.5618, "step": 19205 }, { "epoch": 1.3052045114825384, "grad_norm": 1.033862590789795, "learning_rate": 0.0008369343660823481, "loss": 3.5915, "step": 19210 }, { "epoch": 1.3055442315532002, "grad_norm": 1.1413098573684692, "learning_rate": 0.0008368919010735155, "loss": 3.83, "step": 19215 }, { "epoch": 1.305883951623862, "grad_norm": 1.0413181781768799, "learning_rate": 0.0008368494360646827, "loss": 3.4524, "step": 19220 }, { "epoch": 1.3062236716945237, "grad_norm": 1.4037327766418457, "learning_rate": 0.00083680697105585, "loss": 3.8124, "step": 19225 }, { "epoch": 
1.3065633917651855, "grad_norm": 1.4527308940887451, "learning_rate": 0.0008367645060470173, "loss": 3.6652, "step": 19230 }, { "epoch": 1.3069031118358474, "grad_norm": 1.480597734451294, "learning_rate": 0.0008367220410381845, "loss": 3.3095, "step": 19235 }, { "epoch": 1.307242831906509, "grad_norm": 1.2056241035461426, "learning_rate": 0.0008366795760293518, "loss": 3.5086, "step": 19240 }, { "epoch": 1.3075825519771709, "grad_norm": 1.284739375114441, "learning_rate": 0.000836637111020519, "loss": 3.7344, "step": 19245 }, { "epoch": 1.3079222720478325, "grad_norm": 1.4222612380981445, "learning_rate": 0.0008365946460116864, "loss": 3.8065, "step": 19250 }, { "epoch": 1.3082619921184944, "grad_norm": 1.1798394918441772, "learning_rate": 0.0008365521810028537, "loss": 3.8723, "step": 19255 }, { "epoch": 1.308601712189156, "grad_norm": 1.460120677947998, "learning_rate": 0.0008365097159940209, "loss": 3.7783, "step": 19260 }, { "epoch": 1.3089414322598178, "grad_norm": 1.1275213956832886, "learning_rate": 0.0008364672509851883, "loss": 3.1473, "step": 19265 }, { "epoch": 1.3092811523304797, "grad_norm": 3.0079281330108643, "learning_rate": 0.0008364247859763555, "loss": 3.5654, "step": 19270 }, { "epoch": 1.3096208724011413, "grad_norm": 0.9786596894264221, "learning_rate": 0.0008363823209675227, "loss": 3.4582, "step": 19275 }, { "epoch": 1.3099605924718032, "grad_norm": 1.1927236318588257, "learning_rate": 0.0008363398559586901, "loss": 3.1289, "step": 19280 }, { "epoch": 1.310300312542465, "grad_norm": 1.1546748876571655, "learning_rate": 0.0008362973909498574, "loss": 3.6123, "step": 19285 }, { "epoch": 1.3106400326131267, "grad_norm": 1.0719890594482422, "learning_rate": 0.0008362549259410246, "loss": 3.5972, "step": 19290 }, { "epoch": 1.3109797526837885, "grad_norm": 1.1660499572753906, "learning_rate": 0.000836212460932192, "loss": 3.5952, "step": 19295 }, { "epoch": 1.3113194727544504, "grad_norm": 1.157926321029663, "learning_rate": 
0.0008361699959233592, "loss": 3.6645, "step": 19300 }, { "epoch": 1.311659192825112, "grad_norm": 1.0383806228637695, "learning_rate": 0.0008361275309145264, "loss": 3.7802, "step": 19305 }, { "epoch": 1.3119989128957739, "grad_norm": 1.0059242248535156, "learning_rate": 0.0008360850659056937, "loss": 3.3753, "step": 19310 }, { "epoch": 1.3123386329664357, "grad_norm": 1.3540213108062744, "learning_rate": 0.000836042600896861, "loss": 3.4231, "step": 19315 }, { "epoch": 1.3126783530370973, "grad_norm": 1.019986629486084, "learning_rate": 0.0008360001358880283, "loss": 3.5575, "step": 19320 }, { "epoch": 1.3130180731077592, "grad_norm": 1.2967978715896606, "learning_rate": 0.0008359576708791956, "loss": 3.6137, "step": 19325 }, { "epoch": 1.313357793178421, "grad_norm": 1.340789794921875, "learning_rate": 0.0008359152058703629, "loss": 3.6156, "step": 19330 }, { "epoch": 1.3136975132490827, "grad_norm": 1.4813899993896484, "learning_rate": 0.0008358727408615301, "loss": 3.4669, "step": 19335 }, { "epoch": 1.3140372333197445, "grad_norm": 1.3151750564575195, "learning_rate": 0.0008358302758526974, "loss": 3.623, "step": 19340 }, { "epoch": 1.3143769533904064, "grad_norm": 1.4991168975830078, "learning_rate": 0.0008357878108438647, "loss": 3.5961, "step": 19345 }, { "epoch": 1.314716673461068, "grad_norm": 1.5249629020690918, "learning_rate": 0.0008357453458350319, "loss": 3.5301, "step": 19350 }, { "epoch": 1.3150563935317299, "grad_norm": 1.914986252784729, "learning_rate": 0.0008357028808261993, "loss": 3.5813, "step": 19355 }, { "epoch": 1.3153961136023917, "grad_norm": 1.4678828716278076, "learning_rate": 0.0008356604158173665, "loss": 3.4476, "step": 19360 }, { "epoch": 1.3157358336730534, "grad_norm": 1.2387629747390747, "learning_rate": 0.0008356179508085338, "loss": 3.6963, "step": 19365 }, { "epoch": 1.3160755537437152, "grad_norm": 1.096012830734253, "learning_rate": 0.0008355754857997011, "loss": 3.7232, "step": 19370 }, { "epoch": 1.316415273814377, 
"grad_norm": 1.0514659881591797, "learning_rate": 0.0008355330207908683, "loss": 3.914, "step": 19375 }, { "epoch": 1.3167549938850387, "grad_norm": 1.3310080766677856, "learning_rate": 0.0008354905557820356, "loss": 3.6252, "step": 19380 }, { "epoch": 1.3170947139557005, "grad_norm": 1.3889378309249878, "learning_rate": 0.0008354480907732029, "loss": 3.7562, "step": 19385 }, { "epoch": 1.3174344340263624, "grad_norm": 1.5321400165557861, "learning_rate": 0.0008354056257643702, "loss": 3.6877, "step": 19390 }, { "epoch": 1.317774154097024, "grad_norm": 1.724936604499817, "learning_rate": 0.0008353631607555375, "loss": 3.1823, "step": 19395 }, { "epoch": 1.3181138741676859, "grad_norm": 1.0940498113632202, "learning_rate": 0.0008353206957467048, "loss": 3.788, "step": 19400 }, { "epoch": 1.3184535942383477, "grad_norm": 1.4340370893478394, "learning_rate": 0.000835278230737872, "loss": 3.6394, "step": 19405 }, { "epoch": 1.3187933143090094, "grad_norm": 1.3915621042251587, "learning_rate": 0.0008352357657290392, "loss": 3.6136, "step": 19410 }, { "epoch": 1.3191330343796712, "grad_norm": 1.6585607528686523, "learning_rate": 0.0008351933007202066, "loss": 3.5714, "step": 19415 }, { "epoch": 1.3194727544503329, "grad_norm": 1.1926809549331665, "learning_rate": 0.0008351508357113738, "loss": 3.8091, "step": 19420 }, { "epoch": 1.3198124745209947, "grad_norm": 1.153201699256897, "learning_rate": 0.0008351083707025411, "loss": 3.5891, "step": 19425 }, { "epoch": 1.3201521945916566, "grad_norm": 1.1411640644073486, "learning_rate": 0.0008350659056937085, "loss": 3.7356, "step": 19430 }, { "epoch": 1.3204919146623182, "grad_norm": 1.1488821506500244, "learning_rate": 0.0008350234406848757, "loss": 3.6055, "step": 19435 }, { "epoch": 1.32083163473298, "grad_norm": 1.3579708337783813, "learning_rate": 0.0008349809756760429, "loss": 3.5884, "step": 19440 }, { "epoch": 1.3211713548036417, "grad_norm": 1.3922916650772095, "learning_rate": 0.0008349385106672103, "loss": 3.5771, 
"step": 19445 }, { "epoch": 1.3215110748743035, "grad_norm": 1.3807893991470337, "learning_rate": 0.0008348960456583775, "loss": 3.6339, "step": 19450 }, { "epoch": 1.3218507949449654, "grad_norm": 1.3913803100585938, "learning_rate": 0.0008348535806495447, "loss": 3.8891, "step": 19455 }, { "epoch": 1.322190515015627, "grad_norm": 1.25673246383667, "learning_rate": 0.0008348111156407121, "loss": 3.8478, "step": 19460 }, { "epoch": 1.3225302350862889, "grad_norm": 1.1371705532073975, "learning_rate": 0.0008347686506318794, "loss": 3.5291, "step": 19465 }, { "epoch": 1.3228699551569507, "grad_norm": 1.524253010749817, "learning_rate": 0.0008347261856230466, "loss": 3.7591, "step": 19470 }, { "epoch": 1.3232096752276123, "grad_norm": 1.5291062593460083, "learning_rate": 0.0008346837206142139, "loss": 3.5479, "step": 19475 }, { "epoch": 1.3235493952982742, "grad_norm": 1.4679667949676514, "learning_rate": 0.0008346412556053812, "loss": 3.5515, "step": 19480 }, { "epoch": 1.323889115368936, "grad_norm": 1.0768256187438965, "learning_rate": 0.0008345987905965484, "loss": 3.6326, "step": 19485 }, { "epoch": 1.3242288354395977, "grad_norm": 1.1202356815338135, "learning_rate": 0.0008345563255877157, "loss": 3.4395, "step": 19490 }, { "epoch": 1.3245685555102595, "grad_norm": 1.4945756196975708, "learning_rate": 0.0008345138605788831, "loss": 3.7086, "step": 19495 }, { "epoch": 1.3249082755809214, "grad_norm": 1.2548283338546753, "learning_rate": 0.0008344713955700503, "loss": 3.6679, "step": 19500 }, { "epoch": 1.325247995651583, "grad_norm": 1.3700096607208252, "learning_rate": 0.0008344289305612176, "loss": 3.3974, "step": 19505 }, { "epoch": 1.3255877157222449, "grad_norm": 1.895311951637268, "learning_rate": 0.0008343864655523848, "loss": 3.4295, "step": 19510 }, { "epoch": 1.3259274357929067, "grad_norm": 1.1170698404312134, "learning_rate": 0.0008343440005435521, "loss": 3.6756, "step": 19515 }, { "epoch": 1.3262671558635684, "grad_norm": 1.1307249069213867, 
"learning_rate": 0.0008343015355347194, "loss": 3.53, "step": 19520 }, { "epoch": 1.3266068759342302, "grad_norm": 1.1627197265625, "learning_rate": 0.0008342590705258866, "loss": 3.5334, "step": 19525 }, { "epoch": 1.326946596004892, "grad_norm": 1.3358103036880493, "learning_rate": 0.000834216605517054, "loss": 3.384, "step": 19530 }, { "epoch": 1.3272863160755537, "grad_norm": 1.2170913219451904, "learning_rate": 0.0008341741405082213, "loss": 3.6536, "step": 19535 }, { "epoch": 1.3276260361462155, "grad_norm": 1.381888508796692, "learning_rate": 0.0008341316754993885, "loss": 3.6807, "step": 19540 }, { "epoch": 1.3279657562168774, "grad_norm": 0.9544209837913513, "learning_rate": 0.0008340892104905557, "loss": 3.5122, "step": 19545 }, { "epoch": 1.328305476287539, "grad_norm": 1.2536125183105469, "learning_rate": 0.0008340467454817231, "loss": 3.5111, "step": 19550 }, { "epoch": 1.3286451963582009, "grad_norm": 1.5644181966781616, "learning_rate": 0.0008340042804728903, "loss": 3.74, "step": 19555 }, { "epoch": 1.3289849164288627, "grad_norm": 1.0957947969436646, "learning_rate": 0.0008339618154640575, "loss": 3.687, "step": 19560 }, { "epoch": 1.3293246364995244, "grad_norm": 1.7576035261154175, "learning_rate": 0.000833919350455225, "loss": 3.4781, "step": 19565 }, { "epoch": 1.3296643565701862, "grad_norm": 1.5557969808578491, "learning_rate": 0.0008338768854463922, "loss": 3.6072, "step": 19570 }, { "epoch": 1.330004076640848, "grad_norm": 1.3404215574264526, "learning_rate": 0.0008338344204375594, "loss": 3.7276, "step": 19575 }, { "epoch": 1.3303437967115097, "grad_norm": 1.1942979097366333, "learning_rate": 0.0008337919554287268, "loss": 3.5272, "step": 19580 }, { "epoch": 1.3306835167821716, "grad_norm": 1.2178856134414673, "learning_rate": 0.000833749490419894, "loss": 3.586, "step": 19585 }, { "epoch": 1.3310232368528332, "grad_norm": 1.4329646825790405, "learning_rate": 0.0008337070254110612, "loss": 3.5886, "step": 19590 }, { "epoch": 
1.331362956923495, "grad_norm": 1.1363599300384521, "learning_rate": 0.0008336645604022285, "loss": 3.6648, "step": 19595 }, { "epoch": 1.331702676994157, "grad_norm": 1.3025238513946533, "learning_rate": 0.0008336220953933959, "loss": 3.6513, "step": 19600 }, { "epoch": 1.3320423970648185, "grad_norm": 1.5041289329528809, "learning_rate": 0.0008335796303845632, "loss": 3.5802, "step": 19605 }, { "epoch": 1.3323821171354804, "grad_norm": 1.3878027200698853, "learning_rate": 0.0008335371653757304, "loss": 3.5223, "step": 19610 }, { "epoch": 1.332721837206142, "grad_norm": 1.4025388956069946, "learning_rate": 0.0008334947003668977, "loss": 3.3211, "step": 19615 }, { "epoch": 1.3330615572768039, "grad_norm": 1.3628755807876587, "learning_rate": 0.000833452235358065, "loss": 3.8752, "step": 19620 }, { "epoch": 1.3334012773474657, "grad_norm": 1.3571377992630005, "learning_rate": 0.0008334097703492322, "loss": 3.7864, "step": 19625 }, { "epoch": 1.3337409974181273, "grad_norm": 1.5675463676452637, "learning_rate": 0.0008333673053403995, "loss": 4.0393, "step": 19630 }, { "epoch": 1.3340807174887892, "grad_norm": 1.3244630098342896, "learning_rate": 0.0008333248403315669, "loss": 3.6362, "step": 19635 }, { "epoch": 1.334420437559451, "grad_norm": 1.3567599058151245, "learning_rate": 0.0008332823753227341, "loss": 3.5111, "step": 19640 }, { "epoch": 1.3347601576301127, "grad_norm": 1.1365182399749756, "learning_rate": 0.0008332399103139013, "loss": 3.5697, "step": 19645 }, { "epoch": 1.3350998777007745, "grad_norm": 1.5267130136489868, "learning_rate": 0.0008331974453050687, "loss": 3.4052, "step": 19650 }, { "epoch": 1.3354395977714364, "grad_norm": 1.207731008529663, "learning_rate": 0.0008331549802962359, "loss": 3.4566, "step": 19655 }, { "epoch": 1.335779317842098, "grad_norm": 1.3908218145370483, "learning_rate": 0.0008331125152874031, "loss": 3.7589, "step": 19660 }, { "epoch": 1.3361190379127599, "grad_norm": 1.3004480600357056, "learning_rate": 
0.0008330700502785706, "loss": 3.6976, "step": 19665 }, { "epoch": 1.3364587579834217, "grad_norm": 1.3844630718231201, "learning_rate": 0.0008330275852697378, "loss": 3.7139, "step": 19670 }, { "epoch": 1.3367984780540834, "grad_norm": 1.311618447303772, "learning_rate": 0.000832985120260905, "loss": 3.7734, "step": 19675 }, { "epoch": 1.3371381981247452, "grad_norm": 1.5289156436920166, "learning_rate": 0.0008329426552520724, "loss": 3.6829, "step": 19680 }, { "epoch": 1.337477918195407, "grad_norm": 1.2494189739227295, "learning_rate": 0.0008329001902432396, "loss": 3.7826, "step": 19685 }, { "epoch": 1.3378176382660687, "grad_norm": 1.372774600982666, "learning_rate": 0.0008328577252344068, "loss": 3.6249, "step": 19690 }, { "epoch": 1.3381573583367306, "grad_norm": 1.1631866693496704, "learning_rate": 0.0008328152602255741, "loss": 3.7312, "step": 19695 }, { "epoch": 1.3384970784073924, "grad_norm": 1.4039921760559082, "learning_rate": 0.0008327727952167415, "loss": 3.3973, "step": 19700 }, { "epoch": 1.338836798478054, "grad_norm": 1.2616894245147705, "learning_rate": 0.0008327303302079087, "loss": 3.6649, "step": 19705 }, { "epoch": 1.3391765185487159, "grad_norm": 1.6930716037750244, "learning_rate": 0.000832687865199076, "loss": 3.4452, "step": 19710 }, { "epoch": 1.3395162386193777, "grad_norm": 1.2062218189239502, "learning_rate": 0.0008326454001902433, "loss": 3.4058, "step": 19715 }, { "epoch": 1.3398559586900394, "grad_norm": 1.798264980316162, "learning_rate": 0.0008326029351814105, "loss": 3.7102, "step": 19720 }, { "epoch": 1.3401956787607012, "grad_norm": 1.1019508838653564, "learning_rate": 0.0008325604701725778, "loss": 3.6811, "step": 19725 }, { "epoch": 1.340535398831363, "grad_norm": 1.088791847229004, "learning_rate": 0.0008325180051637451, "loss": 3.5835, "step": 19730 }, { "epoch": 1.3408751189020247, "grad_norm": 1.04375422000885, "learning_rate": 0.0008324755401549124, "loss": 3.638, "step": 19735 }, { "epoch": 1.3412148389726866, 
"grad_norm": 1.2372150421142578, "learning_rate": 0.0008324330751460797, "loss": 3.3241, "step": 19740 }, { "epoch": 1.3415545590433484, "grad_norm": 1.0214860439300537, "learning_rate": 0.000832390610137247, "loss": 3.6548, "step": 19745 }, { "epoch": 1.34189427911401, "grad_norm": 1.40850830078125, "learning_rate": 0.0008323481451284142, "loss": 3.4271, "step": 19750 }, { "epoch": 1.342233999184672, "grad_norm": 1.2460020780563354, "learning_rate": 0.0008323056801195815, "loss": 3.7418, "step": 19755 }, { "epoch": 1.3425737192553335, "grad_norm": 1.2140538692474365, "learning_rate": 0.0008322632151107487, "loss": 3.5422, "step": 19760 }, { "epoch": 1.3429134393259954, "grad_norm": 1.439367413520813, "learning_rate": 0.000832220750101916, "loss": 3.3451, "step": 19765 }, { "epoch": 1.3432531593966572, "grad_norm": 2.112474203109741, "learning_rate": 0.0008321782850930834, "loss": 3.5302, "step": 19770 }, { "epoch": 1.3435928794673189, "grad_norm": 1.7510645389556885, "learning_rate": 0.0008321358200842506, "loss": 3.4918, "step": 19775 }, { "epoch": 1.3439325995379807, "grad_norm": 1.1674621105194092, "learning_rate": 0.0008320933550754179, "loss": 3.6875, "step": 19780 }, { "epoch": 1.3442723196086424, "grad_norm": 1.3855783939361572, "learning_rate": 0.0008320508900665852, "loss": 3.6757, "step": 19785 }, { "epoch": 1.3446120396793042, "grad_norm": 1.0090018510818481, "learning_rate": 0.0008320084250577524, "loss": 3.7754, "step": 19790 }, { "epoch": 1.344951759749966, "grad_norm": 1.1140230894088745, "learning_rate": 0.0008319659600489196, "loss": 3.6738, "step": 19795 }, { "epoch": 1.3452914798206277, "grad_norm": 1.4363126754760742, "learning_rate": 0.000831923495040087, "loss": 3.5109, "step": 19800 }, { "epoch": 1.3456311998912895, "grad_norm": 1.1125526428222656, "learning_rate": 0.0008318810300312543, "loss": 3.6107, "step": 19805 }, { "epoch": 1.3459709199619514, "grad_norm": 1.0858979225158691, "learning_rate": 0.0008318385650224215, "loss": 3.4713, 
"step": 19810 }, { "epoch": 1.346310640032613, "grad_norm": 1.3593010902404785, "learning_rate": 0.0008317961000135889, "loss": 3.616, "step": 19815 }, { "epoch": 1.3466503601032749, "grad_norm": 1.1203410625457764, "learning_rate": 0.0008317536350047561, "loss": 3.5292, "step": 19820 }, { "epoch": 1.3469900801739367, "grad_norm": 1.1484616994857788, "learning_rate": 0.0008317111699959233, "loss": 3.5824, "step": 19825 }, { "epoch": 1.3473298002445984, "grad_norm": 1.140087366104126, "learning_rate": 0.0008316687049870907, "loss": 3.5447, "step": 19830 }, { "epoch": 1.3476695203152602, "grad_norm": 0.9330794215202332, "learning_rate": 0.0008316262399782579, "loss": 3.7606, "step": 19835 }, { "epoch": 1.348009240385922, "grad_norm": 1.2744101285934448, "learning_rate": 0.0008315837749694252, "loss": 3.5182, "step": 19840 }, { "epoch": 1.3483489604565837, "grad_norm": 1.4347549676895142, "learning_rate": 0.0008315413099605926, "loss": 3.566, "step": 19845 }, { "epoch": 1.3486886805272456, "grad_norm": 1.3024170398712158, "learning_rate": 0.0008314988449517598, "loss": 3.7913, "step": 19850 }, { "epoch": 1.3490284005979074, "grad_norm": 1.0193896293640137, "learning_rate": 0.000831456379942927, "loss": 3.6918, "step": 19855 }, { "epoch": 1.349368120668569, "grad_norm": 1.2969263792037964, "learning_rate": 0.0008314139149340943, "loss": 3.5937, "step": 19860 }, { "epoch": 1.349707840739231, "grad_norm": 1.2451785802841187, "learning_rate": 0.0008313714499252616, "loss": 3.7187, "step": 19865 }, { "epoch": 1.3500475608098927, "grad_norm": 1.3162521123886108, "learning_rate": 0.0008313289849164288, "loss": 3.6627, "step": 19870 }, { "epoch": 1.3503872808805544, "grad_norm": 1.6527283191680908, "learning_rate": 0.0008312865199075962, "loss": 3.8675, "step": 19875 }, { "epoch": 1.3507270009512162, "grad_norm": 1.242690086364746, "learning_rate": 0.0008312440548987635, "loss": 3.8158, "step": 19880 }, { "epoch": 1.351066721021878, "grad_norm": 1.1067355871200562, 
"learning_rate": 0.0008312015898899307, "loss": 3.786, "step": 19885 }, { "epoch": 1.3514064410925397, "grad_norm": 1.262027382850647, "learning_rate": 0.000831159124881098, "loss": 3.4668, "step": 19890 }, { "epoch": 1.3517461611632016, "grad_norm": 1.1641391515731812, "learning_rate": 0.0008311166598722652, "loss": 3.703, "step": 19895 }, { "epoch": 1.3520858812338634, "grad_norm": 1.4828197956085205, "learning_rate": 0.0008310741948634325, "loss": 3.5404, "step": 19900 }, { "epoch": 1.352425601304525, "grad_norm": 1.1422184705734253, "learning_rate": 0.0008310317298545998, "loss": 3.5519, "step": 19905 }, { "epoch": 1.352765321375187, "grad_norm": 1.5843803882598877, "learning_rate": 0.0008309892648457671, "loss": 3.6427, "step": 19910 }, { "epoch": 1.3531050414458488, "grad_norm": 1.1686856746673584, "learning_rate": 0.0008309467998369344, "loss": 3.6795, "step": 19915 }, { "epoch": 1.3534447615165104, "grad_norm": 1.344772458076477, "learning_rate": 0.0008309043348281017, "loss": 3.7284, "step": 19920 }, { "epoch": 1.3537844815871722, "grad_norm": 1.5158296823501587, "learning_rate": 0.0008308618698192689, "loss": 3.6348, "step": 19925 }, { "epoch": 1.3541242016578339, "grad_norm": 1.1396626234054565, "learning_rate": 0.0008308194048104362, "loss": 3.7886, "step": 19930 }, { "epoch": 1.3544639217284957, "grad_norm": 1.1487066745758057, "learning_rate": 0.0008307769398016035, "loss": 3.7526, "step": 19935 }, { "epoch": 1.3548036417991576, "grad_norm": 1.3562406301498413, "learning_rate": 0.0008307344747927707, "loss": 3.6286, "step": 19940 }, { "epoch": 1.3551433618698192, "grad_norm": 1.1540405750274658, "learning_rate": 0.0008306920097839382, "loss": 3.7426, "step": 19945 }, { "epoch": 1.355483081940481, "grad_norm": 1.269973874092102, "learning_rate": 0.0008306495447751054, "loss": 3.7182, "step": 19950 }, { "epoch": 1.3558228020111427, "grad_norm": 1.0712872743606567, "learning_rate": 0.0008306070797662726, "loss": 3.6057, "step": 19955 }, { "epoch": 
1.3561625220818045, "grad_norm": 1.308842420578003, "learning_rate": 0.0008305646147574399, "loss": 3.7965, "step": 19960 }, { "epoch": 1.3565022421524664, "grad_norm": 1.3989262580871582, "learning_rate": 0.0008305221497486072, "loss": 3.8364, "step": 19965 }, { "epoch": 1.356841962223128, "grad_norm": 1.354004144668579, "learning_rate": 0.0008304796847397744, "loss": 3.5815, "step": 19970 }, { "epoch": 1.3571816822937899, "grad_norm": 1.1781164407730103, "learning_rate": 0.0008304372197309417, "loss": 3.5534, "step": 19975 }, { "epoch": 1.3575214023644517, "grad_norm": 1.0786833763122559, "learning_rate": 0.0008303947547221091, "loss": 3.6514, "step": 19980 }, { "epoch": 1.3578611224351134, "grad_norm": 1.0231415033340454, "learning_rate": 0.0008303522897132763, "loss": 3.4995, "step": 19985 }, { "epoch": 1.3582008425057752, "grad_norm": 0.9725762009620667, "learning_rate": 0.0008303098247044436, "loss": 3.6117, "step": 19990 }, { "epoch": 1.358540562576437, "grad_norm": 1.1877495050430298, "learning_rate": 0.0008302673596956108, "loss": 3.4599, "step": 19995 }, { "epoch": 1.3588802826470987, "grad_norm": 1.5476804971694946, "learning_rate": 0.0008302248946867781, "loss": 3.6985, "step": 20000 }, { "epoch": 1.3592200027177606, "grad_norm": 1.0994771718978882, "learning_rate": 0.0008301824296779454, "loss": 3.8349, "step": 20005 }, { "epoch": 1.3595597227884224, "grad_norm": 1.3579213619232178, "learning_rate": 0.0008301399646691126, "loss": 3.7652, "step": 20010 }, { "epoch": 1.359899442859084, "grad_norm": 1.1423349380493164, "learning_rate": 0.00083009749966028, "loss": 3.8002, "step": 20015 }, { "epoch": 1.360239162929746, "grad_norm": 1.0594687461853027, "learning_rate": 0.0008300550346514473, "loss": 3.7583, "step": 20020 }, { "epoch": 1.3605788830004077, "grad_norm": 1.5394322872161865, "learning_rate": 0.0008300125696426145, "loss": 3.5322, "step": 20025 }, { "epoch": 1.3609186030710694, "grad_norm": 1.0721694231033325, "learning_rate": 
0.0008299701046337818, "loss": 3.5784, "step": 20030 }, { "epoch": 1.3612583231417312, "grad_norm": 2.0481741428375244, "learning_rate": 0.0008299276396249491, "loss": 3.7502, "step": 20035 }, { "epoch": 1.361598043212393, "grad_norm": 1.3270267248153687, "learning_rate": 0.0008298851746161163, "loss": 3.783, "step": 20040 }, { "epoch": 1.3619377632830547, "grad_norm": 1.2597280740737915, "learning_rate": 0.0008298427096072835, "loss": 3.585, "step": 20045 }, { "epoch": 1.3622774833537166, "grad_norm": 1.4406622648239136, "learning_rate": 0.000829800244598451, "loss": 3.7005, "step": 20050 }, { "epoch": 1.3626172034243784, "grad_norm": 1.3520548343658447, "learning_rate": 0.0008297577795896182, "loss": 3.6518, "step": 20055 }, { "epoch": 1.36295692349504, "grad_norm": 0.9149071574211121, "learning_rate": 0.0008297153145807854, "loss": 3.6829, "step": 20060 }, { "epoch": 1.363296643565702, "grad_norm": 1.233005166053772, "learning_rate": 0.0008296728495719528, "loss": 3.7263, "step": 20065 }, { "epoch": 1.3636363636363638, "grad_norm": 1.448765754699707, "learning_rate": 0.00082963038456312, "loss": 3.6125, "step": 20070 }, { "epoch": 1.3639760837070254, "grad_norm": 1.4032716751098633, "learning_rate": 0.0008295879195542872, "loss": 3.6617, "step": 20075 }, { "epoch": 1.3643158037776872, "grad_norm": 1.1730637550354004, "learning_rate": 0.0008295454545454546, "loss": 3.5324, "step": 20080 }, { "epoch": 1.364655523848349, "grad_norm": 1.3378331661224365, "learning_rate": 0.0008295029895366219, "loss": 3.4093, "step": 20085 }, { "epoch": 1.3649952439190107, "grad_norm": 1.2820035219192505, "learning_rate": 0.0008294605245277891, "loss": 3.5174, "step": 20090 }, { "epoch": 1.3653349639896726, "grad_norm": 1.0620921850204468, "learning_rate": 0.0008294180595189564, "loss": 3.5486, "step": 20095 }, { "epoch": 1.3656746840603342, "grad_norm": 1.1408665180206299, "learning_rate": 0.0008293755945101237, "loss": 3.6393, "step": 20100 }, { "epoch": 1.366014404130996, 
"grad_norm": 1.2826333045959473, "learning_rate": 0.0008293331295012909, "loss": 3.6945, "step": 20105 }, { "epoch": 1.366354124201658, "grad_norm": 1.8533298969268799, "learning_rate": 0.0008292906644924582, "loss": 3.4359, "step": 20110 }, { "epoch": 1.3666938442723195, "grad_norm": 1.3457766771316528, "learning_rate": 0.0008292481994836255, "loss": 3.6227, "step": 20115 }, { "epoch": 1.3670335643429814, "grad_norm": 1.9527863264083862, "learning_rate": 0.0008292057344747928, "loss": 3.0748, "step": 20120 }, { "epoch": 1.367373284413643, "grad_norm": 1.2981619834899902, "learning_rate": 0.0008291632694659601, "loss": 3.4674, "step": 20125 }, { "epoch": 1.3677130044843049, "grad_norm": 1.4666141271591187, "learning_rate": 0.0008291208044571274, "loss": 3.5832, "step": 20130 }, { "epoch": 1.3680527245549667, "grad_norm": 1.3782360553741455, "learning_rate": 0.0008290783394482946, "loss": 3.6993, "step": 20135 }, { "epoch": 1.3683924446256284, "grad_norm": 1.1072766780853271, "learning_rate": 0.0008290358744394619, "loss": 3.3544, "step": 20140 }, { "epoch": 1.3687321646962902, "grad_norm": 1.6724461317062378, "learning_rate": 0.0008289934094306291, "loss": 3.4427, "step": 20145 }, { "epoch": 1.369071884766952, "grad_norm": 1.2604455947875977, "learning_rate": 0.0008289509444217964, "loss": 3.3409, "step": 20150 }, { "epoch": 1.3694116048376137, "grad_norm": 1.5532326698303223, "learning_rate": 0.0008289084794129638, "loss": 3.755, "step": 20155 }, { "epoch": 1.3697513249082756, "grad_norm": 1.3850769996643066, "learning_rate": 0.000828866014404131, "loss": 3.5344, "step": 20160 }, { "epoch": 1.3700910449789374, "grad_norm": 1.322301983833313, "learning_rate": 0.0008288235493952983, "loss": 3.6661, "step": 20165 }, { "epoch": 1.370430765049599, "grad_norm": 1.2676843404769897, "learning_rate": 0.0008287810843864656, "loss": 3.4181, "step": 20170 }, { "epoch": 1.370770485120261, "grad_norm": 0.9812669157981873, "learning_rate": 0.0008287386193776328, "loss": 3.4686, 
"step": 20175 }, { "epoch": 1.3711102051909227, "grad_norm": 1.0300160646438599, "learning_rate": 0.0008286961543688, "loss": 3.6057, "step": 20180 }, { "epoch": 1.3714499252615844, "grad_norm": 1.1691724061965942, "learning_rate": 0.0008286536893599674, "loss": 3.2563, "step": 20185 }, { "epoch": 1.3717896453322462, "grad_norm": 1.430284023284912, "learning_rate": 0.0008286112243511347, "loss": 3.7786, "step": 20190 }, { "epoch": 1.372129365402908, "grad_norm": 1.7093772888183594, "learning_rate": 0.0008285687593423019, "loss": 3.3957, "step": 20195 }, { "epoch": 1.3724690854735697, "grad_norm": 1.4139708280563354, "learning_rate": 0.0008285262943334693, "loss": 3.397, "step": 20200 }, { "epoch": 1.3728088055442316, "grad_norm": 0.9313664436340332, "learning_rate": 0.0008284838293246365, "loss": 3.4922, "step": 20205 }, { "epoch": 1.3731485256148934, "grad_norm": 1.0343406200408936, "learning_rate": 0.0008284413643158037, "loss": 3.4921, "step": 20210 }, { "epoch": 1.373488245685555, "grad_norm": 1.1708855628967285, "learning_rate": 0.0008283988993069711, "loss": 3.5783, "step": 20215 }, { "epoch": 1.373827965756217, "grad_norm": 1.2546533346176147, "learning_rate": 0.0008283564342981383, "loss": 3.3261, "step": 20220 }, { "epoch": 1.3741676858268788, "grad_norm": 1.253355860710144, "learning_rate": 0.0008283139692893056, "loss": 3.6498, "step": 20225 }, { "epoch": 1.3745074058975404, "grad_norm": 0.9427648186683655, "learning_rate": 0.000828271504280473, "loss": 3.3406, "step": 20230 }, { "epoch": 1.3748471259682022, "grad_norm": 1.678530216217041, "learning_rate": 0.0008282290392716402, "loss": 3.5653, "step": 20235 }, { "epoch": 1.375186846038864, "grad_norm": 1.3473641872406006, "learning_rate": 0.0008281865742628074, "loss": 3.4596, "step": 20240 }, { "epoch": 1.3755265661095257, "grad_norm": 1.219395637512207, "learning_rate": 0.0008281441092539747, "loss": 3.571, "step": 20245 }, { "epoch": 1.3758662861801876, "grad_norm": 1.5658810138702393, 
"learning_rate": 0.000828101644245142, "loss": 3.4886, "step": 20250 }, { "epoch": 1.3762060062508494, "grad_norm": 1.5892081260681152, "learning_rate": 0.0008280591792363092, "loss": 3.4373, "step": 20255 }, { "epoch": 1.376545726321511, "grad_norm": 1.3443942070007324, "learning_rate": 0.0008280167142274766, "loss": 3.4153, "step": 20260 }, { "epoch": 1.376885446392173, "grad_norm": 1.8366317749023438, "learning_rate": 0.0008279742492186439, "loss": 3.6788, "step": 20265 }, { "epoch": 1.3772251664628345, "grad_norm": 1.3599299192428589, "learning_rate": 0.0008279317842098111, "loss": 3.871, "step": 20270 }, { "epoch": 1.3775648865334964, "grad_norm": 1.469582200050354, "learning_rate": 0.0008278893192009784, "loss": 3.715, "step": 20275 }, { "epoch": 1.3779046066041583, "grad_norm": 1.1960361003875732, "learning_rate": 0.0008278468541921456, "loss": 3.72, "step": 20280 }, { "epoch": 1.3782443266748199, "grad_norm": 7.764328956604004, "learning_rate": 0.000827804389183313, "loss": 3.479, "step": 20285 }, { "epoch": 1.3785840467454817, "grad_norm": 1.1626869440078735, "learning_rate": 0.0008277619241744803, "loss": 3.7995, "step": 20290 }, { "epoch": 1.3789237668161434, "grad_norm": 1.224255084991455, "learning_rate": 0.0008277194591656475, "loss": 3.7722, "step": 20295 }, { "epoch": 1.3792634868868052, "grad_norm": 1.1162796020507812, "learning_rate": 0.0008276769941568149, "loss": 3.7746, "step": 20300 }, { "epoch": 1.379603206957467, "grad_norm": 1.2418798208236694, "learning_rate": 0.0008276345291479821, "loss": 3.3927, "step": 20305 }, { "epoch": 1.3799429270281287, "grad_norm": 1.2280012369155884, "learning_rate": 0.0008275920641391493, "loss": 3.6338, "step": 20310 }, { "epoch": 1.3802826470987906, "grad_norm": 1.0730549097061157, "learning_rate": 0.0008275495991303167, "loss": 3.7152, "step": 20315 }, { "epoch": 1.3806223671694524, "grad_norm": 1.3352140188217163, "learning_rate": 0.0008275071341214839, "loss": 3.6321, "step": 20320 }, { "epoch": 
1.380962087240114, "grad_norm": 1.7601209878921509, "learning_rate": 0.0008274646691126512, "loss": 3.5543, "step": 20325 }, { "epoch": 1.381301807310776, "grad_norm": 1.2091562747955322, "learning_rate": 0.0008274222041038186, "loss": 3.5823, "step": 20330 }, { "epoch": 1.3816415273814378, "grad_norm": 1.2790182828903198, "learning_rate": 0.0008273797390949858, "loss": 3.4795, "step": 20335 }, { "epoch": 1.3819812474520994, "grad_norm": 1.333156704902649, "learning_rate": 0.000827337274086153, "loss": 3.7049, "step": 20340 }, { "epoch": 1.3823209675227612, "grad_norm": 1.2725884914398193, "learning_rate": 0.0008272948090773203, "loss": 3.6018, "step": 20345 }, { "epoch": 1.382660687593423, "grad_norm": 1.294727087020874, "learning_rate": 0.0008272523440684876, "loss": 3.5682, "step": 20350 }, { "epoch": 1.3830004076640847, "grad_norm": 1.1983976364135742, "learning_rate": 0.0008272098790596548, "loss": 3.4607, "step": 20355 }, { "epoch": 1.3833401277347466, "grad_norm": 1.3016895055770874, "learning_rate": 0.0008271674140508222, "loss": 3.4821, "step": 20360 }, { "epoch": 1.3836798478054084, "grad_norm": 1.2092137336730957, "learning_rate": 0.0008271249490419895, "loss": 3.738, "step": 20365 }, { "epoch": 1.38401956787607, "grad_norm": 1.1365054845809937, "learning_rate": 0.0008270824840331567, "loss": 3.7626, "step": 20370 }, { "epoch": 1.384359287946732, "grad_norm": 1.32330322265625, "learning_rate": 0.000827040019024324, "loss": 3.6517, "step": 20375 }, { "epoch": 1.3846990080173938, "grad_norm": 1.040998101234436, "learning_rate": 0.0008269975540154912, "loss": 3.9286, "step": 20380 }, { "epoch": 1.3850387280880554, "grad_norm": 1.2660012245178223, "learning_rate": 0.0008269550890066585, "loss": 3.8052, "step": 20385 }, { "epoch": 1.3853784481587172, "grad_norm": 0.9677267074584961, "learning_rate": 0.0008269126239978258, "loss": 3.4699, "step": 20390 }, { "epoch": 1.385718168229379, "grad_norm": 1.2833420038223267, "learning_rate": 0.0008268701589889931, 
"loss": 3.6138, "step": 20395 }, { "epoch": 1.3860578883000407, "grad_norm": 1.23131263256073, "learning_rate": 0.0008268276939801604, "loss": 3.5587, "step": 20400 }, { "epoch": 1.3863976083707026, "grad_norm": 1.6911914348602295, "learning_rate": 0.0008267852289713277, "loss": 3.5376, "step": 20405 }, { "epoch": 1.3867373284413644, "grad_norm": 1.104675531387329, "learning_rate": 0.0008267427639624949, "loss": 3.5614, "step": 20410 }, { "epoch": 1.387077048512026, "grad_norm": 1.5108306407928467, "learning_rate": 0.0008267002989536622, "loss": 3.6927, "step": 20415 }, { "epoch": 1.387416768582688, "grad_norm": 1.059615135192871, "learning_rate": 0.0008266578339448295, "loss": 3.6758, "step": 20420 }, { "epoch": 1.3877564886533498, "grad_norm": 2.9481942653656006, "learning_rate": 0.0008266153689359967, "loss": 3.5269, "step": 20425 }, { "epoch": 1.3880962087240114, "grad_norm": 1.1449897289276123, "learning_rate": 0.000826572903927164, "loss": 3.5582, "step": 20430 }, { "epoch": 1.3884359287946733, "grad_norm": 1.1800224781036377, "learning_rate": 0.0008265304389183314, "loss": 3.7147, "step": 20435 }, { "epoch": 1.3887756488653349, "grad_norm": 1.3100883960723877, "learning_rate": 0.0008264879739094986, "loss": 3.5148, "step": 20440 }, { "epoch": 1.3891153689359967, "grad_norm": 1.2584338188171387, "learning_rate": 0.0008264455089006658, "loss": 3.8095, "step": 20445 }, { "epoch": 1.3894550890066586, "grad_norm": 1.5843713283538818, "learning_rate": 0.0008264030438918332, "loss": 3.6286, "step": 20450 }, { "epoch": 1.3897948090773202, "grad_norm": 1.2037084102630615, "learning_rate": 0.0008263605788830004, "loss": 3.3876, "step": 20455 }, { "epoch": 1.390134529147982, "grad_norm": 1.1966054439544678, "learning_rate": 0.0008263181138741676, "loss": 3.7975, "step": 20460 }, { "epoch": 1.3904742492186437, "grad_norm": 1.0185866355895996, "learning_rate": 0.0008262756488653351, "loss": 3.5, "step": 20465 }, { "epoch": 1.3908139692893056, "grad_norm": 
1.3137075901031494, "learning_rate": 0.0008262331838565023, "loss": 3.4653, "step": 20470 }, { "epoch": 1.3911536893599674, "grad_norm": 1.215492844581604, "learning_rate": 0.0008261907188476695, "loss": 3.5656, "step": 20475 }, { "epoch": 1.391493409430629, "grad_norm": 1.3751649856567383, "learning_rate": 0.0008261482538388369, "loss": 3.6357, "step": 20480 }, { "epoch": 1.391833129501291, "grad_norm": 1.182353138923645, "learning_rate": 0.0008261057888300041, "loss": 3.4327, "step": 20485 }, { "epoch": 1.3921728495719528, "grad_norm": 1.1253334283828735, "learning_rate": 0.0008260633238211713, "loss": 3.5338, "step": 20490 }, { "epoch": 1.3925125696426144, "grad_norm": 1.5253503322601318, "learning_rate": 0.0008260208588123386, "loss": 3.4469, "step": 20495 }, { "epoch": 1.3928522897132762, "grad_norm": 1.5620421171188354, "learning_rate": 0.000825978393803506, "loss": 3.5066, "step": 20500 }, { "epoch": 1.393192009783938, "grad_norm": 1.2673956155776978, "learning_rate": 0.0008259359287946732, "loss": 3.5328, "step": 20505 }, { "epoch": 1.3935317298545997, "grad_norm": 1.377718210220337, "learning_rate": 0.0008258934637858405, "loss": 3.5718, "step": 20510 }, { "epoch": 1.3938714499252616, "grad_norm": 1.2963497638702393, "learning_rate": 0.0008258509987770078, "loss": 3.5012, "step": 20515 }, { "epoch": 1.3942111699959234, "grad_norm": 1.6683436632156372, "learning_rate": 0.000825808533768175, "loss": 3.7855, "step": 20520 }, { "epoch": 1.394550890066585, "grad_norm": 1.561111569404602, "learning_rate": 0.0008257660687593423, "loss": 3.6001, "step": 20525 }, { "epoch": 1.394890610137247, "grad_norm": 1.6548476219177246, "learning_rate": 0.0008257236037505095, "loss": 3.356, "step": 20530 }, { "epoch": 1.3952303302079088, "grad_norm": 1.1525578498840332, "learning_rate": 0.0008256811387416769, "loss": 3.6468, "step": 20535 }, { "epoch": 1.3955700502785704, "grad_norm": 0.9997166991233826, "learning_rate": 0.0008256386737328442, "loss": 3.7156, "step": 20540 }, 
{ "epoch": 1.3959097703492322, "grad_norm": 1.1509108543395996, "learning_rate": 0.0008255962087240114, "loss": 3.6832, "step": 20545 }, { "epoch": 1.396249490419894, "grad_norm": 1.2911345958709717, "learning_rate": 0.0008255537437151787, "loss": 3.4464, "step": 20550 }, { "epoch": 1.3965892104905557, "grad_norm": 0.9799840450286865, "learning_rate": 0.000825511278706346, "loss": 3.4335, "step": 20555 }, { "epoch": 1.3969289305612176, "grad_norm": 0.991115927696228, "learning_rate": 0.0008254688136975132, "loss": 3.6198, "step": 20560 }, { "epoch": 1.3972686506318794, "grad_norm": 1.2448384761810303, "learning_rate": 0.0008254263486886804, "loss": 3.6449, "step": 20565 }, { "epoch": 1.397608370702541, "grad_norm": 1.2402737140655518, "learning_rate": 0.0008253838836798479, "loss": 3.4384, "step": 20570 }, { "epoch": 1.397948090773203, "grad_norm": 1.2874789237976074, "learning_rate": 0.0008253414186710151, "loss": 3.2943, "step": 20575 }, { "epoch": 1.3982878108438648, "grad_norm": 1.3859788179397583, "learning_rate": 0.0008252989536621823, "loss": 3.6211, "step": 20580 }, { "epoch": 1.3986275309145264, "grad_norm": 1.3566832542419434, "learning_rate": 0.0008252564886533497, "loss": 3.5292, "step": 20585 }, { "epoch": 1.3989672509851883, "grad_norm": 1.214120864868164, "learning_rate": 0.0008252140236445169, "loss": 3.6413, "step": 20590 }, { "epoch": 1.3993069710558501, "grad_norm": 1.3425590991973877, "learning_rate": 0.0008251715586356841, "loss": 3.4848, "step": 20595 }, { "epoch": 1.3996466911265117, "grad_norm": 1.5179847478866577, "learning_rate": 0.0008251290936268515, "loss": 3.4998, "step": 20600 }, { "epoch": 1.3999864111971736, "grad_norm": 1.4238831996917725, "learning_rate": 0.0008250866286180188, "loss": 3.8368, "step": 20605 }, { "epoch": 1.4003261312678352, "grad_norm": 1.3590368032455444, "learning_rate": 0.000825044163609186, "loss": 3.6362, "step": 20610 }, { "epoch": 1.400665851338497, "grad_norm": 0.9861757159233093, "learning_rate": 
0.0008250016986003534, "loss": 3.7452, "step": 20615 }, { "epoch": 1.401005571409159, "grad_norm": 1.25343918800354, "learning_rate": 0.0008249592335915206, "loss": 3.5499, "step": 20620 }, { "epoch": 1.4013452914798206, "grad_norm": 1.6723910570144653, "learning_rate": 0.0008249167685826879, "loss": 3.5155, "step": 20625 }, { "epoch": 1.4016850115504824, "grad_norm": 0.8997228145599365, "learning_rate": 0.0008248743035738551, "loss": 3.6213, "step": 20630 }, { "epoch": 1.402024731621144, "grad_norm": 1.3580085039138794, "learning_rate": 0.0008248318385650224, "loss": 3.3636, "step": 20635 }, { "epoch": 1.402364451691806, "grad_norm": 1.2843695878982544, "learning_rate": 0.0008247893735561898, "loss": 4.0494, "step": 20640 }, { "epoch": 1.4027041717624678, "grad_norm": 1.391913890838623, "learning_rate": 0.000824746908547357, "loss": 3.9, "step": 20645 }, { "epoch": 1.4030438918331294, "grad_norm": 1.5249261856079102, "learning_rate": 0.0008247044435385243, "loss": 3.4607, "step": 20650 }, { "epoch": 1.4033836119037912, "grad_norm": 1.300368309020996, "learning_rate": 0.0008246619785296916, "loss": 3.3647, "step": 20655 }, { "epoch": 1.403723331974453, "grad_norm": 1.2999176979064941, "learning_rate": 0.0008246195135208588, "loss": 3.6001, "step": 20660 }, { "epoch": 1.4040630520451147, "grad_norm": 1.3833351135253906, "learning_rate": 0.000824577048512026, "loss": 3.7903, "step": 20665 }, { "epoch": 1.4044027721157766, "grad_norm": 1.4752861261367798, "learning_rate": 0.0008245345835031934, "loss": 3.6073, "step": 20670 }, { "epoch": 1.4047424921864384, "grad_norm": 1.1948809623718262, "learning_rate": 0.0008244921184943607, "loss": 3.5156, "step": 20675 }, { "epoch": 1.4050822122571, "grad_norm": 1.1326195001602173, "learning_rate": 0.0008244496534855279, "loss": 3.7523, "step": 20680 }, { "epoch": 1.405421932327762, "grad_norm": 1.1281806230545044, "learning_rate": 0.0008244071884766953, "loss": 3.5914, "step": 20685 }, { "epoch": 1.4057616523984238, 
"grad_norm": 1.068831205368042, "learning_rate": 0.0008243647234678625, "loss": 3.3978, "step": 20690 }, { "epoch": 1.4061013724690854, "grad_norm": 1.2080225944519043, "learning_rate": 0.0008243222584590297, "loss": 3.5839, "step": 20695 }, { "epoch": 1.4064410925397473, "grad_norm": 1.5324060916900635, "learning_rate": 0.0008242797934501971, "loss": 3.6363, "step": 20700 }, { "epoch": 1.406780812610409, "grad_norm": 2.1512725353240967, "learning_rate": 0.0008242373284413643, "loss": 3.4936, "step": 20705 }, { "epoch": 1.4071205326810707, "grad_norm": 1.312625765800476, "learning_rate": 0.0008241948634325316, "loss": 3.8438, "step": 20710 }, { "epoch": 1.4074602527517326, "grad_norm": 1.1705963611602783, "learning_rate": 0.000824152398423699, "loss": 3.4168, "step": 20715 }, { "epoch": 1.4077999728223944, "grad_norm": 0.9894548058509827, "learning_rate": 0.0008241099334148662, "loss": 3.2785, "step": 20720 }, { "epoch": 1.408139692893056, "grad_norm": 1.0908522605895996, "learning_rate": 0.0008240674684060334, "loss": 3.4906, "step": 20725 }, { "epoch": 1.408479412963718, "grad_norm": 1.698517918586731, "learning_rate": 0.0008240250033972007, "loss": 3.483, "step": 20730 }, { "epoch": 1.4088191330343798, "grad_norm": 1.3665062189102173, "learning_rate": 0.000823982538388368, "loss": 3.6822, "step": 20735 }, { "epoch": 1.4091588531050414, "grad_norm": 2.17169451713562, "learning_rate": 0.0008239400733795352, "loss": 3.4947, "step": 20740 }, { "epoch": 1.4094985731757033, "grad_norm": 1.2845664024353027, "learning_rate": 0.0008238976083707026, "loss": 3.5543, "step": 20745 }, { "epoch": 1.4098382932463651, "grad_norm": 1.8233779668807983, "learning_rate": 0.0008238551433618699, "loss": 3.5791, "step": 20750 }, { "epoch": 1.4101780133170267, "grad_norm": 1.5263640880584717, "learning_rate": 0.0008238126783530371, "loss": 3.3954, "step": 20755 }, { "epoch": 1.4105177333876886, "grad_norm": 1.183504581451416, "learning_rate": 0.0008237702133442044, "loss": 3.5461, 
"step": 20760 }, { "epoch": 1.4108574534583505, "grad_norm": 1.273681402206421, "learning_rate": 0.0008237277483353717, "loss": 3.6809, "step": 20765 }, { "epoch": 1.411197173529012, "grad_norm": 1.2119488716125488, "learning_rate": 0.0008236852833265389, "loss": 3.6867, "step": 20770 }, { "epoch": 1.411536893599674, "grad_norm": 1.4769326448440552, "learning_rate": 0.0008236428183177062, "loss": 3.5263, "step": 20775 }, { "epoch": 1.4118766136703356, "grad_norm": 1.0798884630203247, "learning_rate": 0.0008236003533088735, "loss": 3.8115, "step": 20780 }, { "epoch": 1.4122163337409974, "grad_norm": 1.494271159172058, "learning_rate": 0.0008235578883000408, "loss": 3.501, "step": 20785 }, { "epoch": 1.4125560538116593, "grad_norm": 1.745208740234375, "learning_rate": 0.0008235154232912081, "loss": 3.5609, "step": 20790 }, { "epoch": 1.412895773882321, "grad_norm": 1.1624064445495605, "learning_rate": 0.0008234729582823753, "loss": 3.7894, "step": 20795 }, { "epoch": 1.4132354939529828, "grad_norm": 1.4760578870773315, "learning_rate": 0.0008234304932735426, "loss": 3.6101, "step": 20800 }, { "epoch": 1.4135752140236444, "grad_norm": 1.246622920036316, "learning_rate": 0.0008233880282647099, "loss": 3.5029, "step": 20805 }, { "epoch": 1.4139149340943062, "grad_norm": 1.2550685405731201, "learning_rate": 0.0008233455632558771, "loss": 3.4453, "step": 20810 }, { "epoch": 1.414254654164968, "grad_norm": 1.5204224586486816, "learning_rate": 0.0008233030982470445, "loss": 3.8312, "step": 20815 }, { "epoch": 1.4145943742356297, "grad_norm": 1.7074732780456543, "learning_rate": 0.0008232606332382118, "loss": 3.6524, "step": 20820 }, { "epoch": 1.4149340943062916, "grad_norm": 1.1841588020324707, "learning_rate": 0.000823218168229379, "loss": 3.6649, "step": 20825 }, { "epoch": 1.4152738143769534, "grad_norm": 1.4260939359664917, "learning_rate": 0.0008231757032205462, "loss": 3.6587, "step": 20830 }, { "epoch": 1.415613534447615, "grad_norm": 1.1625001430511475, 
"learning_rate": 0.0008231332382117136, "loss": 3.5559, "step": 20835 }, { "epoch": 1.415953254518277, "grad_norm": 1.1857085227966309, "learning_rate": 0.0008230907732028808, "loss": 3.7681, "step": 20840 }, { "epoch": 1.4162929745889388, "grad_norm": 1.858168125152588, "learning_rate": 0.000823048308194048, "loss": 3.831, "step": 20845 }, { "epoch": 1.4166326946596004, "grad_norm": 1.280880331993103, "learning_rate": 0.0008230058431852155, "loss": 3.4651, "step": 20850 }, { "epoch": 1.4169724147302623, "grad_norm": 1.0524682998657227, "learning_rate": 0.0008229633781763827, "loss": 3.3279, "step": 20855 }, { "epoch": 1.417312134800924, "grad_norm": 1.2881629467010498, "learning_rate": 0.0008229209131675499, "loss": 3.5184, "step": 20860 }, { "epoch": 1.4176518548715857, "grad_norm": 1.3273589611053467, "learning_rate": 0.0008228784481587173, "loss": 3.6509, "step": 20865 }, { "epoch": 1.4179915749422476, "grad_norm": 1.149609923362732, "learning_rate": 0.0008228359831498845, "loss": 3.5975, "step": 20870 }, { "epoch": 1.4183312950129094, "grad_norm": 1.0976375341415405, "learning_rate": 0.0008227935181410517, "loss": 3.5922, "step": 20875 }, { "epoch": 1.418671015083571, "grad_norm": 1.3058350086212158, "learning_rate": 0.0008227510531322191, "loss": 3.7644, "step": 20880 }, { "epoch": 1.419010735154233, "grad_norm": 1.152694582939148, "learning_rate": 0.0008227085881233864, "loss": 3.6024, "step": 20885 }, { "epoch": 1.4193504552248948, "grad_norm": 1.240993618965149, "learning_rate": 0.0008226661231145536, "loss": 3.6243, "step": 20890 }, { "epoch": 1.4196901752955564, "grad_norm": 1.3224472999572754, "learning_rate": 0.0008226236581057209, "loss": 3.8453, "step": 20895 }, { "epoch": 1.4200298953662183, "grad_norm": 1.1133008003234863, "learning_rate": 0.0008225811930968882, "loss": 3.7172, "step": 20900 }, { "epoch": 1.4203696154368801, "grad_norm": 1.5086920261383057, "learning_rate": 0.0008225387280880554, "loss": 3.4361, "step": 20905 }, { "epoch": 
1.4207093355075417, "grad_norm": 1.5744116306304932, "learning_rate": 0.0008224962630792227, "loss": 3.7975, "step": 20910 }, { "epoch": 1.4210490555782036, "grad_norm": 1.656262993812561, "learning_rate": 0.0008224537980703901, "loss": 3.5156, "step": 20915 }, { "epoch": 1.4213887756488655, "grad_norm": 1.4007917642593384, "learning_rate": 0.0008224113330615573, "loss": 3.7253, "step": 20920 }, { "epoch": 1.421728495719527, "grad_norm": 1.4467090368270874, "learning_rate": 0.0008223688680527246, "loss": 3.6157, "step": 20925 }, { "epoch": 1.422068215790189, "grad_norm": 1.2163835763931274, "learning_rate": 0.0008223264030438918, "loss": 3.5344, "step": 20930 }, { "epoch": 1.4224079358608508, "grad_norm": 1.1432774066925049, "learning_rate": 0.0008222839380350591, "loss": 3.5957, "step": 20935 }, { "epoch": 1.4227476559315124, "grad_norm": 1.130091905593872, "learning_rate": 0.0008222414730262264, "loss": 3.4552, "step": 20940 }, { "epoch": 1.4230873760021743, "grad_norm": 1.1499372720718384, "learning_rate": 0.0008221990080173936, "loss": 3.4228, "step": 20945 }, { "epoch": 1.423427096072836, "grad_norm": 1.572598934173584, "learning_rate": 0.000822156543008561, "loss": 3.3737, "step": 20950 }, { "epoch": 1.4237668161434978, "grad_norm": 1.1068103313446045, "learning_rate": 0.0008221140779997283, "loss": 3.8173, "step": 20955 }, { "epoch": 1.4241065362141596, "grad_norm": 1.4464515447616577, "learning_rate": 0.0008220716129908955, "loss": 3.6739, "step": 20960 }, { "epoch": 1.4244462562848212, "grad_norm": 0.9638551473617554, "learning_rate": 0.0008220291479820629, "loss": 3.5653, "step": 20965 }, { "epoch": 1.424785976355483, "grad_norm": 1.4037538766860962, "learning_rate": 0.0008219866829732301, "loss": 3.7363, "step": 20970 }, { "epoch": 1.4251256964261447, "grad_norm": 1.1748560667037964, "learning_rate": 0.0008219442179643973, "loss": 3.4633, "step": 20975 }, { "epoch": 1.4254654164968066, "grad_norm": 1.215262770652771, "learning_rate": 
0.0008219017529555646, "loss": 3.8139, "step": 20980 }, { "epoch": 1.4258051365674684, "grad_norm": 1.249667763710022, "learning_rate": 0.000821859287946732, "loss": 3.5701, "step": 20985 }, { "epoch": 1.42614485663813, "grad_norm": 1.8257249593734741, "learning_rate": 0.0008218168229378992, "loss": 3.4635, "step": 20990 }, { "epoch": 1.426484576708792, "grad_norm": 1.0076675415039062, "learning_rate": 0.0008217743579290665, "loss": 3.3125, "step": 20995 }, { "epoch": 1.4268242967794538, "grad_norm": 1.2965049743652344, "learning_rate": 0.0008217318929202338, "loss": 3.5843, "step": 21000 }, { "epoch": 1.4271640168501154, "grad_norm": 1.3956297636032104, "learning_rate": 0.000821689427911401, "loss": 3.7045, "step": 21005 }, { "epoch": 1.4275037369207773, "grad_norm": 1.4150187969207764, "learning_rate": 0.0008216469629025683, "loss": 3.6637, "step": 21010 }, { "epoch": 1.427843456991439, "grad_norm": 1.1471997499465942, "learning_rate": 0.0008216044978937355, "loss": 3.8624, "step": 21015 }, { "epoch": 1.4281831770621007, "grad_norm": 1.1860029697418213, "learning_rate": 0.0008215620328849029, "loss": 3.3923, "step": 21020 }, { "epoch": 1.4285228971327626, "grad_norm": 1.261267900466919, "learning_rate": 0.0008215195678760702, "loss": 3.5876, "step": 21025 }, { "epoch": 1.4288626172034244, "grad_norm": 1.2019227743148804, "learning_rate": 0.0008214771028672374, "loss": 3.6939, "step": 21030 }, { "epoch": 1.429202337274086, "grad_norm": 2.9363322257995605, "learning_rate": 0.0008214346378584047, "loss": 3.6123, "step": 21035 }, { "epoch": 1.429542057344748, "grad_norm": 1.1694625616073608, "learning_rate": 0.000821392172849572, "loss": 3.7976, "step": 21040 }, { "epoch": 1.4298817774154098, "grad_norm": 0.9552502036094666, "learning_rate": 0.0008213497078407392, "loss": 3.6452, "step": 21045 }, { "epoch": 1.4302214974860714, "grad_norm": 1.0778777599334717, "learning_rate": 0.0008213072428319065, "loss": 3.7853, "step": 21050 }, { "epoch": 1.4305612175567333, 
"grad_norm": 2.6119844913482666, "learning_rate": 0.0008212647778230739, "loss": 3.7619, "step": 21055 }, { "epoch": 1.4309009376273951, "grad_norm": 1.5461480617523193, "learning_rate": 0.0008212223128142411, "loss": 3.6407, "step": 21060 }, { "epoch": 1.4312406576980568, "grad_norm": 1.4349606037139893, "learning_rate": 0.0008211798478054083, "loss": 3.6414, "step": 21065 }, { "epoch": 1.4315803777687186, "grad_norm": 1.4407016038894653, "learning_rate": 0.0008211373827965757, "loss": 3.496, "step": 21070 }, { "epoch": 1.4319200978393805, "grad_norm": 1.1912832260131836, "learning_rate": 0.0008210949177877429, "loss": 3.4939, "step": 21075 }, { "epoch": 1.432259817910042, "grad_norm": 1.2450463771820068, "learning_rate": 0.0008210524527789101, "loss": 3.5431, "step": 21080 }, { "epoch": 1.432599537980704, "grad_norm": 1.5709940195083618, "learning_rate": 0.0008210099877700775, "loss": 3.7348, "step": 21085 }, { "epoch": 1.4329392580513658, "grad_norm": 1.242585301399231, "learning_rate": 0.0008209675227612448, "loss": 3.5404, "step": 21090 }, { "epoch": 1.4332789781220274, "grad_norm": 1.1083921194076538, "learning_rate": 0.000820925057752412, "loss": 3.4687, "step": 21095 }, { "epoch": 1.4336186981926893, "grad_norm": 1.5684362649917603, "learning_rate": 0.0008208825927435794, "loss": 3.2798, "step": 21100 }, { "epoch": 1.4339584182633511, "grad_norm": 1.2835513353347778, "learning_rate": 0.0008208401277347466, "loss": 3.4125, "step": 21105 }, { "epoch": 1.4342981383340128, "grad_norm": 1.365962028503418, "learning_rate": 0.0008207976627259138, "loss": 3.6546, "step": 21110 }, { "epoch": 1.4346378584046746, "grad_norm": 1.0926878452301025, "learning_rate": 0.0008207551977170811, "loss": 3.5864, "step": 21115 }, { "epoch": 1.4349775784753362, "grad_norm": 1.4290715456008911, "learning_rate": 0.0008207127327082484, "loss": 3.7169, "step": 21120 }, { "epoch": 1.435317298545998, "grad_norm": 0.9967925548553467, "learning_rate": 0.0008206702676994157, "loss": 3.5952, 
"step": 21125 }, { "epoch": 1.43565701861666, "grad_norm": 1.3336035013198853, "learning_rate": 0.000820627802690583, "loss": 3.6618, "step": 21130 }, { "epoch": 1.4359967386873216, "grad_norm": 1.0384528636932373, "learning_rate": 0.0008205853376817503, "loss": 3.7271, "step": 21135 }, { "epoch": 1.4363364587579834, "grad_norm": 1.3340785503387451, "learning_rate": 0.0008205428726729175, "loss": 3.6745, "step": 21140 }, { "epoch": 1.436676178828645, "grad_norm": 1.3153777122497559, "learning_rate": 0.0008205004076640848, "loss": 3.664, "step": 21145 }, { "epoch": 1.437015898899307, "grad_norm": 1.5713272094726562, "learning_rate": 0.0008204579426552521, "loss": 3.6126, "step": 21150 }, { "epoch": 1.4373556189699688, "grad_norm": 1.0841761827468872, "learning_rate": 0.0008204154776464193, "loss": 3.4797, "step": 21155 }, { "epoch": 1.4376953390406304, "grad_norm": 1.2794647216796875, "learning_rate": 0.0008203730126375867, "loss": 3.5136, "step": 21160 }, { "epoch": 1.4380350591112923, "grad_norm": 1.7816683053970337, "learning_rate": 0.000820330547628754, "loss": 3.6879, "step": 21165 }, { "epoch": 1.438374779181954, "grad_norm": 1.0792571306228638, "learning_rate": 0.0008202880826199212, "loss": 3.9799, "step": 21170 }, { "epoch": 1.4387144992526157, "grad_norm": 1.913262128829956, "learning_rate": 0.0008202456176110885, "loss": 3.7087, "step": 21175 }, { "epoch": 1.4390542193232776, "grad_norm": 0.9596566557884216, "learning_rate": 0.0008202031526022557, "loss": 3.5407, "step": 21180 }, { "epoch": 1.4393939393939394, "grad_norm": 1.1543760299682617, "learning_rate": 0.000820160687593423, "loss": 3.7384, "step": 21185 }, { "epoch": 1.439733659464601, "grad_norm": 1.5558826923370361, "learning_rate": 0.0008201182225845903, "loss": 3.4889, "step": 21190 }, { "epoch": 1.440073379535263, "grad_norm": 1.267754316329956, "learning_rate": 0.0008200757575757576, "loss": 3.6117, "step": 21195 }, { "epoch": 1.4404130996059248, "grad_norm": 1.2456238269805908, 
"learning_rate": 0.0008200332925669249, "loss": 3.5815, "step": 21200 }, { "epoch": 1.4407528196765864, "grad_norm": 1.2221205234527588, "learning_rate": 0.0008199908275580922, "loss": 3.9226, "step": 21205 }, { "epoch": 1.4410925397472483, "grad_norm": 1.1406300067901611, "learning_rate": 0.0008199483625492594, "loss": 3.5699, "step": 21210 }, { "epoch": 1.4414322598179101, "grad_norm": 1.3195043802261353, "learning_rate": 0.0008199058975404266, "loss": 3.8165, "step": 21215 }, { "epoch": 1.4417719798885718, "grad_norm": 1.543225884437561, "learning_rate": 0.000819863432531594, "loss": 3.6741, "step": 21220 }, { "epoch": 1.4421116999592336, "grad_norm": 1.3881837129592896, "learning_rate": 0.0008198209675227612, "loss": 3.6576, "step": 21225 }, { "epoch": 1.4424514200298955, "grad_norm": 1.3606287240982056, "learning_rate": 0.0008197785025139285, "loss": 3.6186, "step": 21230 }, { "epoch": 1.442791140100557, "grad_norm": 1.7920453548431396, "learning_rate": 0.0008197360375050959, "loss": 3.5647, "step": 21235 }, { "epoch": 1.443130860171219, "grad_norm": 2.580580472946167, "learning_rate": 0.0008196935724962631, "loss": 3.4965, "step": 21240 }, { "epoch": 1.4434705802418808, "grad_norm": 1.3009382486343384, "learning_rate": 0.0008196511074874303, "loss": 3.7823, "step": 21245 }, { "epoch": 1.4438103003125424, "grad_norm": 1.2247087955474854, "learning_rate": 0.0008196086424785977, "loss": 3.4622, "step": 21250 }, { "epoch": 1.4441500203832043, "grad_norm": 1.4931490421295166, "learning_rate": 0.0008195661774697649, "loss": 3.7644, "step": 21255 }, { "epoch": 1.4444897404538661, "grad_norm": 1.2548755407333374, "learning_rate": 0.0008195237124609321, "loss": 3.4991, "step": 21260 }, { "epoch": 1.4448294605245278, "grad_norm": 1.18594229221344, "learning_rate": 0.0008194812474520996, "loss": 3.4308, "step": 21265 }, { "epoch": 1.4451691805951896, "grad_norm": 1.1856731176376343, "learning_rate": 0.0008194387824432668, "loss": 3.5923, "step": 21270 }, { "epoch": 
1.4455089006658515, "grad_norm": 1.8720967769622803, "learning_rate": 0.000819396317434434, "loss": 3.4291, "step": 21275 }, { "epoch": 1.445848620736513, "grad_norm": 1.265806794166565, "learning_rate": 0.0008193538524256013, "loss": 3.4626, "step": 21280 }, { "epoch": 1.446188340807175, "grad_norm": 1.2074356079101562, "learning_rate": 0.0008193113874167686, "loss": 3.7788, "step": 21285 }, { "epoch": 1.4465280608778366, "grad_norm": 1.3033462762832642, "learning_rate": 0.0008192689224079358, "loss": 3.9628, "step": 21290 }, { "epoch": 1.4468677809484984, "grad_norm": 1.4346723556518555, "learning_rate": 0.0008192264573991031, "loss": 3.5483, "step": 21295 }, { "epoch": 1.4472075010191603, "grad_norm": 1.3269932270050049, "learning_rate": 0.0008191839923902705, "loss": 3.6633, "step": 21300 }, { "epoch": 1.447547221089822, "grad_norm": 1.4506723880767822, "learning_rate": 0.0008191415273814378, "loss": 3.523, "step": 21305 }, { "epoch": 1.4478869411604838, "grad_norm": 1.3245407342910767, "learning_rate": 0.000819099062372605, "loss": 3.4868, "step": 21310 }, { "epoch": 1.4482266612311454, "grad_norm": 1.7666738033294678, "learning_rate": 0.0008190565973637722, "loss": 3.522, "step": 21315 }, { "epoch": 1.4485663813018073, "grad_norm": 1.2197129726409912, "learning_rate": 0.0008190141323549396, "loss": 3.5613, "step": 21320 }, { "epoch": 1.4489061013724691, "grad_norm": 1.3429501056671143, "learning_rate": 0.0008189716673461068, "loss": 3.7257, "step": 21325 }, { "epoch": 1.4492458214431307, "grad_norm": 1.240012288093567, "learning_rate": 0.000818929202337274, "loss": 3.5686, "step": 21330 }, { "epoch": 1.4495855415137926, "grad_norm": 1.2686747312545776, "learning_rate": 0.0008188867373284415, "loss": 3.7651, "step": 21335 }, { "epoch": 1.4499252615844545, "grad_norm": 1.2259761095046997, "learning_rate": 0.0008188442723196087, "loss": 3.506, "step": 21340 }, { "epoch": 1.450264981655116, "grad_norm": 1.2584679126739502, "learning_rate": 0.0008188018073107759, 
"loss": 3.7975, "step": 21345 }, { "epoch": 1.450604701725778, "grad_norm": 1.9890897274017334, "learning_rate": 0.0008187593423019433, "loss": 3.6373, "step": 21350 }, { "epoch": 1.4509444217964398, "grad_norm": 1.4009349346160889, "learning_rate": 0.0008187168772931105, "loss": 3.6504, "step": 21355 }, { "epoch": 1.4512841418671014, "grad_norm": 1.2476840019226074, "learning_rate": 0.0008186744122842777, "loss": 3.5745, "step": 21360 }, { "epoch": 1.4516238619377633, "grad_norm": 1.1485488414764404, "learning_rate": 0.0008186319472754452, "loss": 3.5542, "step": 21365 }, { "epoch": 1.4519635820084251, "grad_norm": 1.1123571395874023, "learning_rate": 0.0008185894822666124, "loss": 3.9671, "step": 21370 }, { "epoch": 1.4523033020790868, "grad_norm": 1.1160653829574585, "learning_rate": 0.0008185470172577796, "loss": 3.5845, "step": 21375 }, { "epoch": 1.4526430221497486, "grad_norm": 1.580903172492981, "learning_rate": 0.0008185045522489469, "loss": 3.3039, "step": 21380 }, { "epoch": 1.4529827422204105, "grad_norm": 1.2749340534210205, "learning_rate": 0.0008184620872401142, "loss": 3.7797, "step": 21385 }, { "epoch": 1.453322462291072, "grad_norm": 1.301289439201355, "learning_rate": 0.0008184196222312814, "loss": 3.6777, "step": 21390 }, { "epoch": 1.453662182361734, "grad_norm": 1.4013962745666504, "learning_rate": 0.0008183771572224487, "loss": 3.3572, "step": 21395 }, { "epoch": 1.4540019024323958, "grad_norm": 1.2055376768112183, "learning_rate": 0.0008183346922136161, "loss": 3.4677, "step": 21400 }, { "epoch": 1.4543416225030574, "grad_norm": 1.026432752609253, "learning_rate": 0.0008182922272047833, "loss": 3.7112, "step": 21405 }, { "epoch": 1.4546813425737193, "grad_norm": 1.5137932300567627, "learning_rate": 0.0008182497621959506, "loss": 3.7032, "step": 21410 }, { "epoch": 1.4550210626443811, "grad_norm": 1.294756293296814, "learning_rate": 0.0008182072971871178, "loss": 3.6183, "step": 21415 }, { "epoch": 1.4553607827150428, "grad_norm": 
1.3502923250198364, "learning_rate": 0.0008181648321782851, "loss": 3.4585, "step": 21420 }, { "epoch": 1.4557005027857046, "grad_norm": 1.268247127532959, "learning_rate": 0.0008181223671694524, "loss": 3.4502, "step": 21425 }, { "epoch": 1.4560402228563665, "grad_norm": 1.6482478380203247, "learning_rate": 0.0008180799021606196, "loss": 3.5915, "step": 21430 }, { "epoch": 1.456379942927028, "grad_norm": 1.376718282699585, "learning_rate": 0.000818037437151787, "loss": 3.4692, "step": 21435 }, { "epoch": 1.45671966299769, "grad_norm": 1.4824079275131226, "learning_rate": 0.0008179949721429543, "loss": 3.8049, "step": 21440 }, { "epoch": 1.4570593830683518, "grad_norm": 1.2466566562652588, "learning_rate": 0.0008179525071341215, "loss": 3.5222, "step": 21445 }, { "epoch": 1.4573991031390134, "grad_norm": 1.067481279373169, "learning_rate": 0.0008179100421252888, "loss": 3.84, "step": 21450 }, { "epoch": 1.4577388232096753, "grad_norm": 1.12952721118927, "learning_rate": 0.0008178675771164561, "loss": 3.721, "step": 21455 }, { "epoch": 1.458078543280337, "grad_norm": 1.2710442543029785, "learning_rate": 0.0008178251121076233, "loss": 3.4273, "step": 21460 }, { "epoch": 1.4584182633509988, "grad_norm": 1.2409396171569824, "learning_rate": 0.0008177826470987905, "loss": 3.7291, "step": 21465 }, { "epoch": 1.4587579834216606, "grad_norm": 1.1498180627822876, "learning_rate": 0.000817740182089958, "loss": 3.4746, "step": 21470 }, { "epoch": 1.4590977034923223, "grad_norm": 1.2782318592071533, "learning_rate": 0.0008176977170811252, "loss": 3.4361, "step": 21475 }, { "epoch": 1.4594374235629841, "grad_norm": 1.279120683670044, "learning_rate": 0.0008176552520722924, "loss": 3.7306, "step": 21480 }, { "epoch": 1.4597771436336457, "grad_norm": 1.5366346836090088, "learning_rate": 0.0008176127870634598, "loss": 3.6035, "step": 21485 }, { "epoch": 1.4601168637043076, "grad_norm": 1.8137903213500977, "learning_rate": 0.000817570322054627, "loss": 3.9372, "step": 21490 }, { 
"epoch": 1.4604565837749695, "grad_norm": 1.2047325372695923, "learning_rate": 0.0008175278570457942, "loss": 3.7999, "step": 21495 }, { "epoch": 1.460796303845631, "grad_norm": 1.284428596496582, "learning_rate": 0.0008174853920369616, "loss": 3.402, "step": 21500 }, { "epoch": 1.461136023916293, "grad_norm": 1.292004942893982, "learning_rate": 0.0008174429270281289, "loss": 3.5614, "step": 21505 }, { "epoch": 1.4614757439869548, "grad_norm": 1.126724362373352, "learning_rate": 0.0008174004620192961, "loss": 3.8124, "step": 21510 }, { "epoch": 1.4618154640576164, "grad_norm": 1.4925068616867065, "learning_rate": 0.0008173579970104634, "loss": 3.7287, "step": 21515 }, { "epoch": 1.4621551841282783, "grad_norm": 1.5917295217514038, "learning_rate": 0.0008173155320016307, "loss": 3.4317, "step": 21520 }, { "epoch": 1.4624949041989401, "grad_norm": 1.2780559062957764, "learning_rate": 0.0008172730669927979, "loss": 3.8529, "step": 21525 }, { "epoch": 1.4628346242696018, "grad_norm": 1.241574764251709, "learning_rate": 0.0008172306019839652, "loss": 3.6464, "step": 21530 }, { "epoch": 1.4631743443402636, "grad_norm": 1.4433292150497437, "learning_rate": 0.0008171881369751325, "loss": 3.6668, "step": 21535 }, { "epoch": 1.4635140644109255, "grad_norm": 1.4580693244934082, "learning_rate": 0.0008171456719662998, "loss": 3.5818, "step": 21540 }, { "epoch": 1.463853784481587, "grad_norm": 1.1880322694778442, "learning_rate": 0.0008171032069574671, "loss": 3.3916, "step": 21545 }, { "epoch": 1.464193504552249, "grad_norm": 1.5103577375411987, "learning_rate": 0.0008170607419486344, "loss": 3.8075, "step": 21550 }, { "epoch": 1.4645332246229108, "grad_norm": 1.551251769065857, "learning_rate": 0.0008170182769398016, "loss": 3.4373, "step": 21555 }, { "epoch": 1.4648729446935724, "grad_norm": 1.041155457496643, "learning_rate": 0.0008169758119309689, "loss": 3.6476, "step": 21560 }, { "epoch": 1.4652126647642343, "grad_norm": 1.0714060068130493, "learning_rate": 
0.0008169333469221361, "loss": 3.8143, "step": 21565 }, { "epoch": 1.4655523848348961, "grad_norm": 1.213655948638916, "learning_rate": 0.0008168908819133034, "loss": 3.7458, "step": 21570 }, { "epoch": 1.4658921049055578, "grad_norm": 1.1980124711990356, "learning_rate": 0.0008168484169044708, "loss": 3.7503, "step": 21575 }, { "epoch": 1.4662318249762196, "grad_norm": 1.8067193031311035, "learning_rate": 0.000816805951895638, "loss": 3.9315, "step": 21580 }, { "epoch": 1.4665715450468815, "grad_norm": 1.176366925239563, "learning_rate": 0.0008167634868868053, "loss": 3.612, "step": 21585 }, { "epoch": 1.466911265117543, "grad_norm": 1.9023854732513428, "learning_rate": 0.0008167210218779726, "loss": 3.5254, "step": 21590 }, { "epoch": 1.467250985188205, "grad_norm": 1.172239899635315, "learning_rate": 0.0008166785568691398, "loss": 3.6068, "step": 21595 }, { "epoch": 1.4675907052588668, "grad_norm": 1.3232165575027466, "learning_rate": 0.000816636091860307, "loss": 3.7466, "step": 21600 }, { "epoch": 1.4679304253295284, "grad_norm": 1.2893768548965454, "learning_rate": 0.0008165936268514744, "loss": 3.7037, "step": 21605 }, { "epoch": 1.4682701454001903, "grad_norm": 1.2423648834228516, "learning_rate": 0.0008165511618426417, "loss": 3.6719, "step": 21610 }, { "epoch": 1.4686098654708521, "grad_norm": 1.055600643157959, "learning_rate": 0.0008165086968338089, "loss": 3.4634, "step": 21615 }, { "epoch": 1.4689495855415138, "grad_norm": 1.4844311475753784, "learning_rate": 0.0008164662318249763, "loss": 3.6302, "step": 21620 }, { "epoch": 1.4692893056121756, "grad_norm": 1.1337465047836304, "learning_rate": 0.0008164237668161435, "loss": 3.3167, "step": 21625 }, { "epoch": 1.4696290256828373, "grad_norm": 1.238940954208374, "learning_rate": 0.0008163813018073107, "loss": 3.3698, "step": 21630 }, { "epoch": 1.4699687457534991, "grad_norm": 1.2874524593353271, "learning_rate": 0.0008163388367984781, "loss": 3.7448, "step": 21635 }, { "epoch": 1.470308465824161, 
"grad_norm": 1.41439688205719, "learning_rate": 0.0008162963717896453, "loss": 3.3999, "step": 21640 }, { "epoch": 1.4706481858948226, "grad_norm": 1.243151068687439, "learning_rate": 0.0008162539067808127, "loss": 3.4271, "step": 21645 }, { "epoch": 1.4709879059654845, "grad_norm": 1.670512080192566, "learning_rate": 0.00081621144177198, "loss": 3.5737, "step": 21650 }, { "epoch": 1.471327626036146, "grad_norm": 1.4700932502746582, "learning_rate": 0.0008161689767631472, "loss": 3.6009, "step": 21655 }, { "epoch": 1.471667346106808, "grad_norm": 1.155946135520935, "learning_rate": 0.0008161265117543145, "loss": 3.3863, "step": 21660 }, { "epoch": 1.4720070661774698, "grad_norm": 1.487093210220337, "learning_rate": 0.0008160840467454817, "loss": 3.7052, "step": 21665 }, { "epoch": 1.4723467862481314, "grad_norm": 1.3514273166656494, "learning_rate": 0.000816041581736649, "loss": 3.8608, "step": 21670 }, { "epoch": 1.4726865063187933, "grad_norm": 1.1365692615509033, "learning_rate": 0.0008159991167278163, "loss": 3.6055, "step": 21675 }, { "epoch": 1.4730262263894551, "grad_norm": 1.0428715944290161, "learning_rate": 0.0008159566517189836, "loss": 3.5698, "step": 21680 }, { "epoch": 1.4733659464601168, "grad_norm": 0.9803807139396667, "learning_rate": 0.0008159141867101509, "loss": 3.5456, "step": 21685 }, { "epoch": 1.4737056665307786, "grad_norm": 1.224067211151123, "learning_rate": 0.0008158717217013182, "loss": 3.5221, "step": 21690 }, { "epoch": 1.4740453866014405, "grad_norm": 1.6612730026245117, "learning_rate": 0.0008158292566924854, "loss": 3.7269, "step": 21695 }, { "epoch": 1.474385106672102, "grad_norm": 1.4501711130142212, "learning_rate": 0.0008157867916836526, "loss": 3.5573, "step": 21700 }, { "epoch": 1.474724826742764, "grad_norm": 1.1612762212753296, "learning_rate": 0.00081574432667482, "loss": 3.5434, "step": 21705 }, { "epoch": 1.4750645468134258, "grad_norm": 1.1859369277954102, "learning_rate": 0.0008157018616659872, "loss": 3.6459, "step": 
21710 }, { "epoch": 1.4754042668840874, "grad_norm": 1.0834674835205078, "learning_rate": 0.0008156593966571545, "loss": 3.7501, "step": 21715 }, { "epoch": 1.4757439869547493, "grad_norm": 1.2172625064849854, "learning_rate": 0.0008156169316483219, "loss": 3.5994, "step": 21720 }, { "epoch": 1.4760837070254111, "grad_norm": 1.232134461402893, "learning_rate": 0.0008155744666394891, "loss": 3.5168, "step": 21725 }, { "epoch": 1.4764234270960728, "grad_norm": 0.9762646555900574, "learning_rate": 0.0008155320016306563, "loss": 3.5633, "step": 21730 }, { "epoch": 1.4767631471667346, "grad_norm": 1.216476321220398, "learning_rate": 0.0008154895366218237, "loss": 3.5673, "step": 21735 }, { "epoch": 1.4771028672373965, "grad_norm": 1.2705098390579224, "learning_rate": 0.0008154470716129909, "loss": 3.4976, "step": 21740 }, { "epoch": 1.477442587308058, "grad_norm": 1.3459237813949585, "learning_rate": 0.0008154046066041581, "loss": 3.4865, "step": 21745 }, { "epoch": 1.47778230737872, "grad_norm": 1.4700475931167603, "learning_rate": 0.0008153621415953256, "loss": 3.5585, "step": 21750 }, { "epoch": 1.4781220274493818, "grad_norm": 1.0697382688522339, "learning_rate": 0.0008153196765864928, "loss": 3.6339, "step": 21755 }, { "epoch": 1.4784617475200434, "grad_norm": 1.3217405080795288, "learning_rate": 0.00081527721157766, "loss": 3.6475, "step": 21760 }, { "epoch": 1.4788014675907053, "grad_norm": 1.3545054197311401, "learning_rate": 0.0008152347465688273, "loss": 3.9974, "step": 21765 }, { "epoch": 1.4791411876613672, "grad_norm": 1.1921753883361816, "learning_rate": 0.0008151922815599946, "loss": 3.5641, "step": 21770 }, { "epoch": 1.4794809077320288, "grad_norm": 1.2429115772247314, "learning_rate": 0.0008151498165511618, "loss": 3.563, "step": 21775 }, { "epoch": 1.4798206278026906, "grad_norm": 1.1901109218597412, "learning_rate": 0.0008151073515423291, "loss": 3.4194, "step": 21780 }, { "epoch": 1.4801603478733525, "grad_norm": 1.036932349205017, "learning_rate": 
0.0008150648865334965, "loss": 3.7844, "step": 21785 }, { "epoch": 1.4805000679440141, "grad_norm": 1.4394645690917969, "learning_rate": 0.0008150224215246637, "loss": 3.6258, "step": 21790 }, { "epoch": 1.480839788014676, "grad_norm": 1.4070106744766235, "learning_rate": 0.000814979956515831, "loss": 3.8531, "step": 21795 }, { "epoch": 1.4811795080853376, "grad_norm": 1.4051578044891357, "learning_rate": 0.0008149374915069982, "loss": 3.5971, "step": 21800 }, { "epoch": 1.4815192281559995, "grad_norm": 0.894418478012085, "learning_rate": 0.0008148950264981655, "loss": 3.7966, "step": 21805 }, { "epoch": 1.4818589482266613, "grad_norm": 0.9260138273239136, "learning_rate": 0.0008148525614893328, "loss": 3.7343, "step": 21810 }, { "epoch": 1.482198668297323, "grad_norm": 1.4408066272735596, "learning_rate": 0.0008148100964805, "loss": 3.7915, "step": 21815 }, { "epoch": 1.4825383883679848, "grad_norm": 1.1741498708724976, "learning_rate": 0.0008147676314716674, "loss": 3.3434, "step": 21820 }, { "epoch": 1.4828781084386464, "grad_norm": 1.2646280527114868, "learning_rate": 0.0008147251664628347, "loss": 3.8862, "step": 21825 }, { "epoch": 1.4832178285093083, "grad_norm": 1.1981998682022095, "learning_rate": 0.0008146827014540019, "loss": 3.689, "step": 21830 }, { "epoch": 1.4835575485799701, "grad_norm": 1.3672101497650146, "learning_rate": 0.0008146402364451692, "loss": 3.7791, "step": 21835 }, { "epoch": 1.4838972686506318, "grad_norm": 1.1338402032852173, "learning_rate": 0.0008145977714363365, "loss": 3.5821, "step": 21840 }, { "epoch": 1.4842369887212936, "grad_norm": 1.1958779096603394, "learning_rate": 0.0008145553064275037, "loss": 3.5592, "step": 21845 }, { "epoch": 1.4845767087919555, "grad_norm": 1.4357374906539917, "learning_rate": 0.0008145128414186709, "loss": 3.4352, "step": 21850 }, { "epoch": 1.484916428862617, "grad_norm": 1.7839640378952026, "learning_rate": 0.0008144703764098384, "loss": 3.7772, "step": 21855 }, { "epoch": 1.485256148933279, 
"grad_norm": 1.4058414697647095, "learning_rate": 0.0008144279114010056, "loss": 3.5983, "step": 21860 }, { "epoch": 1.4855958690039408, "grad_norm": 1.2267005443572998, "learning_rate": 0.0008143854463921728, "loss": 3.7945, "step": 21865 }, { "epoch": 1.4859355890746024, "grad_norm": 1.5191218852996826, "learning_rate": 0.0008143429813833402, "loss": 3.4657, "step": 21870 }, { "epoch": 1.4862753091452643, "grad_norm": 1.2165732383728027, "learning_rate": 0.0008143005163745074, "loss": 3.4487, "step": 21875 }, { "epoch": 1.4866150292159261, "grad_norm": 1.3693114519119263, "learning_rate": 0.0008142580513656746, "loss": 3.6366, "step": 21880 }, { "epoch": 1.4869547492865878, "grad_norm": 1.3779596090316772, "learning_rate": 0.000814215586356842, "loss": 3.3553, "step": 21885 }, { "epoch": 1.4872944693572496, "grad_norm": 1.334214210510254, "learning_rate": 0.0008141731213480093, "loss": 3.369, "step": 21890 }, { "epoch": 1.4876341894279115, "grad_norm": 1.1611868143081665, "learning_rate": 0.0008141306563391765, "loss": 3.6309, "step": 21895 }, { "epoch": 1.487973909498573, "grad_norm": 1.545289158821106, "learning_rate": 0.0008140881913303439, "loss": 3.4096, "step": 21900 }, { "epoch": 1.488313629569235, "grad_norm": 1.536049485206604, "learning_rate": 0.0008140457263215111, "loss": 3.6059, "step": 21905 }, { "epoch": 1.4886533496398968, "grad_norm": 1.1154600381851196, "learning_rate": 0.0008140032613126783, "loss": 3.2585, "step": 21910 }, { "epoch": 1.4889930697105584, "grad_norm": 1.574630618095398, "learning_rate": 0.0008139607963038456, "loss": 3.455, "step": 21915 }, { "epoch": 1.4893327897812203, "grad_norm": 1.581252932548523, "learning_rate": 0.0008139183312950129, "loss": 3.4766, "step": 21920 }, { "epoch": 1.4896725098518822, "grad_norm": 1.5329824686050415, "learning_rate": 0.0008138758662861802, "loss": 3.3775, "step": 21925 }, { "epoch": 1.4900122299225438, "grad_norm": 1.3632237911224365, "learning_rate": 0.0008138334012773475, "loss": 3.7469, 
"step": 21930 }, { "epoch": 1.4903519499932056, "grad_norm": 0.9411647915840149, "learning_rate": 0.0008137909362685148, "loss": 3.6401, "step": 21935 }, { "epoch": 1.4906916700638675, "grad_norm": 1.246442437171936, "learning_rate": 0.000813748471259682, "loss": 3.7077, "step": 21940 }, { "epoch": 1.4910313901345291, "grad_norm": 1.1933437585830688, "learning_rate": 0.0008137060062508493, "loss": 3.5354, "step": 21945 }, { "epoch": 1.491371110205191, "grad_norm": 1.4961117506027222, "learning_rate": 0.0008136635412420165, "loss": 3.7128, "step": 21950 }, { "epoch": 1.4917108302758528, "grad_norm": 1.1888870000839233, "learning_rate": 0.0008136210762331838, "loss": 3.5921, "step": 21955 }, { "epoch": 1.4920505503465145, "grad_norm": 1.0914280414581299, "learning_rate": 0.0008135786112243512, "loss": 3.5306, "step": 21960 }, { "epoch": 1.4923902704171763, "grad_norm": 1.3475760221481323, "learning_rate": 0.0008135361462155184, "loss": 3.4743, "step": 21965 }, { "epoch": 1.492729990487838, "grad_norm": 1.4352227449417114, "learning_rate": 0.0008134936812066857, "loss": 3.4786, "step": 21970 }, { "epoch": 1.4930697105584998, "grad_norm": 1.5595706701278687, "learning_rate": 0.000813451216197853, "loss": 3.7437, "step": 21975 }, { "epoch": 1.4934094306291616, "grad_norm": 1.256055474281311, "learning_rate": 0.0008134087511890202, "loss": 3.6457, "step": 21980 }, { "epoch": 1.4937491506998233, "grad_norm": 1.6612766981124878, "learning_rate": 0.0008133662861801876, "loss": 3.2986, "step": 21985 }, { "epoch": 1.4940888707704851, "grad_norm": 1.0520174503326416, "learning_rate": 0.0008133238211713549, "loss": 3.47, "step": 21990 }, { "epoch": 1.4944285908411468, "grad_norm": 1.1552717685699463, "learning_rate": 0.0008132813561625221, "loss": 3.6233, "step": 21995 }, { "epoch": 1.4947683109118086, "grad_norm": 1.1479976177215576, "learning_rate": 0.0008132388911536895, "loss": 3.6761, "step": 22000 }, { "epoch": 1.4951080309824705, "grad_norm": 1.5431290864944458, 
"learning_rate": 0.0008131964261448567, "loss": 3.7492, "step": 22005 }, { "epoch": 1.495447751053132, "grad_norm": 1.5675028562545776, "learning_rate": 0.0008131539611360239, "loss": 3.5314, "step": 22010 }, { "epoch": 1.495787471123794, "grad_norm": 1.3080723285675049, "learning_rate": 0.0008131114961271912, "loss": 3.6551, "step": 22015 }, { "epoch": 1.4961271911944558, "grad_norm": 1.6528934240341187, "learning_rate": 0.0008130690311183585, "loss": 3.6818, "step": 22020 }, { "epoch": 1.4964669112651174, "grad_norm": 1.2556277513504028, "learning_rate": 0.0008130265661095258, "loss": 3.5728, "step": 22025 }, { "epoch": 1.4968066313357793, "grad_norm": 1.7991598844528198, "learning_rate": 0.0008129841011006931, "loss": 3.3321, "step": 22030 }, { "epoch": 1.4971463514064411, "grad_norm": 1.4006946086883545, "learning_rate": 0.0008129416360918604, "loss": 3.8317, "step": 22035 }, { "epoch": 1.4974860714771028, "grad_norm": 1.2326018810272217, "learning_rate": 0.0008128991710830276, "loss": 3.5118, "step": 22040 }, { "epoch": 1.4978257915477646, "grad_norm": 1.5764042139053345, "learning_rate": 0.0008128567060741949, "loss": 3.6289, "step": 22045 }, { "epoch": 1.4981655116184265, "grad_norm": 1.2781339883804321, "learning_rate": 0.0008128142410653621, "loss": 3.5859, "step": 22050 }, { "epoch": 1.4985052316890881, "grad_norm": 1.2254722118377686, "learning_rate": 0.0008127717760565294, "loss": 3.4396, "step": 22055 }, { "epoch": 1.49884495175975, "grad_norm": 1.0915368795394897, "learning_rate": 0.0008127293110476968, "loss": 3.7041, "step": 22060 }, { "epoch": 1.4991846718304118, "grad_norm": 1.1119288206100464, "learning_rate": 0.000812686846038864, "loss": 3.9106, "step": 22065 }, { "epoch": 1.4995243919010735, "grad_norm": 1.2748817205429077, "learning_rate": 0.0008126443810300313, "loss": 3.4243, "step": 22070 }, { "epoch": 1.4998641119717353, "grad_norm": 1.3209283351898193, "learning_rate": 0.0008126019160211986, "loss": 3.668, "step": 22075 }, { "epoch": 
1.5002038320423972, "grad_norm": 1.3676775693893433, "learning_rate": 0.0008125594510123658, "loss": 3.7698, "step": 22080 }, { "epoch": 1.5005435521130588, "grad_norm": 1.1626230478286743, "learning_rate": 0.000812516986003533, "loss": 3.5069, "step": 22085 }, { "epoch": 1.5008832721837206, "grad_norm": 1.790891408920288, "learning_rate": 0.0008124745209947004, "loss": 3.5299, "step": 22090 }, { "epoch": 1.5012229922543825, "grad_norm": 1.534904956817627, "learning_rate": 0.0008124320559858677, "loss": 3.5935, "step": 22095 }, { "epoch": 1.5015627123250441, "grad_norm": 1.2645468711853027, "learning_rate": 0.0008123895909770349, "loss": 3.3715, "step": 22100 }, { "epoch": 1.501902432395706, "grad_norm": 1.3027653694152832, "learning_rate": 0.0008123471259682023, "loss": 3.6783, "step": 22105 }, { "epoch": 1.5022421524663678, "grad_norm": 1.109789252281189, "learning_rate": 0.0008123046609593695, "loss": 3.7867, "step": 22110 }, { "epoch": 1.5025818725370295, "grad_norm": 1.2821261882781982, "learning_rate": 0.0008122621959505367, "loss": 3.6411, "step": 22115 }, { "epoch": 1.5029215926076913, "grad_norm": 1.262097954750061, "learning_rate": 0.0008122197309417041, "loss": 3.5104, "step": 22120 }, { "epoch": 1.5032613126783532, "grad_norm": 1.2244715690612793, "learning_rate": 0.0008121772659328713, "loss": 3.5485, "step": 22125 }, { "epoch": 1.5036010327490148, "grad_norm": 1.1284165382385254, "learning_rate": 0.0008121348009240386, "loss": 3.4124, "step": 22130 }, { "epoch": 1.5039407528196764, "grad_norm": 1.5318681001663208, "learning_rate": 0.000812092335915206, "loss": 3.6334, "step": 22135 }, { "epoch": 1.5042804728903385, "grad_norm": 1.436610221862793, "learning_rate": 0.0008120498709063732, "loss": 3.639, "step": 22140 }, { "epoch": 1.5046201929610001, "grad_norm": 1.5357285737991333, "learning_rate": 0.0008120074058975404, "loss": 3.8114, "step": 22145 }, { "epoch": 1.5049599130316618, "grad_norm": 1.5086508989334106, "learning_rate": 
0.0008119649408887077, "loss": 3.796, "step": 22150 }, { "epoch": 1.5052996331023238, "grad_norm": 1.2217419147491455, "learning_rate": 0.000811922475879875, "loss": 3.5523, "step": 22155 }, { "epoch": 1.5056393531729855, "grad_norm": 1.564122200012207, "learning_rate": 0.0008118800108710422, "loss": 3.6736, "step": 22160 }, { "epoch": 1.505979073243647, "grad_norm": 1.4854644536972046, "learning_rate": 0.0008118375458622096, "loss": 3.7477, "step": 22165 }, { "epoch": 1.506318793314309, "grad_norm": 1.254192590713501, "learning_rate": 0.0008117950808533769, "loss": 3.4018, "step": 22170 }, { "epoch": 1.5066585133849708, "grad_norm": 1.6836023330688477, "learning_rate": 0.0008117526158445441, "loss": 3.54, "step": 22175 }, { "epoch": 1.5069982334556324, "grad_norm": 1.2354227304458618, "learning_rate": 0.0008117101508357114, "loss": 3.5431, "step": 22180 }, { "epoch": 1.5073379535262943, "grad_norm": 1.1997119188308716, "learning_rate": 0.0008116676858268787, "loss": 3.695, "step": 22185 }, { "epoch": 1.5076776735969561, "grad_norm": 1.4102555513381958, "learning_rate": 0.0008116252208180459, "loss": 3.3595, "step": 22190 }, { "epoch": 1.5080173936676178, "grad_norm": 1.3159911632537842, "learning_rate": 0.0008115827558092132, "loss": 3.7263, "step": 22195 }, { "epoch": 1.5083571137382796, "grad_norm": 1.1120002269744873, "learning_rate": 0.0008115402908003805, "loss": 3.6058, "step": 22200 }, { "epoch": 1.5086968338089415, "grad_norm": 1.5766525268554688, "learning_rate": 0.0008114978257915478, "loss": 3.745, "step": 22205 }, { "epoch": 1.5090365538796031, "grad_norm": 1.470405101776123, "learning_rate": 0.0008114553607827151, "loss": 3.5878, "step": 22210 }, { "epoch": 1.509376273950265, "grad_norm": 1.0001336336135864, "learning_rate": 0.0008114128957738823, "loss": 3.6824, "step": 22215 }, { "epoch": 1.5097159940209268, "grad_norm": 1.1731386184692383, "learning_rate": 0.0008113704307650496, "loss": 3.4799, "step": 22220 }, { "epoch": 1.5100557140915885, 
"grad_norm": 1.3896983861923218, "learning_rate": 0.0008113279657562169, "loss": 3.5524, "step": 22225 }, { "epoch": 1.5103954341622503, "grad_norm": 1.6133008003234863, "learning_rate": 0.0008112855007473841, "loss": 3.4715, "step": 22230 }, { "epoch": 1.5107351542329122, "grad_norm": 1.2378860712051392, "learning_rate": 0.0008112430357385515, "loss": 3.4524, "step": 22235 }, { "epoch": 1.5110748743035738, "grad_norm": 1.1567316055297852, "learning_rate": 0.0008112005707297188, "loss": 3.6801, "step": 22240 }, { "epoch": 1.5114145943742356, "grad_norm": 1.2322652339935303, "learning_rate": 0.000811158105720886, "loss": 3.4612, "step": 22245 }, { "epoch": 1.5117543144448975, "grad_norm": 1.2793523073196411, "learning_rate": 0.0008111156407120532, "loss": 3.689, "step": 22250 }, { "epoch": 1.5120940345155591, "grad_norm": 1.3955837488174438, "learning_rate": 0.0008110731757032206, "loss": 3.7024, "step": 22255 }, { "epoch": 1.512433754586221, "grad_norm": 1.1466336250305176, "learning_rate": 0.0008110307106943878, "loss": 3.4298, "step": 22260 }, { "epoch": 1.5127734746568828, "grad_norm": 0.9798950552940369, "learning_rate": 0.000810988245685555, "loss": 3.5673, "step": 22265 }, { "epoch": 1.5131131947275445, "grad_norm": 1.200825810432434, "learning_rate": 0.0008109457806767225, "loss": 3.5861, "step": 22270 }, { "epoch": 1.5134529147982063, "grad_norm": 1.162855863571167, "learning_rate": 0.0008109033156678897, "loss": 3.3821, "step": 22275 }, { "epoch": 1.5137926348688682, "grad_norm": 2.0936279296875, "learning_rate": 0.0008108608506590569, "loss": 3.4329, "step": 22280 }, { "epoch": 1.5141323549395298, "grad_norm": 1.3141642808914185, "learning_rate": 0.0008108183856502243, "loss": 3.7163, "step": 22285 }, { "epoch": 1.5144720750101917, "grad_norm": 1.1984156370162964, "learning_rate": 0.0008107759206413915, "loss": 3.6053, "step": 22290 }, { "epoch": 1.5148117950808535, "grad_norm": 1.5997724533081055, "learning_rate": 0.0008107334556325587, "loss": 3.4476, 
"step": 22295 }, { "epoch": 1.5151515151515151, "grad_norm": 1.2436416149139404, "learning_rate": 0.000810690990623726, "loss": 3.6315, "step": 22300 }, { "epoch": 1.5154912352221768, "grad_norm": 1.0314195156097412, "learning_rate": 0.0008106485256148934, "loss": 3.812, "step": 22305 }, { "epoch": 1.5158309552928388, "grad_norm": 1.4282503128051758, "learning_rate": 0.0008106060606060606, "loss": 3.5905, "step": 22310 }, { "epoch": 1.5161706753635005, "grad_norm": 1.3202416896820068, "learning_rate": 0.0008105635955972279, "loss": 3.7043, "step": 22315 }, { "epoch": 1.516510395434162, "grad_norm": 1.17867112159729, "learning_rate": 0.0008105211305883952, "loss": 3.6199, "step": 22320 }, { "epoch": 1.5168501155048242, "grad_norm": 1.4976178407669067, "learning_rate": 0.0008104786655795624, "loss": 3.6135, "step": 22325 }, { "epoch": 1.5171898355754858, "grad_norm": 1.3449647426605225, "learning_rate": 0.0008104362005707297, "loss": 3.5776, "step": 22330 }, { "epoch": 1.5175295556461474, "grad_norm": 1.020971417427063, "learning_rate": 0.000810393735561897, "loss": 3.6219, "step": 22335 }, { "epoch": 1.5178692757168093, "grad_norm": 1.4237109422683716, "learning_rate": 0.0008103512705530644, "loss": 3.5549, "step": 22340 }, { "epoch": 1.5182089957874711, "grad_norm": 1.4017924070358276, "learning_rate": 0.0008103088055442316, "loss": 3.57, "step": 22345 }, { "epoch": 1.5185487158581328, "grad_norm": 1.2406572103500366, "learning_rate": 0.0008102663405353988, "loss": 3.5216, "step": 22350 }, { "epoch": 1.5188884359287946, "grad_norm": 1.3110032081604004, "learning_rate": 0.0008102238755265662, "loss": 3.4947, "step": 22355 }, { "epoch": 1.5192281559994565, "grad_norm": 1.0229239463806152, "learning_rate": 0.0008101814105177334, "loss": 3.6957, "step": 22360 }, { "epoch": 1.5195678760701181, "grad_norm": 1.8527090549468994, "learning_rate": 0.0008101389455089006, "loss": 3.406, "step": 22365 }, { "epoch": 1.51990759614078, "grad_norm": 1.2035632133483887, 
"learning_rate": 0.000810096480500068, "loss": 3.4449, "step": 22370 }, { "epoch": 1.5202473162114418, "grad_norm": 1.2449145317077637, "learning_rate": 0.0008100540154912353, "loss": 3.7389, "step": 22375 }, { "epoch": 1.5205870362821035, "grad_norm": 1.1115539073944092, "learning_rate": 0.0008100115504824025, "loss": 3.6861, "step": 22380 }, { "epoch": 1.5209267563527653, "grad_norm": 1.093229055404663, "learning_rate": 0.0008099690854735699, "loss": 3.6211, "step": 22385 }, { "epoch": 1.5212664764234272, "grad_norm": 1.3188869953155518, "learning_rate": 0.0008099266204647371, "loss": 3.64, "step": 22390 }, { "epoch": 1.5216061964940888, "grad_norm": 1.6953117847442627, "learning_rate": 0.0008098841554559043, "loss": 3.6924, "step": 22395 }, { "epoch": 1.5219459165647506, "grad_norm": 1.4610689878463745, "learning_rate": 0.0008098416904470716, "loss": 3.8022, "step": 22400 }, { "epoch": 1.5222856366354125, "grad_norm": 1.62521231174469, "learning_rate": 0.0008097992254382389, "loss": 3.637, "step": 22405 }, { "epoch": 1.5226253567060741, "grad_norm": 1.1948636770248413, "learning_rate": 0.0008097567604294062, "loss": 3.8246, "step": 22410 }, { "epoch": 1.522965076776736, "grad_norm": 1.1003972291946411, "learning_rate": 0.0008097142954205735, "loss": 3.6554, "step": 22415 }, { "epoch": 1.5233047968473978, "grad_norm": 1.3436939716339111, "learning_rate": 0.0008096718304117408, "loss": 3.6331, "step": 22420 }, { "epoch": 1.5236445169180595, "grad_norm": 0.9319984912872314, "learning_rate": 0.000809629365402908, "loss": 3.7299, "step": 22425 }, { "epoch": 1.5239842369887213, "grad_norm": 1.187792420387268, "learning_rate": 0.0008095869003940753, "loss": 3.3395, "step": 22430 }, { "epoch": 1.5243239570593832, "grad_norm": 1.4113562107086182, "learning_rate": 0.0008095444353852425, "loss": 3.4964, "step": 22435 }, { "epoch": 1.5246636771300448, "grad_norm": 1.3560389280319214, "learning_rate": 0.0008095019703764098, "loss": 3.5964, "step": 22440 }, { "epoch": 
1.5250033972007067, "grad_norm": 1.3721368312835693, "learning_rate": 0.0008094595053675772, "loss": 3.6337, "step": 22445 }, { "epoch": 1.5253431172713685, "grad_norm": 1.6911921501159668, "learning_rate": 0.0008094170403587444, "loss": 3.4261, "step": 22450 }, { "epoch": 1.5256828373420301, "grad_norm": 1.2763967514038086, "learning_rate": 0.0008093745753499117, "loss": 3.5934, "step": 22455 }, { "epoch": 1.526022557412692, "grad_norm": 1.4034963846206665, "learning_rate": 0.000809332110341079, "loss": 3.7027, "step": 22460 }, { "epoch": 1.5263622774833538, "grad_norm": 1.0065054893493652, "learning_rate": 0.0008092896453322462, "loss": 3.654, "step": 22465 }, { "epoch": 1.5267019975540155, "grad_norm": 1.086043357849121, "learning_rate": 0.0008092471803234135, "loss": 3.5708, "step": 22470 }, { "epoch": 1.527041717624677, "grad_norm": 1.2649028301239014, "learning_rate": 0.0008092047153145808, "loss": 3.8737, "step": 22475 }, { "epoch": 1.5273814376953392, "grad_norm": 1.3312736749649048, "learning_rate": 0.0008091622503057481, "loss": 3.6435, "step": 22480 }, { "epoch": 1.5277211577660008, "grad_norm": 1.0210615396499634, "learning_rate": 0.0008091197852969153, "loss": 3.7733, "step": 22485 }, { "epoch": 1.5280608778366624, "grad_norm": 1.5060818195343018, "learning_rate": 0.0008090773202880827, "loss": 3.7138, "step": 22490 }, { "epoch": 1.5284005979073245, "grad_norm": 1.632001280784607, "learning_rate": 0.0008090348552792499, "loss": 3.5611, "step": 22495 }, { "epoch": 1.5287403179779862, "grad_norm": 1.6520994901657104, "learning_rate": 0.0008089923902704171, "loss": 3.6404, "step": 22500 }, { "epoch": 1.5290800380486478, "grad_norm": 1.1957415342330933, "learning_rate": 0.0008089499252615845, "loss": 3.383, "step": 22505 }, { "epoch": 1.5294197581193096, "grad_norm": 1.1497372388839722, "learning_rate": 0.0008089074602527517, "loss": 3.5598, "step": 22510 }, { "epoch": 1.5297594781899715, "grad_norm": 1.1340280771255493, "learning_rate": 
0.000808864995243919, "loss": 3.371, "step": 22515 }, { "epoch": 1.5300991982606331, "grad_norm": 1.532944917678833, "learning_rate": 0.0008088225302350864, "loss": 3.315, "step": 22520 }, { "epoch": 1.530438918331295, "grad_norm": 1.102677345275879, "learning_rate": 0.0008087800652262536, "loss": 3.7437, "step": 22525 }, { "epoch": 1.5307786384019568, "grad_norm": 1.1409943103790283, "learning_rate": 0.0008087376002174208, "loss": 3.5872, "step": 22530 }, { "epoch": 1.5311183584726185, "grad_norm": 1.192995548248291, "learning_rate": 0.0008086951352085881, "loss": 3.7765, "step": 22535 }, { "epoch": 1.5314580785432803, "grad_norm": 1.4076792001724243, "learning_rate": 0.0008086526701997554, "loss": 3.5696, "step": 22540 }, { "epoch": 1.5317977986139422, "grad_norm": 1.0266146659851074, "learning_rate": 0.0008086102051909226, "loss": 3.5761, "step": 22545 }, { "epoch": 1.5321375186846038, "grad_norm": 1.7376453876495361, "learning_rate": 0.00080856774018209, "loss": 3.5852, "step": 22550 }, { "epoch": 1.5324772387552656, "grad_norm": 1.1994174718856812, "learning_rate": 0.0008085252751732573, "loss": 3.7096, "step": 22555 }, { "epoch": 1.5328169588259275, "grad_norm": 1.1529725790023804, "learning_rate": 0.0008084828101644245, "loss": 3.591, "step": 22560 }, { "epoch": 1.5331566788965891, "grad_norm": 1.6908351182937622, "learning_rate": 0.0008084403451555918, "loss": 3.5217, "step": 22565 }, { "epoch": 1.533496398967251, "grad_norm": 1.2590796947479248, "learning_rate": 0.0008083978801467591, "loss": 3.7914, "step": 22570 }, { "epoch": 1.5338361190379128, "grad_norm": 1.1937522888183594, "learning_rate": 0.0008083554151379263, "loss": 3.5312, "step": 22575 }, { "epoch": 1.5341758391085745, "grad_norm": 1.5171705484390259, "learning_rate": 0.0008083129501290937, "loss": 3.5501, "step": 22580 }, { "epoch": 1.5345155591792363, "grad_norm": 1.6651036739349365, "learning_rate": 0.000808270485120261, "loss": 3.7489, "step": 22585 }, { "epoch": 1.5348552792498982, 
"grad_norm": 1.5665321350097656, "learning_rate": 0.0008082280201114282, "loss": 3.9858, "step": 22590 }, { "epoch": 1.5351949993205598, "grad_norm": 1.5444412231445312, "learning_rate": 0.0008081855551025955, "loss": 3.917, "step": 22595 }, { "epoch": 1.5355347193912217, "grad_norm": 1.2785003185272217, "learning_rate": 0.0008081430900937627, "loss": 3.5897, "step": 22600 }, { "epoch": 1.5358744394618835, "grad_norm": 1.0684908628463745, "learning_rate": 0.00080810062508493, "loss": 3.5938, "step": 22605 }, { "epoch": 1.5362141595325451, "grad_norm": 1.5551729202270508, "learning_rate": 0.0008080581600760973, "loss": 3.5775, "step": 22610 }, { "epoch": 1.536553879603207, "grad_norm": 1.1253442764282227, "learning_rate": 0.0008080156950672646, "loss": 3.9429, "step": 22615 }, { "epoch": 1.5368935996738688, "grad_norm": 1.5806562900543213, "learning_rate": 0.0008079732300584319, "loss": 3.6228, "step": 22620 }, { "epoch": 1.5372333197445305, "grad_norm": 1.1379916667938232, "learning_rate": 0.0008079307650495992, "loss": 3.5787, "step": 22625 }, { "epoch": 1.5375730398151923, "grad_norm": 1.1610219478607178, "learning_rate": 0.0008078883000407664, "loss": 3.6244, "step": 22630 }, { "epoch": 1.5379127598858542, "grad_norm": 1.2174487113952637, "learning_rate": 0.0008078458350319336, "loss": 3.7281, "step": 22635 }, { "epoch": 1.5382524799565158, "grad_norm": 1.3164175748825073, "learning_rate": 0.000807803370023101, "loss": 3.5547, "step": 22640 }, { "epoch": 1.5385922000271774, "grad_norm": 1.2714073657989502, "learning_rate": 0.0008077609050142682, "loss": 3.5608, "step": 22645 }, { "epoch": 1.5389319200978395, "grad_norm": 1.4458556175231934, "learning_rate": 0.0008077184400054355, "loss": 3.6858, "step": 22650 }, { "epoch": 1.5392716401685012, "grad_norm": 1.2930186986923218, "learning_rate": 0.0008076759749966029, "loss": 3.4783, "step": 22655 }, { "epoch": 1.5396113602391628, "grad_norm": 1.1344150304794312, "learning_rate": 0.0008076335099877701, "loss": 
3.6193, "step": 22660 }, { "epoch": 1.5399510803098249, "grad_norm": 1.2900755405426025, "learning_rate": 0.0008075910449789373, "loss": 3.436, "step": 22665 }, { "epoch": 1.5402908003804865, "grad_norm": 1.0281996726989746, "learning_rate": 0.0008075485799701047, "loss": 3.5201, "step": 22670 }, { "epoch": 1.5406305204511481, "grad_norm": 1.209721565246582, "learning_rate": 0.0008075061149612719, "loss": 3.6338, "step": 22675 }, { "epoch": 1.54097024052181, "grad_norm": 1.205194115638733, "learning_rate": 0.0008074636499524392, "loss": 3.5718, "step": 22680 }, { "epoch": 1.5413099605924718, "grad_norm": 1.1798079013824463, "learning_rate": 0.0008074211849436066, "loss": 3.7411, "step": 22685 }, { "epoch": 1.5416496806631335, "grad_norm": 1.3207451105117798, "learning_rate": 0.0008073787199347738, "loss": 3.2954, "step": 22690 }, { "epoch": 1.5419894007337953, "grad_norm": 1.2711255550384521, "learning_rate": 0.0008073362549259411, "loss": 3.757, "step": 22695 }, { "epoch": 1.5423291208044572, "grad_norm": 1.4318599700927734, "learning_rate": 0.0008072937899171083, "loss": 3.5241, "step": 22700 }, { "epoch": 1.5426688408751188, "grad_norm": 0.8431128263473511, "learning_rate": 0.0008072513249082756, "loss": 3.5927, "step": 22705 }, { "epoch": 1.5430085609457806, "grad_norm": 1.525837779045105, "learning_rate": 0.0008072088598994429, "loss": 3.5237, "step": 22710 }, { "epoch": 1.5433482810164425, "grad_norm": 1.4766762256622314, "learning_rate": 0.0008071663948906101, "loss": 3.6149, "step": 22715 }, { "epoch": 1.5436880010871041, "grad_norm": 1.2465651035308838, "learning_rate": 0.0008071239298817775, "loss": 3.3208, "step": 22720 }, { "epoch": 1.544027721157766, "grad_norm": 1.3884135484695435, "learning_rate": 0.0008070814648729448, "loss": 3.6642, "step": 22725 }, { "epoch": 1.5443674412284278, "grad_norm": 1.6741777658462524, "learning_rate": 0.000807038999864112, "loss": 3.782, "step": 22730 }, { "epoch": 1.5447071612990895, "grad_norm": 1.0582441091537476, 
"learning_rate": 0.0008069965348552792, "loss": 3.614, "step": 22735 }, { "epoch": 1.5450468813697513, "grad_norm": 0.9632676839828491, "learning_rate": 0.0008069540698464466, "loss": 3.5567, "step": 22740 }, { "epoch": 1.5453866014404132, "grad_norm": 1.254266381263733, "learning_rate": 0.0008069116048376138, "loss": 3.5873, "step": 22745 }, { "epoch": 1.5457263215110748, "grad_norm": 1.1307737827301025, "learning_rate": 0.000806869139828781, "loss": 3.7528, "step": 22750 }, { "epoch": 1.5460660415817367, "grad_norm": 1.261049747467041, "learning_rate": 0.0008068266748199485, "loss": 3.5355, "step": 22755 }, { "epoch": 1.5464057616523985, "grad_norm": 1.4563586711883545, "learning_rate": 0.0008067842098111157, "loss": 3.4978, "step": 22760 }, { "epoch": 1.5467454817230601, "grad_norm": 1.1471030712127686, "learning_rate": 0.0008067417448022829, "loss": 3.5116, "step": 22765 }, { "epoch": 1.547085201793722, "grad_norm": 1.419179081916809, "learning_rate": 0.0008066992797934503, "loss": 3.5083, "step": 22770 }, { "epoch": 1.5474249218643839, "grad_norm": 1.378209114074707, "learning_rate": 0.0008066568147846175, "loss": 3.7799, "step": 22775 }, { "epoch": 1.5477646419350455, "grad_norm": 1.5530263185501099, "learning_rate": 0.0008066143497757847, "loss": 3.5323, "step": 22780 }, { "epoch": 1.5481043620057073, "grad_norm": 1.402791976928711, "learning_rate": 0.000806571884766952, "loss": 3.7353, "step": 22785 }, { "epoch": 1.5484440820763692, "grad_norm": 1.394650936126709, "learning_rate": 0.0008065294197581194, "loss": 3.6175, "step": 22790 }, { "epoch": 1.5487838021470308, "grad_norm": 1.5817092657089233, "learning_rate": 0.0008064869547492866, "loss": 3.86, "step": 22795 }, { "epoch": 1.5491235222176927, "grad_norm": 1.1515130996704102, "learning_rate": 0.0008064444897404539, "loss": 3.2909, "step": 22800 }, { "epoch": 1.5494632422883545, "grad_norm": 1.4163291454315186, "learning_rate": 0.0008064020247316212, "loss": 3.3459, "step": 22805 }, { "epoch": 
1.5498029623590162, "grad_norm": 1.411190152168274, "learning_rate": 0.0008063595597227884, "loss": 3.4396, "step": 22810 }, { "epoch": 1.5501426824296778, "grad_norm": 1.2666178941726685, "learning_rate": 0.0008063170947139557, "loss": 3.7338, "step": 22815 }, { "epoch": 1.5504824025003399, "grad_norm": 0.9279419779777527, "learning_rate": 0.000806274629705123, "loss": 3.5167, "step": 22820 }, { "epoch": 1.5508221225710015, "grad_norm": 1.4964802265167236, "learning_rate": 0.0008062321646962903, "loss": 3.5463, "step": 22825 }, { "epoch": 1.5511618426416631, "grad_norm": 1.560472011566162, "learning_rate": 0.0008061896996874576, "loss": 3.8358, "step": 22830 }, { "epoch": 1.5515015627123252, "grad_norm": 1.3112486600875854, "learning_rate": 0.0008061472346786248, "loss": 3.7288, "step": 22835 }, { "epoch": 1.5518412827829868, "grad_norm": 1.2037197351455688, "learning_rate": 0.0008061047696697921, "loss": 3.6838, "step": 22840 }, { "epoch": 1.5521810028536485, "grad_norm": 1.4134389162063599, "learning_rate": 0.0008060623046609594, "loss": 3.696, "step": 22845 }, { "epoch": 1.5525207229243103, "grad_norm": 1.1354684829711914, "learning_rate": 0.0008060198396521266, "loss": 3.5687, "step": 22850 }, { "epoch": 1.5528604429949722, "grad_norm": 1.4831956624984741, "learning_rate": 0.0008059773746432939, "loss": 3.4035, "step": 22855 }, { "epoch": 1.5532001630656338, "grad_norm": 1.3167918920516968, "learning_rate": 0.0008059349096344613, "loss": 3.6337, "step": 22860 }, { "epoch": 1.5535398831362957, "grad_norm": 1.3154926300048828, "learning_rate": 0.0008058924446256285, "loss": 3.663, "step": 22865 }, { "epoch": 1.5538796032069575, "grad_norm": 1.4685992002487183, "learning_rate": 0.0008058499796167958, "loss": 3.523, "step": 22870 }, { "epoch": 1.5542193232776191, "grad_norm": 1.1567156314849854, "learning_rate": 0.0008058075146079631, "loss": 3.6565, "step": 22875 }, { "epoch": 1.554559043348281, "grad_norm": 0.9947147965431213, "learning_rate": 
0.0008057650495991303, "loss": 3.6656, "step": 22880 }, { "epoch": 1.5548987634189428, "grad_norm": 1.4266517162322998, "learning_rate": 0.0008057225845902975, "loss": 3.629, "step": 22885 }, { "epoch": 1.5552384834896045, "grad_norm": 1.794588565826416, "learning_rate": 0.0008056801195814649, "loss": 3.6729, "step": 22890 }, { "epoch": 1.5555782035602663, "grad_norm": 1.4221376180648804, "learning_rate": 0.0008056376545726322, "loss": 3.715, "step": 22895 }, { "epoch": 1.5559179236309282, "grad_norm": 2.4475362300872803, "learning_rate": 0.0008055951895637994, "loss": 3.5129, "step": 22900 }, { "epoch": 1.5562576437015898, "grad_norm": 1.062941551208496, "learning_rate": 0.0008055527245549668, "loss": 3.6441, "step": 22905 }, { "epoch": 1.5565973637722517, "grad_norm": 1.2358840703964233, "learning_rate": 0.000805510259546134, "loss": 3.6474, "step": 22910 }, { "epoch": 1.5569370838429135, "grad_norm": 1.2501533031463623, "learning_rate": 0.0008054677945373012, "loss": 3.705, "step": 22915 }, { "epoch": 1.5572768039135751, "grad_norm": 1.2161157131195068, "learning_rate": 0.0008054253295284686, "loss": 3.5851, "step": 22920 }, { "epoch": 1.557616523984237, "grad_norm": 1.2693897485733032, "learning_rate": 0.0008053828645196358, "loss": 3.6864, "step": 22925 }, { "epoch": 1.5579562440548989, "grad_norm": 1.177846074104309, "learning_rate": 0.0008053403995108031, "loss": 3.5087, "step": 22930 }, { "epoch": 1.5582959641255605, "grad_norm": 1.4680665731430054, "learning_rate": 0.0008052979345019704, "loss": 3.5321, "step": 22935 }, { "epoch": 1.5586356841962223, "grad_norm": 1.0168124437332153, "learning_rate": 0.0008052554694931377, "loss": 3.664, "step": 22940 }, { "epoch": 1.5589754042668842, "grad_norm": 1.3887529373168945, "learning_rate": 0.0008052130044843049, "loss": 3.62, "step": 22945 }, { "epoch": 1.5593151243375458, "grad_norm": 1.0376441478729248, "learning_rate": 0.0008051705394754722, "loss": 3.5179, "step": 22950 }, { "epoch": 1.5596548444082077, 
"grad_norm": 1.145410180091858, "learning_rate": 0.0008051280744666395, "loss": 3.5621, "step": 22955 }, { "epoch": 1.5599945644788695, "grad_norm": 3.072444438934326, "learning_rate": 0.0008050856094578067, "loss": 3.6098, "step": 22960 }, { "epoch": 1.5603342845495312, "grad_norm": 1.8168396949768066, "learning_rate": 0.0008050431444489741, "loss": 3.6555, "step": 22965 }, { "epoch": 1.560674004620193, "grad_norm": 1.4360228776931763, "learning_rate": 0.0008050006794401414, "loss": 3.7559, "step": 22970 }, { "epoch": 1.5610137246908549, "grad_norm": 1.328173279762268, "learning_rate": 0.0008049582144313086, "loss": 3.4066, "step": 22975 }, { "epoch": 1.5613534447615165, "grad_norm": 1.4142612218856812, "learning_rate": 0.0008049157494224759, "loss": 3.4131, "step": 22980 }, { "epoch": 1.5616931648321781, "grad_norm": 1.4907419681549072, "learning_rate": 0.0008048732844136431, "loss": 3.4948, "step": 22985 }, { "epoch": 1.5620328849028402, "grad_norm": 1.4368531703948975, "learning_rate": 0.0008048308194048104, "loss": 3.7957, "step": 22990 }, { "epoch": 1.5623726049735018, "grad_norm": 1.0585213899612427, "learning_rate": 0.0008047883543959777, "loss": 3.4681, "step": 22995 }, { "epoch": 1.5627123250441635, "grad_norm": 1.2771497964859009, "learning_rate": 0.000804745889387145, "loss": 3.5139, "step": 23000 }, { "epoch": 1.5630520451148255, "grad_norm": 1.3524712324142456, "learning_rate": 0.0008047034243783123, "loss": 3.5233, "step": 23005 }, { "epoch": 1.5633917651854872, "grad_norm": 1.0831351280212402, "learning_rate": 0.0008046609593694796, "loss": 3.4339, "step": 23010 }, { "epoch": 1.5637314852561488, "grad_norm": 1.4909963607788086, "learning_rate": 0.0008046184943606468, "loss": 3.9663, "step": 23015 }, { "epoch": 1.5640712053268107, "grad_norm": 1.1989046335220337, "learning_rate": 0.0008045760293518142, "loss": 3.7495, "step": 23020 }, { "epoch": 1.5644109253974725, "grad_norm": 1.216452717781067, "learning_rate": 0.0008045335643429814, "loss": 
3.4925, "step": 23025 }, { "epoch": 1.5647506454681341, "grad_norm": 3.8137638568878174, "learning_rate": 0.0008044910993341486, "loss": 3.4737, "step": 23030 }, { "epoch": 1.565090365538796, "grad_norm": 1.2566262483596802, "learning_rate": 0.000804448634325316, "loss": 3.8343, "step": 23035 }, { "epoch": 1.5654300856094578, "grad_norm": 1.2132188081741333, "learning_rate": 0.0008044061693164833, "loss": 3.5904, "step": 23040 }, { "epoch": 1.5657698056801195, "grad_norm": 1.2761101722717285, "learning_rate": 0.0008043637043076505, "loss": 3.503, "step": 23045 }, { "epoch": 1.5661095257507813, "grad_norm": 1.2194273471832275, "learning_rate": 0.0008043212392988178, "loss": 3.6194, "step": 23050 }, { "epoch": 1.5664492458214432, "grad_norm": 1.4458385705947876, "learning_rate": 0.0008042787742899851, "loss": 3.6241, "step": 23055 }, { "epoch": 1.5667889658921048, "grad_norm": 0.9784530997276306, "learning_rate": 0.0008042363092811523, "loss": 3.7403, "step": 23060 }, { "epoch": 1.5671286859627667, "grad_norm": 1.2642148733139038, "learning_rate": 0.0008041938442723196, "loss": 3.9618, "step": 23065 }, { "epoch": 1.5674684060334285, "grad_norm": 1.3045411109924316, "learning_rate": 0.000804151379263487, "loss": 3.9583, "step": 23070 }, { "epoch": 1.5678081261040901, "grad_norm": 1.5596656799316406, "learning_rate": 0.0008041089142546542, "loss": 3.5788, "step": 23075 }, { "epoch": 1.568147846174752, "grad_norm": 1.1612082719802856, "learning_rate": 0.0008040664492458215, "loss": 3.7473, "step": 23080 }, { "epoch": 1.5684875662454139, "grad_norm": 1.5220822095870972, "learning_rate": 0.0008040239842369887, "loss": 3.5752, "step": 23085 }, { "epoch": 1.5688272863160755, "grad_norm": 1.5413116216659546, "learning_rate": 0.000803981519228156, "loss": 3.5876, "step": 23090 }, { "epoch": 1.5691670063867373, "grad_norm": 1.3697108030319214, "learning_rate": 0.0008039390542193233, "loss": 3.5503, "step": 23095 }, { "epoch": 1.5695067264573992, "grad_norm": 
1.1691190004348755, "learning_rate": 0.0008038965892104905, "loss": 3.7024, "step": 23100 }, { "epoch": 1.5698464465280608, "grad_norm": 1.266478180885315, "learning_rate": 0.0008038541242016579, "loss": 3.6918, "step": 23105 }, { "epoch": 1.5701861665987227, "grad_norm": 1.2537908554077148, "learning_rate": 0.0008038116591928252, "loss": 3.6029, "step": 23110 }, { "epoch": 1.5705258866693845, "grad_norm": 1.4618935585021973, "learning_rate": 0.0008037691941839924, "loss": 3.5607, "step": 23115 }, { "epoch": 1.5708656067400462, "grad_norm": 1.1456414461135864, "learning_rate": 0.0008037267291751596, "loss": 3.5573, "step": 23120 }, { "epoch": 1.571205326810708, "grad_norm": 1.0948057174682617, "learning_rate": 0.000803684264166327, "loss": 3.7144, "step": 23125 }, { "epoch": 1.5715450468813699, "grad_norm": 1.1921391487121582, "learning_rate": 0.0008036417991574942, "loss": 3.6492, "step": 23130 }, { "epoch": 1.5718847669520315, "grad_norm": 1.3137551546096802, "learning_rate": 0.0008035993341486614, "loss": 3.7278, "step": 23135 }, { "epoch": 1.5722244870226934, "grad_norm": 2.1183228492736816, "learning_rate": 0.0008035568691398289, "loss": 3.665, "step": 23140 }, { "epoch": 1.5725642070933552, "grad_norm": 1.085267424583435, "learning_rate": 0.0008035144041309961, "loss": 3.6572, "step": 23145 }, { "epoch": 1.5729039271640168, "grad_norm": 1.1150239706039429, "learning_rate": 0.0008034719391221633, "loss": 3.7317, "step": 23150 }, { "epoch": 1.5732436472346785, "grad_norm": 1.7030164003372192, "learning_rate": 0.0008034294741133307, "loss": 3.7834, "step": 23155 }, { "epoch": 1.5735833673053405, "grad_norm": 1.1758298873901367, "learning_rate": 0.0008033870091044979, "loss": 3.4654, "step": 23160 }, { "epoch": 1.5739230873760022, "grad_norm": 0.9881402254104614, "learning_rate": 0.0008033445440956651, "loss": 3.5896, "step": 23165 }, { "epoch": 1.5742628074466638, "grad_norm": 3.0764663219451904, "learning_rate": 0.0008033020790868326, "loss": 3.787, "step": 
23170 }, { "epoch": 1.5746025275173259, "grad_norm": 1.5009336471557617, "learning_rate": 0.0008032596140779998, "loss": 3.7058, "step": 23175 }, { "epoch": 1.5749422475879875, "grad_norm": 2.538383722305298, "learning_rate": 0.000803217149069167, "loss": 3.7973, "step": 23180 }, { "epoch": 1.5752819676586491, "grad_norm": 1.2719906568527222, "learning_rate": 0.0008031746840603343, "loss": 3.584, "step": 23185 }, { "epoch": 1.575621687729311, "grad_norm": 1.361403465270996, "learning_rate": 0.0008031322190515016, "loss": 3.7638, "step": 23190 }, { "epoch": 1.5759614077999728, "grad_norm": 1.6047990322113037, "learning_rate": 0.0008030897540426688, "loss": 3.9704, "step": 23195 }, { "epoch": 1.5763011278706345, "grad_norm": 1.2564419507980347, "learning_rate": 0.0008030472890338361, "loss": 3.3538, "step": 23200 }, { "epoch": 1.5766408479412963, "grad_norm": 1.3492902517318726, "learning_rate": 0.0008030048240250035, "loss": 3.6101, "step": 23205 }, { "epoch": 1.5769805680119582, "grad_norm": 1.6752448081970215, "learning_rate": 0.0008029623590161707, "loss": 3.4484, "step": 23210 }, { "epoch": 1.5773202880826198, "grad_norm": 1.1593483686447144, "learning_rate": 0.000802919894007338, "loss": 3.493, "step": 23215 }, { "epoch": 1.5776600081532817, "grad_norm": 1.3333911895751953, "learning_rate": 0.0008028774289985052, "loss": 3.5439, "step": 23220 }, { "epoch": 1.5779997282239435, "grad_norm": 1.1318862438201904, "learning_rate": 0.0008028349639896725, "loss": 3.6591, "step": 23225 }, { "epoch": 1.5783394482946052, "grad_norm": 1.3366618156433105, "learning_rate": 0.0008027924989808398, "loss": 3.4728, "step": 23230 }, { "epoch": 1.578679168365267, "grad_norm": 1.5659490823745728, "learning_rate": 0.000802750033972007, "loss": 3.6452, "step": 23235 }, { "epoch": 1.5790188884359289, "grad_norm": 1.4365782737731934, "learning_rate": 0.0008027075689631744, "loss": 3.4858, "step": 23240 }, { "epoch": 1.5793586085065905, "grad_norm": 1.3607667684555054, "learning_rate": 
0.0008026651039543417, "loss": 3.3744, "step": 23245 }, { "epoch": 1.5796983285772523, "grad_norm": 1.1839078664779663, "learning_rate": 0.0008026226389455089, "loss": 3.6055, "step": 23250 }, { "epoch": 1.5800380486479142, "grad_norm": 1.3012104034423828, "learning_rate": 0.0008025801739366762, "loss": 3.7665, "step": 23255 }, { "epoch": 1.5803777687185758, "grad_norm": 1.30666983127594, "learning_rate": 0.0008025377089278435, "loss": 3.642, "step": 23260 }, { "epoch": 1.5807174887892377, "grad_norm": 1.615011215209961, "learning_rate": 0.0008024952439190107, "loss": 3.5576, "step": 23265 }, { "epoch": 1.5810572088598995, "grad_norm": 1.2802430391311646, "learning_rate": 0.0008024527789101779, "loss": 3.6625, "step": 23270 }, { "epoch": 1.5813969289305612, "grad_norm": 1.2481166124343872, "learning_rate": 0.0008024103139013454, "loss": 3.8976, "step": 23275 }, { "epoch": 1.581736649001223, "grad_norm": 1.3300013542175293, "learning_rate": 0.0008023678488925126, "loss": 3.823, "step": 23280 }, { "epoch": 1.5820763690718849, "grad_norm": 1.314767599105835, "learning_rate": 0.0008023253838836798, "loss": 3.6368, "step": 23285 }, { "epoch": 1.5824160891425465, "grad_norm": 1.4710981845855713, "learning_rate": 0.0008022829188748472, "loss": 3.1354, "step": 23290 }, { "epoch": 1.5827558092132084, "grad_norm": 1.3957995176315308, "learning_rate": 0.0008022404538660144, "loss": 3.5272, "step": 23295 }, { "epoch": 1.5830955292838702, "grad_norm": 1.4961551427841187, "learning_rate": 0.0008021979888571816, "loss": 3.63, "step": 23300 }, { "epoch": 1.5834352493545318, "grad_norm": 1.4052133560180664, "learning_rate": 0.000802155523848349, "loss": 3.6017, "step": 23305 }, { "epoch": 1.5837749694251937, "grad_norm": 1.3190820217132568, "learning_rate": 0.0008021130588395163, "loss": 3.4272, "step": 23310 }, { "epoch": 1.5841146894958555, "grad_norm": 1.0898985862731934, "learning_rate": 0.0008020705938306835, "loss": 3.2768, "step": 23315 }, { "epoch": 1.5844544095665172, 
"grad_norm": 1.1997334957122803, "learning_rate": 0.0008020281288218509, "loss": 3.3593, "step": 23320 }, { "epoch": 1.5847941296371788, "grad_norm": 1.254241704940796, "learning_rate": 0.0008019856638130181, "loss": 3.6226, "step": 23325 }, { "epoch": 1.5851338497078409, "grad_norm": 1.172741413116455, "learning_rate": 0.0008019431988041853, "loss": 3.6634, "step": 23330 }, { "epoch": 1.5854735697785025, "grad_norm": 1.151342749595642, "learning_rate": 0.0008019007337953526, "loss": 3.6284, "step": 23335 }, { "epoch": 1.5858132898491641, "grad_norm": 1.631990671157837, "learning_rate": 0.0008018582687865199, "loss": 3.4831, "step": 23340 }, { "epoch": 1.5861530099198262, "grad_norm": 1.3245784044265747, "learning_rate": 0.0008018158037776872, "loss": 3.576, "step": 23345 }, { "epoch": 1.5864927299904878, "grad_norm": 1.2720754146575928, "learning_rate": 0.0008017733387688545, "loss": 3.9497, "step": 23350 }, { "epoch": 1.5868324500611495, "grad_norm": 1.715367317199707, "learning_rate": 0.0008017308737600218, "loss": 3.6381, "step": 23355 }, { "epoch": 1.5871721701318113, "grad_norm": 1.288109302520752, "learning_rate": 0.0008016884087511891, "loss": 3.3941, "step": 23360 }, { "epoch": 1.5875118902024732, "grad_norm": 1.076594352722168, "learning_rate": 0.0008016459437423563, "loss": 3.6962, "step": 23365 }, { "epoch": 1.5878516102731348, "grad_norm": 1.0920621156692505, "learning_rate": 0.0008016034787335235, "loss": 3.7386, "step": 23370 }, { "epoch": 1.5881913303437967, "grad_norm": 1.1481564044952393, "learning_rate": 0.0008015610137246909, "loss": 3.5043, "step": 23375 }, { "epoch": 1.5885310504144585, "grad_norm": 1.2330849170684814, "learning_rate": 0.0008015185487158582, "loss": 3.6899, "step": 23380 }, { "epoch": 1.5888707704851202, "grad_norm": 1.5526723861694336, "learning_rate": 0.0008014760837070254, "loss": 3.7559, "step": 23385 }, { "epoch": 1.589210490555782, "grad_norm": 2.2937679290771484, "learning_rate": 0.0008014336186981928, "loss": 3.4802, 
"step": 23390 }, { "epoch": 1.5895502106264439, "grad_norm": 1.1754717826843262, "learning_rate": 0.00080139115368936, "loss": 3.7195, "step": 23395 }, { "epoch": 1.5898899306971055, "grad_norm": 1.3415248394012451, "learning_rate": 0.0008013486886805272, "loss": 3.5778, "step": 23400 }, { "epoch": 1.5902296507677673, "grad_norm": 1.76779043674469, "learning_rate": 0.0008013062236716946, "loss": 3.8687, "step": 23405 }, { "epoch": 1.5905693708384292, "grad_norm": 1.3652948141098022, "learning_rate": 0.0008012637586628618, "loss": 3.836, "step": 23410 }, { "epoch": 1.5909090909090908, "grad_norm": 1.0428130626678467, "learning_rate": 0.0008012212936540291, "loss": 3.6195, "step": 23415 }, { "epoch": 1.5912488109797527, "grad_norm": 1.350566029548645, "learning_rate": 0.0008011788286451965, "loss": 3.3958, "step": 23420 }, { "epoch": 1.5915885310504145, "grad_norm": 1.3463091850280762, "learning_rate": 0.0008011363636363637, "loss": 3.7365, "step": 23425 }, { "epoch": 1.5919282511210762, "grad_norm": 1.1214609146118164, "learning_rate": 0.0008010938986275309, "loss": 3.5668, "step": 23430 }, { "epoch": 1.592267971191738, "grad_norm": 1.4567327499389648, "learning_rate": 0.0008010514336186982, "loss": 3.2511, "step": 23435 }, { "epoch": 1.5926076912623999, "grad_norm": 1.9105473756790161, "learning_rate": 0.0008010089686098655, "loss": 3.2727, "step": 23440 }, { "epoch": 1.5929474113330615, "grad_norm": 1.4723087549209595, "learning_rate": 0.0008009665036010327, "loss": 3.5148, "step": 23445 }, { "epoch": 1.5932871314037234, "grad_norm": 1.218369960784912, "learning_rate": 0.0008009240385922001, "loss": 3.8277, "step": 23450 }, { "epoch": 1.5936268514743852, "grad_norm": 1.308727741241455, "learning_rate": 0.0008008815735833674, "loss": 3.446, "step": 23455 }, { "epoch": 1.5939665715450468, "grad_norm": 1.3212417364120483, "learning_rate": 0.0008008391085745346, "loss": 3.7432, "step": 23460 }, { "epoch": 1.5943062916157087, "grad_norm": 1.1117372512817383, 
"learning_rate": 0.0008007966435657019, "loss": 3.4119, "step": 23465 }, { "epoch": 1.5946460116863705, "grad_norm": 1.504333257675171, "learning_rate": 0.0008007541785568691, "loss": 3.6844, "step": 23470 }, { "epoch": 1.5949857317570322, "grad_norm": 1.1989927291870117, "learning_rate": 0.0008007117135480364, "loss": 3.2962, "step": 23475 }, { "epoch": 1.595325451827694, "grad_norm": 1.5015735626220703, "learning_rate": 0.0008006692485392037, "loss": 3.5761, "step": 23480 }, { "epoch": 1.5956651718983559, "grad_norm": 1.5947929620742798, "learning_rate": 0.000800626783530371, "loss": 3.4482, "step": 23485 }, { "epoch": 1.5960048919690175, "grad_norm": 1.2789154052734375, "learning_rate": 0.0008005843185215383, "loss": 3.4641, "step": 23490 }, { "epoch": 1.5963446120396791, "grad_norm": 1.4168009757995605, "learning_rate": 0.0008005418535127056, "loss": 3.4413, "step": 23495 }, { "epoch": 1.5966843321103412, "grad_norm": 1.4924339056015015, "learning_rate": 0.0008004993885038728, "loss": 3.7173, "step": 23500 }, { "epoch": 1.5970240521810029, "grad_norm": 1.6144036054611206, "learning_rate": 0.00080045692349504, "loss": 3.3291, "step": 23505 }, { "epoch": 1.5973637722516645, "grad_norm": 1.379496693611145, "learning_rate": 0.0008004144584862074, "loss": 3.5827, "step": 23510 }, { "epoch": 1.5977034923223266, "grad_norm": 1.254181146621704, "learning_rate": 0.0008003719934773746, "loss": 3.6969, "step": 23515 }, { "epoch": 1.5980432123929882, "grad_norm": 1.4016964435577393, "learning_rate": 0.0008003295284685419, "loss": 3.5921, "step": 23520 }, { "epoch": 1.5983829324636498, "grad_norm": 1.2989836931228638, "learning_rate": 0.0008002870634597093, "loss": 3.7936, "step": 23525 }, { "epoch": 1.5987226525343117, "grad_norm": 1.2678810358047485, "learning_rate": 0.0008002445984508765, "loss": 3.4713, "step": 23530 }, { "epoch": 1.5990623726049735, "grad_norm": 1.3817206621170044, "learning_rate": 0.0008002021334420437, "loss": 3.3129, "step": 23535 }, { "epoch": 
1.5994020926756352, "grad_norm": 1.5479806661605835, "learning_rate": 0.0008001596684332111, "loss": 3.7664, "step": 23540 }, { "epoch": 1.599741812746297, "grad_norm": 1.979860544204712, "learning_rate": 0.0008001172034243783, "loss": 3.432, "step": 23545 }, { "epoch": 1.6000815328169589, "grad_norm": 1.1605676412582397, "learning_rate": 0.0008000747384155455, "loss": 3.8203, "step": 23550 }, { "epoch": 1.6004212528876205, "grad_norm": 1.1024047136306763, "learning_rate": 0.000800032273406713, "loss": 3.6605, "step": 23555 }, { "epoch": 1.6007609729582823, "grad_norm": 1.227097749710083, "learning_rate": 0.0007999898083978802, "loss": 3.6796, "step": 23560 }, { "epoch": 1.6011006930289442, "grad_norm": 2.8422415256500244, "learning_rate": 0.0007999473433890474, "loss": 3.5822, "step": 23565 }, { "epoch": 1.6014404130996058, "grad_norm": 1.4344569444656372, "learning_rate": 0.0007999048783802147, "loss": 3.7212, "step": 23570 }, { "epoch": 1.6017801331702677, "grad_norm": 1.4090243577957153, "learning_rate": 0.000799862413371382, "loss": 3.5623, "step": 23575 }, { "epoch": 1.6021198532409295, "grad_norm": 1.8399988412857056, "learning_rate": 0.0007998199483625492, "loss": 3.5051, "step": 23580 }, { "epoch": 1.6024595733115912, "grad_norm": 1.3197181224822998, "learning_rate": 0.0007997774833537165, "loss": 3.9359, "step": 23585 }, { "epoch": 1.602799293382253, "grad_norm": 1.3598952293395996, "learning_rate": 0.0007997350183448839, "loss": 3.8039, "step": 23590 }, { "epoch": 1.6031390134529149, "grad_norm": 1.1941637992858887, "learning_rate": 0.0007996925533360511, "loss": 3.5206, "step": 23595 }, { "epoch": 1.6034787335235765, "grad_norm": 1.3605802059173584, "learning_rate": 0.0007996500883272184, "loss": 3.8511, "step": 23600 }, { "epoch": 1.6038184535942384, "grad_norm": 1.1827150583267212, "learning_rate": 0.0007996076233183857, "loss": 3.7694, "step": 23605 }, { "epoch": 1.6041581736649002, "grad_norm": 1.202412486076355, "learning_rate": 
0.0007995651583095529, "loss": 3.5973, "step": 23610 }, { "epoch": 1.6044978937355618, "grad_norm": 1.7130420207977295, "learning_rate": 0.0007995226933007202, "loss": 3.238, "step": 23615 }, { "epoch": 1.6048376138062237, "grad_norm": 1.4812672138214111, "learning_rate": 0.0007994802282918874, "loss": 3.5502, "step": 23620 }, { "epoch": 1.6051773338768855, "grad_norm": 1.1873213052749634, "learning_rate": 0.0007994377632830548, "loss": 3.6985, "step": 23625 }, { "epoch": 1.6055170539475472, "grad_norm": 1.0790226459503174, "learning_rate": 0.0007993952982742221, "loss": 3.5821, "step": 23630 }, { "epoch": 1.605856774018209, "grad_norm": 1.357358694076538, "learning_rate": 0.0007993528332653893, "loss": 3.476, "step": 23635 }, { "epoch": 1.6061964940888709, "grad_norm": 1.3191001415252686, "learning_rate": 0.0007993103682565566, "loss": 3.7619, "step": 23640 }, { "epoch": 1.6065362141595325, "grad_norm": 1.1990470886230469, "learning_rate": 0.0007992679032477239, "loss": 3.3844, "step": 23645 }, { "epoch": 1.6068759342301944, "grad_norm": 1.1613215208053589, "learning_rate": 0.0007992254382388911, "loss": 3.5796, "step": 23650 }, { "epoch": 1.6072156543008562, "grad_norm": 1.9179291725158691, "learning_rate": 0.0007991829732300583, "loss": 3.6774, "step": 23655 }, { "epoch": 1.6075553743715179, "grad_norm": 1.0920639038085938, "learning_rate": 0.0007991405082212258, "loss": 3.754, "step": 23660 }, { "epoch": 1.6078950944421795, "grad_norm": 1.0980825424194336, "learning_rate": 0.000799098043212393, "loss": 3.4038, "step": 23665 }, { "epoch": 1.6082348145128416, "grad_norm": 1.3725696802139282, "learning_rate": 0.0007990555782035602, "loss": 3.5209, "step": 23670 }, { "epoch": 1.6085745345835032, "grad_norm": 1.2681727409362793, "learning_rate": 0.0007990131131947276, "loss": 3.4494, "step": 23675 }, { "epoch": 1.6089142546541648, "grad_norm": 1.3472793102264404, "learning_rate": 0.0007989706481858948, "loss": 3.6206, "step": 23680 }, { "epoch": 1.609253974724827, 
"grad_norm": 1.161422610282898, "learning_rate": 0.000798928183177062, "loss": 3.618, "step": 23685 }, { "epoch": 1.6095936947954885, "grad_norm": 0.9578686952590942, "learning_rate": 0.0007988857181682295, "loss": 3.7387, "step": 23690 }, { "epoch": 1.6099334148661502, "grad_norm": 1.180944561958313, "learning_rate": 0.0007988432531593967, "loss": 3.2238, "step": 23695 }, { "epoch": 1.610273134936812, "grad_norm": 1.0440435409545898, "learning_rate": 0.000798800788150564, "loss": 3.5525, "step": 23700 }, { "epoch": 1.6106128550074739, "grad_norm": 0.9774317145347595, "learning_rate": 0.0007987583231417313, "loss": 3.5841, "step": 23705 }, { "epoch": 1.6109525750781355, "grad_norm": 1.3590381145477295, "learning_rate": 0.0007987158581328985, "loss": 3.6598, "step": 23710 }, { "epoch": 1.6112922951487973, "grad_norm": 1.1606674194335938, "learning_rate": 0.0007986733931240658, "loss": 3.6745, "step": 23715 }, { "epoch": 1.6116320152194592, "grad_norm": 1.218925952911377, "learning_rate": 0.000798630928115233, "loss": 3.9072, "step": 23720 }, { "epoch": 1.6119717352901208, "grad_norm": 1.2476770877838135, "learning_rate": 0.0007985884631064004, "loss": 3.5386, "step": 23725 }, { "epoch": 1.6123114553607827, "grad_norm": 1.353126049041748, "learning_rate": 0.0007985459980975677, "loss": 3.8453, "step": 23730 }, { "epoch": 1.6126511754314445, "grad_norm": 1.419842004776001, "learning_rate": 0.0007985035330887349, "loss": 3.6887, "step": 23735 }, { "epoch": 1.6129908955021062, "grad_norm": 1.0607928037643433, "learning_rate": 0.0007984610680799022, "loss": 3.3105, "step": 23740 }, { "epoch": 1.613330615572768, "grad_norm": 1.3651641607284546, "learning_rate": 0.0007984186030710695, "loss": 3.6473, "step": 23745 }, { "epoch": 1.6136703356434299, "grad_norm": 1.2093615531921387, "learning_rate": 0.0007983761380622367, "loss": 3.6401, "step": 23750 }, { "epoch": 1.6140100557140915, "grad_norm": 1.2676985263824463, "learning_rate": 0.000798333673053404, "loss": 3.7299, 
"step": 23755 }, { "epoch": 1.6143497757847534, "grad_norm": 1.2602815628051758, "learning_rate": 0.0007982912080445714, "loss": 3.6305, "step": 23760 }, { "epoch": 1.6146894958554152, "grad_norm": 1.3846454620361328, "learning_rate": 0.0007982487430357386, "loss": 3.5471, "step": 23765 }, { "epoch": 1.6150292159260768, "grad_norm": 1.3324280977249146, "learning_rate": 0.0007982062780269058, "loss": 3.3257, "step": 23770 }, { "epoch": 1.6153689359967387, "grad_norm": 1.1483615636825562, "learning_rate": 0.0007981638130180732, "loss": 3.6681, "step": 23775 }, { "epoch": 1.6157086560674006, "grad_norm": 1.395862102508545, "learning_rate": 0.0007981213480092404, "loss": 3.658, "step": 23780 }, { "epoch": 1.6160483761380622, "grad_norm": 1.1852202415466309, "learning_rate": 0.0007980788830004076, "loss": 3.4284, "step": 23785 }, { "epoch": 1.616388096208724, "grad_norm": 1.1963207721710205, "learning_rate": 0.000798036417991575, "loss": 3.5869, "step": 23790 }, { "epoch": 1.6167278162793859, "grad_norm": 1.560212254524231, "learning_rate": 0.0007979939529827423, "loss": 3.3556, "step": 23795 }, { "epoch": 1.6170675363500475, "grad_norm": 1.2622990608215332, "learning_rate": 0.0007979514879739095, "loss": 3.6144, "step": 23800 }, { "epoch": 1.6174072564207094, "grad_norm": 1.164423942565918, "learning_rate": 0.0007979090229650769, "loss": 3.3801, "step": 23805 }, { "epoch": 1.6177469764913712, "grad_norm": 1.3897839784622192, "learning_rate": 0.0007978665579562441, "loss": 3.5403, "step": 23810 }, { "epoch": 1.6180866965620329, "grad_norm": 1.1599195003509521, "learning_rate": 0.0007978240929474113, "loss": 3.8534, "step": 23815 }, { "epoch": 1.6184264166326947, "grad_norm": 1.144518494606018, "learning_rate": 0.0007977816279385786, "loss": 3.5343, "step": 23820 }, { "epoch": 1.6187661367033566, "grad_norm": 1.5647425651550293, "learning_rate": 0.0007977391629297459, "loss": 3.6082, "step": 23825 }, { "epoch": 1.6191058567740182, "grad_norm": 1.4926596879959106, 
"learning_rate": 0.0007976966979209132, "loss": 3.5704, "step": 23830 }, { "epoch": 1.6194455768446798, "grad_norm": 1.370436191558838, "learning_rate": 0.0007976542329120805, "loss": 3.537, "step": 23835 }, { "epoch": 1.619785296915342, "grad_norm": 1.247840404510498, "learning_rate": 0.0007976117679032478, "loss": 3.4028, "step": 23840 }, { "epoch": 1.6201250169860035, "grad_norm": 1.3202054500579834, "learning_rate": 0.000797569302894415, "loss": 3.2608, "step": 23845 }, { "epoch": 1.6204647370566652, "grad_norm": 1.2550209760665894, "learning_rate": 0.0007975268378855823, "loss": 3.8348, "step": 23850 }, { "epoch": 1.6208044571273272, "grad_norm": 1.2390421628952026, "learning_rate": 0.0007974843728767495, "loss": 3.8093, "step": 23855 }, { "epoch": 1.6211441771979889, "grad_norm": 1.1434688568115234, "learning_rate": 0.0007974419078679168, "loss": 3.4672, "step": 23860 }, { "epoch": 1.6214838972686505, "grad_norm": 1.1894410848617554, "learning_rate": 0.0007973994428590842, "loss": 3.4388, "step": 23865 }, { "epoch": 1.6218236173393124, "grad_norm": 1.4108253717422485, "learning_rate": 0.0007973569778502514, "loss": 3.4632, "step": 23870 }, { "epoch": 1.6221633374099742, "grad_norm": 1.5686062574386597, "learning_rate": 0.0007973145128414187, "loss": 3.5698, "step": 23875 }, { "epoch": 1.6225030574806358, "grad_norm": 1.5123628377914429, "learning_rate": 0.000797272047832586, "loss": 3.7296, "step": 23880 }, { "epoch": 1.6228427775512977, "grad_norm": 1.320263385772705, "learning_rate": 0.0007972295828237532, "loss": 3.6213, "step": 23885 }, { "epoch": 1.6231824976219595, "grad_norm": 1.396625280380249, "learning_rate": 0.0007971871178149205, "loss": 3.6403, "step": 23890 }, { "epoch": 1.6235222176926212, "grad_norm": 1.2074706554412842, "learning_rate": 0.0007971446528060878, "loss": 3.3562, "step": 23895 }, { "epoch": 1.623861937763283, "grad_norm": 1.2179303169250488, "learning_rate": 0.0007971021877972551, "loss": 3.6265, "step": 23900 }, { "epoch": 
1.6242016578339449, "grad_norm": 1.202686071395874, "learning_rate": 0.0007970597227884223, "loss": 3.6565, "step": 23905 }, { "epoch": 1.6245413779046065, "grad_norm": 1.3129931688308716, "learning_rate": 0.0007970172577795897, "loss": 3.5287, "step": 23910 }, { "epoch": 1.6248810979752684, "grad_norm": 1.6891599893569946, "learning_rate": 0.0007969747927707569, "loss": 3.5572, "step": 23915 }, { "epoch": 1.6252208180459302, "grad_norm": 1.1682325601577759, "learning_rate": 0.0007969323277619241, "loss": 3.5692, "step": 23920 }, { "epoch": 1.6255605381165918, "grad_norm": 1.0552423000335693, "learning_rate": 0.0007968898627530915, "loss": 3.6038, "step": 23925 }, { "epoch": 1.6259002581872537, "grad_norm": 1.0790436267852783, "learning_rate": 0.0007968473977442587, "loss": 3.6007, "step": 23930 }, { "epoch": 1.6262399782579156, "grad_norm": 1.2637858390808105, "learning_rate": 0.000796804932735426, "loss": 3.4285, "step": 23935 }, { "epoch": 1.6265796983285772, "grad_norm": 0.8932505249977112, "learning_rate": 0.0007967624677265934, "loss": 3.5105, "step": 23940 }, { "epoch": 1.626919418399239, "grad_norm": 1.5063456296920776, "learning_rate": 0.0007967200027177606, "loss": 3.6023, "step": 23945 }, { "epoch": 1.627259138469901, "grad_norm": 1.4307953119277954, "learning_rate": 0.0007966775377089278, "loss": 3.3486, "step": 23950 }, { "epoch": 1.6275988585405625, "grad_norm": 1.2944436073303223, "learning_rate": 0.0007966350727000951, "loss": 3.6584, "step": 23955 }, { "epoch": 1.6279385786112244, "grad_norm": 1.403580665588379, "learning_rate": 0.0007965926076912624, "loss": 3.8711, "step": 23960 }, { "epoch": 1.6282782986818862, "grad_norm": 1.4158071279525757, "learning_rate": 0.0007965501426824296, "loss": 3.6708, "step": 23965 }, { "epoch": 1.6286180187525479, "grad_norm": 1.0535014867782593, "learning_rate": 0.000796507677673597, "loss": 3.7111, "step": 23970 }, { "epoch": 1.6289577388232097, "grad_norm": 1.1518834829330444, "learning_rate": 
0.0007964652126647643, "loss": 3.603, "step": 23975 }, { "epoch": 1.6292974588938716, "grad_norm": 1.467543363571167, "learning_rate": 0.0007964227476559315, "loss": 3.5906, "step": 23980 }, { "epoch": 1.6296371789645332, "grad_norm": 1.2042369842529297, "learning_rate": 0.0007963802826470988, "loss": 3.8697, "step": 23985 }, { "epoch": 1.629976899035195, "grad_norm": 1.4726500511169434, "learning_rate": 0.0007963378176382661, "loss": 3.7046, "step": 23990 }, { "epoch": 1.630316619105857, "grad_norm": 1.2029874324798584, "learning_rate": 0.0007962953526294333, "loss": 3.7073, "step": 23995 }, { "epoch": 1.6306563391765185, "grad_norm": 1.2349467277526855, "learning_rate": 0.0007962528876206006, "loss": 3.6284, "step": 24000 }, { "epoch": 1.6309960592471802, "grad_norm": 1.5119150876998901, "learning_rate": 0.000796210422611768, "loss": 3.4863, "step": 24005 }, { "epoch": 1.6313357793178422, "grad_norm": 2.358738899230957, "learning_rate": 0.0007961679576029352, "loss": 3.3893, "step": 24010 }, { "epoch": 1.6316754993885039, "grad_norm": 1.4333430528640747, "learning_rate": 0.0007961254925941025, "loss": 3.3977, "step": 24015 }, { "epoch": 1.6320152194591655, "grad_norm": 1.2046984434127808, "learning_rate": 0.0007960830275852697, "loss": 3.7274, "step": 24020 }, { "epoch": 1.6323549395298276, "grad_norm": 1.3186802864074707, "learning_rate": 0.000796040562576437, "loss": 3.7042, "step": 24025 }, { "epoch": 1.6326946596004892, "grad_norm": 1.144371747970581, "learning_rate": 0.0007959980975676043, "loss": 3.6772, "step": 24030 }, { "epoch": 1.6330343796711508, "grad_norm": 1.2246867418289185, "learning_rate": 0.0007959556325587715, "loss": 3.6445, "step": 24035 }, { "epoch": 1.633374099741813, "grad_norm": 1.2240707874298096, "learning_rate": 0.000795913167549939, "loss": 3.5559, "step": 24040 }, { "epoch": 1.6337138198124745, "grad_norm": 3.890946626663208, "learning_rate": 0.0007958707025411062, "loss": 3.5183, "step": 24045 }, { "epoch": 1.6340535398831362, 
"grad_norm": 1.6294724941253662, "learning_rate": 0.0007958282375322734, "loss": 3.4831, "step": 24050 }, { "epoch": 1.634393259953798, "grad_norm": 1.4461690187454224, "learning_rate": 0.0007957857725234408, "loss": 3.2669, "step": 24055 }, { "epoch": 1.6347329800244599, "grad_norm": 1.1117630004882812, "learning_rate": 0.000795743307514608, "loss": 3.6437, "step": 24060 }, { "epoch": 1.6350727000951215, "grad_norm": 1.1890597343444824, "learning_rate": 0.0007957008425057752, "loss": 3.7927, "step": 24065 }, { "epoch": 1.6354124201657834, "grad_norm": 1.5633950233459473, "learning_rate": 0.0007956583774969425, "loss": 3.7742, "step": 24070 }, { "epoch": 1.6357521402364452, "grad_norm": 1.0457028150558472, "learning_rate": 0.0007956159124881099, "loss": 3.6506, "step": 24075 }, { "epoch": 1.6360918603071068, "grad_norm": 1.2874082326889038, "learning_rate": 0.0007955734474792771, "loss": 3.8436, "step": 24080 }, { "epoch": 1.6364315803777687, "grad_norm": 1.3151531219482422, "learning_rate": 0.0007955309824704444, "loss": 4.0406, "step": 24085 }, { "epoch": 1.6367713004484306, "grad_norm": 1.6897907257080078, "learning_rate": 0.0007954885174616117, "loss": 3.6088, "step": 24090 }, { "epoch": 1.6371110205190922, "grad_norm": 1.403216004371643, "learning_rate": 0.0007954460524527789, "loss": 3.5636, "step": 24095 }, { "epoch": 1.637450740589754, "grad_norm": 1.4081348180770874, "learning_rate": 0.0007954035874439462, "loss": 3.6143, "step": 24100 }, { "epoch": 1.637790460660416, "grad_norm": 1.3975932598114014, "learning_rate": 0.0007953611224351134, "loss": 3.5924, "step": 24105 }, { "epoch": 1.6381301807310775, "grad_norm": 1.4803853034973145, "learning_rate": 0.0007953186574262808, "loss": 3.5043, "step": 24110 }, { "epoch": 1.6384699008017394, "grad_norm": 1.3079506158828735, "learning_rate": 0.0007952761924174481, "loss": 3.5382, "step": 24115 }, { "epoch": 1.6388096208724012, "grad_norm": 1.6161797046661377, "learning_rate": 0.0007952337274086153, "loss": 
3.5855, "step": 24120 }, { "epoch": 1.6391493409430629, "grad_norm": 1.3096120357513428, "learning_rate": 0.0007951912623997826, "loss": 3.6388, "step": 24125 }, { "epoch": 1.6394890610137247, "grad_norm": 1.1429561376571655, "learning_rate": 0.0007951487973909499, "loss": 3.655, "step": 24130 }, { "epoch": 1.6398287810843866, "grad_norm": 1.4898838996887207, "learning_rate": 0.0007951063323821171, "loss": 3.5576, "step": 24135 }, { "epoch": 1.6401685011550482, "grad_norm": 1.5161864757537842, "learning_rate": 0.0007950638673732843, "loss": 3.7582, "step": 24140 }, { "epoch": 1.64050822122571, "grad_norm": 1.3571258783340454, "learning_rate": 0.0007950214023644518, "loss": 3.4492, "step": 24145 }, { "epoch": 1.640847941296372, "grad_norm": 1.16364586353302, "learning_rate": 0.000794978937355619, "loss": 3.7655, "step": 24150 }, { "epoch": 1.6411876613670335, "grad_norm": 1.567591905593872, "learning_rate": 0.0007949364723467862, "loss": 3.4962, "step": 24155 }, { "epoch": 1.6415273814376954, "grad_norm": 1.625676155090332, "learning_rate": 0.0007948940073379536, "loss": 3.1611, "step": 24160 }, { "epoch": 1.6418671015083572, "grad_norm": 1.2612632513046265, "learning_rate": 0.0007948515423291208, "loss": 3.7122, "step": 24165 }, { "epoch": 1.6422068215790189, "grad_norm": 1.3868526220321655, "learning_rate": 0.000794809077320288, "loss": 3.2869, "step": 24170 }, { "epoch": 1.6425465416496805, "grad_norm": 1.4961578845977783, "learning_rate": 0.0007947666123114554, "loss": 3.5478, "step": 24175 }, { "epoch": 1.6428862617203426, "grad_norm": 1.3149811029434204, "learning_rate": 0.0007947241473026227, "loss": 3.5233, "step": 24180 }, { "epoch": 1.6432259817910042, "grad_norm": 2.0599515438079834, "learning_rate": 0.0007946816822937899, "loss": 3.7285, "step": 24185 }, { "epoch": 1.6435657018616658, "grad_norm": 1.3248729705810547, "learning_rate": 0.0007946392172849573, "loss": 3.29, "step": 24190 }, { "epoch": 1.643905421932328, "grad_norm": 1.2336013317108154, 
"learning_rate": 0.0007945967522761245, "loss": 3.528, "step": 24195 }, { "epoch": 1.6442451420029895, "grad_norm": 1.373941421508789, "learning_rate": 0.0007945542872672917, "loss": 3.8371, "step": 24200 }, { "epoch": 1.6445848620736512, "grad_norm": 1.2046750783920288, "learning_rate": 0.000794511822258459, "loss": 3.7489, "step": 24205 }, { "epoch": 1.6449245821443133, "grad_norm": 0.9907199740409851, "learning_rate": 0.0007944693572496263, "loss": 3.4333, "step": 24210 }, { "epoch": 1.6452643022149749, "grad_norm": 1.4450747966766357, "learning_rate": 0.0007944268922407936, "loss": 3.5693, "step": 24215 }, { "epoch": 1.6456040222856365, "grad_norm": 1.4344724416732788, "learning_rate": 0.0007943844272319609, "loss": 3.6072, "step": 24220 }, { "epoch": 1.6459437423562984, "grad_norm": 1.978028416633606, "learning_rate": 0.0007943419622231282, "loss": 3.5824, "step": 24225 }, { "epoch": 1.6462834624269602, "grad_norm": 1.7284966707229614, "learning_rate": 0.0007942994972142954, "loss": 3.6514, "step": 24230 }, { "epoch": 1.6466231824976219, "grad_norm": 1.3441507816314697, "learning_rate": 0.0007942570322054627, "loss": 3.4773, "step": 24235 }, { "epoch": 1.6469629025682837, "grad_norm": 1.1722644567489624, "learning_rate": 0.00079421456719663, "loss": 3.6591, "step": 24240 }, { "epoch": 1.6473026226389456, "grad_norm": 1.1255048513412476, "learning_rate": 0.0007941721021877972, "loss": 3.4708, "step": 24245 }, { "epoch": 1.6476423427096072, "grad_norm": 1.1710830926895142, "learning_rate": 0.0007941296371789646, "loss": 3.6807, "step": 24250 }, { "epoch": 1.647982062780269, "grad_norm": 1.820044994354248, "learning_rate": 0.0007940871721701318, "loss": 3.5483, "step": 24255 }, { "epoch": 1.648321782850931, "grad_norm": 1.3650000095367432, "learning_rate": 0.0007940447071612991, "loss": 3.7071, "step": 24260 }, { "epoch": 1.6486615029215925, "grad_norm": 6.438742160797119, "learning_rate": 0.0007940022421524664, "loss": 3.7239, "step": 24265 }, { "epoch": 
1.6490012229922544, "grad_norm": 2.3314902782440186, "learning_rate": 0.0007939597771436336, "loss": 3.548, "step": 24270 }, { "epoch": 1.6493409430629162, "grad_norm": 1.124151349067688, "learning_rate": 0.0007939173121348009, "loss": 3.5137, "step": 24275 }, { "epoch": 1.6496806631335779, "grad_norm": 1.9692270755767822, "learning_rate": 0.0007938748471259683, "loss": 3.8212, "step": 24280 }, { "epoch": 1.6500203832042397, "grad_norm": 1.2538806200027466, "learning_rate": 0.0007938323821171355, "loss": 3.4465, "step": 24285 }, { "epoch": 1.6503601032749016, "grad_norm": 1.8465170860290527, "learning_rate": 0.0007937899171083028, "loss": 3.4733, "step": 24290 }, { "epoch": 1.6506998233455632, "grad_norm": 1.511839747428894, "learning_rate": 0.0007937474520994701, "loss": 3.8151, "step": 24295 }, { "epoch": 1.651039543416225, "grad_norm": 1.5605610609054565, "learning_rate": 0.0007937049870906373, "loss": 3.4256, "step": 24300 }, { "epoch": 1.651379263486887, "grad_norm": 1.1925573348999023, "learning_rate": 0.0007936625220818045, "loss": 3.6036, "step": 24305 }, { "epoch": 1.6517189835575485, "grad_norm": 1.4499708414077759, "learning_rate": 0.0007936200570729719, "loss": 3.6551, "step": 24310 }, { "epoch": 1.6520587036282104, "grad_norm": 1.1050344705581665, "learning_rate": 0.0007935775920641392, "loss": 3.6715, "step": 24315 }, { "epoch": 1.6523984236988722, "grad_norm": 1.5083160400390625, "learning_rate": 0.0007935351270553064, "loss": 3.5411, "step": 24320 }, { "epoch": 1.6527381437695339, "grad_norm": 1.1414016485214233, "learning_rate": 0.0007934926620464738, "loss": 3.5899, "step": 24325 }, { "epoch": 1.6530778638401957, "grad_norm": 1.4946118593215942, "learning_rate": 0.000793450197037641, "loss": 3.44, "step": 24330 }, { "epoch": 1.6534175839108576, "grad_norm": 1.2709985971450806, "learning_rate": 0.0007934077320288082, "loss": 3.5448, "step": 24335 }, { "epoch": 1.6537573039815192, "grad_norm": 1.264732003211975, "learning_rate": 
0.0007933652670199756, "loss": 3.4113, "step": 24340 }, { "epoch": 1.6540970240521808, "grad_norm": 1.1927975416183472, "learning_rate": 0.0007933228020111428, "loss": 3.5494, "step": 24345 }, { "epoch": 1.654436744122843, "grad_norm": 1.4398038387298584, "learning_rate": 0.0007932803370023101, "loss": 3.7251, "step": 24350 }, { "epoch": 1.6547764641935045, "grad_norm": 1.2737462520599365, "learning_rate": 0.0007932378719934774, "loss": 3.5644, "step": 24355 }, { "epoch": 1.6551161842641662, "grad_norm": 1.3725371360778809, "learning_rate": 0.0007931954069846447, "loss": 3.6697, "step": 24360 }, { "epoch": 1.6554559043348283, "grad_norm": 1.1562385559082031, "learning_rate": 0.0007931529419758119, "loss": 3.5817, "step": 24365 }, { "epoch": 1.6557956244054899, "grad_norm": 1.2704498767852783, "learning_rate": 0.0007931104769669792, "loss": 3.7172, "step": 24370 }, { "epoch": 1.6561353444761515, "grad_norm": 1.2604660987854004, "learning_rate": 0.0007930680119581465, "loss": 3.5487, "step": 24375 }, { "epoch": 1.6564750645468136, "grad_norm": 1.1362173557281494, "learning_rate": 0.0007930255469493138, "loss": 3.433, "step": 24380 }, { "epoch": 1.6568147846174752, "grad_norm": 1.3450204133987427, "learning_rate": 0.0007929830819404811, "loss": 3.8698, "step": 24385 }, { "epoch": 1.6571545046881369, "grad_norm": 1.2935442924499512, "learning_rate": 0.0007929406169316484, "loss": 3.5716, "step": 24390 }, { "epoch": 1.6574942247587987, "grad_norm": 1.4113314151763916, "learning_rate": 0.0007928981519228157, "loss": 3.7142, "step": 24395 }, { "epoch": 1.6578339448294606, "grad_norm": 1.181688666343689, "learning_rate": 0.0007928556869139829, "loss": 3.7604, "step": 24400 }, { "epoch": 1.6581736649001222, "grad_norm": 1.4217017889022827, "learning_rate": 0.0007928132219051501, "loss": 3.5246, "step": 24405 }, { "epoch": 1.658513384970784, "grad_norm": 1.2738250494003296, "learning_rate": 0.0007927707568963175, "loss": 3.6575, "step": 24410 }, { "epoch": 1.658853105041446, 
"grad_norm": 1.4355518817901611, "learning_rate": 0.0007927282918874847, "loss": 3.5579, "step": 24415 }, { "epoch": 1.6591928251121075, "grad_norm": 1.3256713151931763, "learning_rate": 0.000792685826878652, "loss": 3.545, "step": 24420 }, { "epoch": 1.6595325451827694, "grad_norm": 1.480383276939392, "learning_rate": 0.0007926433618698194, "loss": 3.6678, "step": 24425 }, { "epoch": 1.6598722652534312, "grad_norm": 1.2839874029159546, "learning_rate": 0.0007926008968609866, "loss": 3.7086, "step": 24430 }, { "epoch": 1.6602119853240929, "grad_norm": 1.4594266414642334, "learning_rate": 0.0007925584318521538, "loss": 3.6055, "step": 24435 }, { "epoch": 1.6605517053947547, "grad_norm": 1.7217376232147217, "learning_rate": 0.0007925159668433212, "loss": 3.6119, "step": 24440 }, { "epoch": 1.6608914254654166, "grad_norm": 1.232133388519287, "learning_rate": 0.0007924735018344884, "loss": 3.5793, "step": 24445 }, { "epoch": 1.6612311455360782, "grad_norm": 1.0531150102615356, "learning_rate": 0.0007924310368256556, "loss": 3.7081, "step": 24450 }, { "epoch": 1.66157086560674, "grad_norm": 1.6760601997375488, "learning_rate": 0.000792388571816823, "loss": 3.5578, "step": 24455 }, { "epoch": 1.661910585677402, "grad_norm": 1.1965383291244507, "learning_rate": 0.0007923461068079903, "loss": 3.7967, "step": 24460 }, { "epoch": 1.6622503057480635, "grad_norm": 1.5488543510437012, "learning_rate": 0.0007923036417991575, "loss": 3.6145, "step": 24465 }, { "epoch": 1.6625900258187254, "grad_norm": 3.529010534286499, "learning_rate": 0.0007922611767903248, "loss": 3.8135, "step": 24470 }, { "epoch": 1.6629297458893872, "grad_norm": 2.7681190967559814, "learning_rate": 0.0007922187117814921, "loss": 3.7338, "step": 24475 }, { "epoch": 1.6632694659600489, "grad_norm": 1.1216248273849487, "learning_rate": 0.0007921762467726593, "loss": 3.7799, "step": 24480 }, { "epoch": 1.6636091860307107, "grad_norm": 1.4836260080337524, "learning_rate": 0.0007921337817638266, "loss": 3.4387, 
"step": 24485 }, { "epoch": 1.6639489061013726, "grad_norm": 1.3058602809906006, "learning_rate": 0.000792091316754994, "loss": 3.5284, "step": 24490 }, { "epoch": 1.6642886261720342, "grad_norm": 1.8352466821670532, "learning_rate": 0.0007920488517461612, "loss": 3.5209, "step": 24495 }, { "epoch": 1.664628346242696, "grad_norm": 1.227566123008728, "learning_rate": 0.0007920063867373285, "loss": 3.8218, "step": 24500 }, { "epoch": 1.664968066313358, "grad_norm": 1.3657146692276, "learning_rate": 0.0007919639217284957, "loss": 3.5082, "step": 24505 }, { "epoch": 1.6653077863840196, "grad_norm": 1.3241747617721558, "learning_rate": 0.000791921456719663, "loss": 3.5489, "step": 24510 }, { "epoch": 1.6656475064546812, "grad_norm": 1.0754691362380981, "learning_rate": 0.0007918789917108303, "loss": 3.4144, "step": 24515 }, { "epoch": 1.6659872265253433, "grad_norm": 1.452553629875183, "learning_rate": 0.0007918365267019975, "loss": 3.4308, "step": 24520 }, { "epoch": 1.6663269465960049, "grad_norm": 1.0390487909317017, "learning_rate": 0.0007917940616931649, "loss": 3.6902, "step": 24525 }, { "epoch": 1.6666666666666665, "grad_norm": 1.3130162954330444, "learning_rate": 0.0007917515966843322, "loss": 3.6912, "step": 24530 }, { "epoch": 1.6670063867373286, "grad_norm": 1.548625111579895, "learning_rate": 0.0007917091316754994, "loss": 3.7353, "step": 24535 }, { "epoch": 1.6673461068079902, "grad_norm": 1.4269354343414307, "learning_rate": 0.0007916666666666666, "loss": 3.5951, "step": 24540 }, { "epoch": 1.6676858268786519, "grad_norm": 1.3057600259780884, "learning_rate": 0.000791624201657834, "loss": 3.6405, "step": 24545 }, { "epoch": 1.668025546949314, "grad_norm": 1.5141143798828125, "learning_rate": 0.0007915817366490012, "loss": 3.5977, "step": 24550 }, { "epoch": 1.6683652670199756, "grad_norm": 1.2172667980194092, "learning_rate": 0.0007915392716401684, "loss": 3.8944, "step": 24555 }, { "epoch": 1.6687049870906372, "grad_norm": 1.076653242111206, 
"learning_rate": 0.0007914968066313359, "loss": 3.597, "step": 24560 }, { "epoch": 1.669044707161299, "grad_norm": 1.0477195978164673, "learning_rate": 0.0007914543416225031, "loss": 3.7682, "step": 24565 }, { "epoch": 1.669384427231961, "grad_norm": 1.203762173652649, "learning_rate": 0.0007914118766136703, "loss": 3.8639, "step": 24570 }, { "epoch": 1.6697241473026225, "grad_norm": 1.3573834896087646, "learning_rate": 0.0007913694116048377, "loss": 3.4773, "step": 24575 }, { "epoch": 1.6700638673732844, "grad_norm": 1.4982397556304932, "learning_rate": 0.0007913269465960049, "loss": 3.7005, "step": 24580 }, { "epoch": 1.6704035874439462, "grad_norm": 1.947966456413269, "learning_rate": 0.0007912844815871721, "loss": 3.6035, "step": 24585 }, { "epoch": 1.6707433075146079, "grad_norm": 1.222813367843628, "learning_rate": 0.0007912420165783394, "loss": 3.4196, "step": 24590 }, { "epoch": 1.6710830275852697, "grad_norm": 1.0647673606872559, "learning_rate": 0.0007911995515695068, "loss": 3.628, "step": 24595 }, { "epoch": 1.6714227476559316, "grad_norm": 1.1924124956130981, "learning_rate": 0.000791157086560674, "loss": 3.4078, "step": 24600 }, { "epoch": 1.6717624677265932, "grad_norm": 1.311295509338379, "learning_rate": 0.0007911146215518413, "loss": 3.5725, "step": 24605 }, { "epoch": 1.672102187797255, "grad_norm": 1.5224164724349976, "learning_rate": 0.0007910721565430086, "loss": 3.7463, "step": 24610 }, { "epoch": 1.672441907867917, "grad_norm": 1.5262459516525269, "learning_rate": 0.0007910296915341758, "loss": 3.7723, "step": 24615 }, { "epoch": 1.6727816279385785, "grad_norm": 3.4491207599639893, "learning_rate": 0.0007909872265253431, "loss": 3.3804, "step": 24620 }, { "epoch": 1.6731213480092404, "grad_norm": 1.3065603971481323, "learning_rate": 0.0007909447615165104, "loss": 3.6337, "step": 24625 }, { "epoch": 1.6734610680799022, "grad_norm": 1.898334264755249, "learning_rate": 0.0007909022965076777, "loss": 3.7662, "step": 24630 }, { "epoch": 
1.6738007881505639, "grad_norm": 1.1223403215408325, "learning_rate": 0.000790859831498845, "loss": 3.6668, "step": 24635 }, { "epoch": 1.6741405082212257, "grad_norm": 1.2102267742156982, "learning_rate": 0.0007908173664900122, "loss": 3.6079, "step": 24640 }, { "epoch": 1.6744802282918876, "grad_norm": 1.3095556497573853, "learning_rate": 0.0007907749014811795, "loss": 3.6667, "step": 24645 }, { "epoch": 1.6748199483625492, "grad_norm": 1.2442665100097656, "learning_rate": 0.0007907324364723468, "loss": 3.4422, "step": 24650 }, { "epoch": 1.675159668433211, "grad_norm": 1.6629438400268555, "learning_rate": 0.000790689971463514, "loss": 3.7044, "step": 24655 }, { "epoch": 1.675499388503873, "grad_norm": 1.3282963037490845, "learning_rate": 0.0007906475064546813, "loss": 3.4969, "step": 24660 }, { "epoch": 1.6758391085745346, "grad_norm": 1.3098843097686768, "learning_rate": 0.0007906050414458487, "loss": 3.7168, "step": 24665 }, { "epoch": 1.6761788286451964, "grad_norm": 1.782535195350647, "learning_rate": 0.0007905625764370159, "loss": 3.5077, "step": 24670 }, { "epoch": 1.6765185487158583, "grad_norm": 1.2407444715499878, "learning_rate": 0.0007905201114281832, "loss": 3.7347, "step": 24675 }, { "epoch": 1.67685826878652, "grad_norm": 1.129311203956604, "learning_rate": 0.0007904776464193505, "loss": 3.4712, "step": 24680 }, { "epoch": 1.6771979888571815, "grad_norm": 1.446895956993103, "learning_rate": 0.0007904351814105177, "loss": 3.8479, "step": 24685 }, { "epoch": 1.6775377089278436, "grad_norm": 1.905299425125122, "learning_rate": 0.0007903927164016849, "loss": 3.6407, "step": 24690 }, { "epoch": 1.6778774289985052, "grad_norm": 1.573884129524231, "learning_rate": 0.0007903502513928523, "loss": 3.6626, "step": 24695 }, { "epoch": 1.6782171490691669, "grad_norm": 1.2224366664886475, "learning_rate": 0.0007903077863840196, "loss": 3.5434, "step": 24700 }, { "epoch": 1.678556869139829, "grad_norm": 1.4240459203720093, "learning_rate": 0.0007902653213751868, 
"loss": 3.6487, "step": 24705 }, { "epoch": 1.6788965892104906, "grad_norm": 1.1450217962265015, "learning_rate": 0.0007902228563663542, "loss": 3.6256, "step": 24710 }, { "epoch": 1.6792363092811522, "grad_norm": 12.048566818237305, "learning_rate": 0.0007901803913575214, "loss": 3.8907, "step": 24715 }, { "epoch": 1.6795760293518143, "grad_norm": 3.5553627014160156, "learning_rate": 0.0007901379263486887, "loss": 3.5708, "step": 24720 }, { "epoch": 1.679915749422476, "grad_norm": 1.184717059135437, "learning_rate": 0.000790095461339856, "loss": 3.4621, "step": 24725 }, { "epoch": 1.6802554694931375, "grad_norm": 1.55134117603302, "learning_rate": 0.0007900529963310232, "loss": 3.6676, "step": 24730 }, { "epoch": 1.6805951895637994, "grad_norm": 1.8036682605743408, "learning_rate": 0.0007900105313221906, "loss": 3.6279, "step": 24735 }, { "epoch": 1.6809349096344612, "grad_norm": 1.0451287031173706, "learning_rate": 0.0007899680663133579, "loss": 3.5972, "step": 24740 }, { "epoch": 1.6812746297051229, "grad_norm": 1.4373153448104858, "learning_rate": 0.0007899256013045251, "loss": 3.8967, "step": 24745 }, { "epoch": 1.6816143497757847, "grad_norm": 2.2885215282440186, "learning_rate": 0.0007898831362956924, "loss": 3.6781, "step": 24750 }, { "epoch": 1.6819540698464466, "grad_norm": 1.2931582927703857, "learning_rate": 0.0007898406712868596, "loss": 3.5693, "step": 24755 }, { "epoch": 1.6822937899171082, "grad_norm": 1.3562191724777222, "learning_rate": 0.0007897982062780269, "loss": 3.4157, "step": 24760 }, { "epoch": 1.68263350998777, "grad_norm": 1.1658076047897339, "learning_rate": 0.0007897557412691942, "loss": 3.1198, "step": 24765 }, { "epoch": 1.682973230058432, "grad_norm": 2.1665894985198975, "learning_rate": 0.0007897132762603615, "loss": 3.4804, "step": 24770 }, { "epoch": 1.6833129501290935, "grad_norm": 1.2151201963424683, "learning_rate": 0.0007896708112515288, "loss": 3.6457, "step": 24775 }, { "epoch": 1.6836526701997554, "grad_norm": 
1.4208903312683105, "learning_rate": 0.0007896283462426961, "loss": 3.4438, "step": 24780 }, { "epoch": 1.6839923902704172, "grad_norm": 1.3591113090515137, "learning_rate": 0.0007895858812338633, "loss": 3.5856, "step": 24785 }, { "epoch": 1.6843321103410789, "grad_norm": 1.0284219980239868, "learning_rate": 0.0007895434162250305, "loss": 3.4501, "step": 24790 }, { "epoch": 1.6846718304117407, "grad_norm": 1.2531086206436157, "learning_rate": 0.0007895009512161979, "loss": 3.5968, "step": 24795 }, { "epoch": 1.6850115504824026, "grad_norm": 1.1157997846603394, "learning_rate": 0.0007894584862073651, "loss": 3.4742, "step": 24800 }, { "epoch": 1.6853512705530642, "grad_norm": 1.5016025304794312, "learning_rate": 0.0007894160211985324, "loss": 3.3434, "step": 24805 }, { "epoch": 1.685690990623726, "grad_norm": 2.1933417320251465, "learning_rate": 0.0007893735561896998, "loss": 3.1687, "step": 24810 }, { "epoch": 1.686030710694388, "grad_norm": 1.2975763082504272, "learning_rate": 0.000789331091180867, "loss": 3.6307, "step": 24815 }, { "epoch": 1.6863704307650496, "grad_norm": 1.3379111289978027, "learning_rate": 0.0007892886261720342, "loss": 3.5201, "step": 24820 }, { "epoch": 1.6867101508357114, "grad_norm": 1.1806894540786743, "learning_rate": 0.0007892461611632016, "loss": 3.5952, "step": 24825 }, { "epoch": 1.6870498709063733, "grad_norm": 1.389589548110962, "learning_rate": 0.0007892036961543688, "loss": 3.8362, "step": 24830 }, { "epoch": 1.687389590977035, "grad_norm": 1.2096058130264282, "learning_rate": 0.000789161231145536, "loss": 3.5228, "step": 24835 }, { "epoch": 1.6877293110476967, "grad_norm": 1.1324658393859863, "learning_rate": 0.0007891187661367035, "loss": 3.6817, "step": 24840 }, { "epoch": 1.6880690311183586, "grad_norm": 1.340321660041809, "learning_rate": 0.0007890763011278707, "loss": 3.71, "step": 24845 }, { "epoch": 1.6884087511890202, "grad_norm": 1.4233360290527344, "learning_rate": 0.0007890338361190379, "loss": 3.4366, "step": 24850 
}, { "epoch": 1.6887484712596819, "grad_norm": 1.1386281251907349, "learning_rate": 0.0007889913711102052, "loss": 3.4193, "step": 24855 }, { "epoch": 1.689088191330344, "grad_norm": 1.2421454191207886, "learning_rate": 0.0007889489061013725, "loss": 3.6865, "step": 24860 }, { "epoch": 1.6894279114010056, "grad_norm": 1.2989827394485474, "learning_rate": 0.0007889064410925397, "loss": 3.4451, "step": 24865 }, { "epoch": 1.6897676314716672, "grad_norm": 1.2895359992980957, "learning_rate": 0.0007888639760837071, "loss": 3.4147, "step": 24870 }, { "epoch": 1.6901073515423293, "grad_norm": 1.1038788557052612, "learning_rate": 0.0007888215110748744, "loss": 3.5616, "step": 24875 }, { "epoch": 1.690447071612991, "grad_norm": 1.0726652145385742, "learning_rate": 0.0007887790460660416, "loss": 3.4614, "step": 24880 }, { "epoch": 1.6907867916836525, "grad_norm": 2.398037910461426, "learning_rate": 0.0007887365810572089, "loss": 3.3781, "step": 24885 }, { "epoch": 1.6911265117543146, "grad_norm": 1.2423174381256104, "learning_rate": 0.0007886941160483761, "loss": 3.5773, "step": 24890 }, { "epoch": 1.6914662318249762, "grad_norm": 1.578305959701538, "learning_rate": 0.0007886516510395434, "loss": 3.6404, "step": 24895 }, { "epoch": 1.6918059518956379, "grad_norm": 1.0930159091949463, "learning_rate": 0.0007886091860307107, "loss": 3.4245, "step": 24900 }, { "epoch": 1.6921456719662997, "grad_norm": 1.5704890489578247, "learning_rate": 0.000788566721021878, "loss": 3.4128, "step": 24905 }, { "epoch": 1.6924853920369616, "grad_norm": 1.2084709405899048, "learning_rate": 0.0007885242560130453, "loss": 3.7296, "step": 24910 }, { "epoch": 1.6928251121076232, "grad_norm": 1.2945857048034668, "learning_rate": 0.0007884817910042126, "loss": 3.5682, "step": 24915 }, { "epoch": 1.693164832178285, "grad_norm": 1.2978886365890503, "learning_rate": 0.0007884393259953798, "loss": 3.6015, "step": 24920 }, { "epoch": 1.693504552248947, "grad_norm": 1.2556092739105225, "learning_rate": 
0.000788396860986547, "loss": 3.9535, "step": 24925 }, { "epoch": 1.6938442723196085, "grad_norm": 1.8297568559646606, "learning_rate": 0.0007883543959777144, "loss": 3.2762, "step": 24930 }, { "epoch": 1.6941839923902704, "grad_norm": 1.0603591203689575, "learning_rate": 0.0007883119309688816, "loss": 3.8779, "step": 24935 }, { "epoch": 1.6945237124609323, "grad_norm": 1.433775544166565, "learning_rate": 0.0007882694659600489, "loss": 3.4546, "step": 24940 }, { "epoch": 1.6948634325315939, "grad_norm": 1.2286499738693237, "learning_rate": 0.0007882270009512163, "loss": 3.6281, "step": 24945 }, { "epoch": 1.6952031526022557, "grad_norm": 1.6964120864868164, "learning_rate": 0.0007881845359423835, "loss": 3.6245, "step": 24950 }, { "epoch": 1.6955428726729176, "grad_norm": 1.5339699983596802, "learning_rate": 0.0007881420709335507, "loss": 3.6626, "step": 24955 }, { "epoch": 1.6958825927435792, "grad_norm": 1.3028963804244995, "learning_rate": 0.0007880996059247181, "loss": 3.2527, "step": 24960 }, { "epoch": 1.696222312814241, "grad_norm": 1.161851406097412, "learning_rate": 0.0007880571409158853, "loss": 3.4699, "step": 24965 }, { "epoch": 1.696562032884903, "grad_norm": 1.6118552684783936, "learning_rate": 0.0007880146759070525, "loss": 3.7081, "step": 24970 }, { "epoch": 1.6969017529555646, "grad_norm": 1.1439189910888672, "learning_rate": 0.00078797221089822, "loss": 3.654, "step": 24975 }, { "epoch": 1.6972414730262264, "grad_norm": 1.2751868963241577, "learning_rate": 0.0007879297458893872, "loss": 3.5175, "step": 24980 }, { "epoch": 1.6975811930968883, "grad_norm": 1.5788350105285645, "learning_rate": 0.0007878872808805544, "loss": 3.3214, "step": 24985 }, { "epoch": 1.69792091316755, "grad_norm": 1.2668722867965698, "learning_rate": 0.0007878448158717217, "loss": 3.5449, "step": 24990 }, { "epoch": 1.6982606332382117, "grad_norm": 1.3957774639129639, "learning_rate": 0.000787802350862889, "loss": 3.7501, "step": 24995 }, { "epoch": 1.6986003533088736, 
"grad_norm": 1.1232844591140747, "learning_rate": 0.0007877598858540562, "loss": 3.6657, "step": 25000 }, { "epoch": 1.6989400733795352, "grad_norm": 1.2781869173049927, "learning_rate": 0.0007877174208452235, "loss": 3.5356, "step": 25005 }, { "epoch": 1.699279793450197, "grad_norm": 1.5139356851577759, "learning_rate": 0.0007876749558363909, "loss": 3.7433, "step": 25010 }, { "epoch": 1.699619513520859, "grad_norm": 1.4858046770095825, "learning_rate": 0.0007876324908275581, "loss": 3.3949, "step": 25015 }, { "epoch": 1.6999592335915206, "grad_norm": 1.4656959772109985, "learning_rate": 0.0007875900258187254, "loss": 3.7149, "step": 25020 }, { "epoch": 1.7002989536621822, "grad_norm": 1.739025592803955, "learning_rate": 0.0007875475608098927, "loss": 3.6282, "step": 25025 }, { "epoch": 1.7006386737328443, "grad_norm": 1.2555748224258423, "learning_rate": 0.0007875050958010599, "loss": 3.6424, "step": 25030 }, { "epoch": 1.700978393803506, "grad_norm": 1.177536129951477, "learning_rate": 0.0007874626307922272, "loss": 3.2696, "step": 25035 }, { "epoch": 1.7013181138741675, "grad_norm": 1.440755844116211, "learning_rate": 0.0007874201657833944, "loss": 3.7318, "step": 25040 }, { "epoch": 1.7016578339448296, "grad_norm": 1.3385366201400757, "learning_rate": 0.0007873777007745618, "loss": 3.5962, "step": 25045 }, { "epoch": 1.7019975540154912, "grad_norm": 1.4707765579223633, "learning_rate": 0.0007873352357657291, "loss": 3.5958, "step": 25050 }, { "epoch": 1.7023372740861529, "grad_norm": 1.6026078462600708, "learning_rate": 0.0007872927707568963, "loss": 3.5497, "step": 25055 }, { "epoch": 1.702676994156815, "grad_norm": 1.3270305395126343, "learning_rate": 0.0007872503057480637, "loss": 3.5146, "step": 25060 }, { "epoch": 1.7030167142274766, "grad_norm": 1.0326151847839355, "learning_rate": 0.0007872078407392309, "loss": 3.3253, "step": 25065 }, { "epoch": 1.7033564342981382, "grad_norm": 1.1665436029434204, "learning_rate": 0.0007871653757303981, "loss": 3.6772, 
"step": 25070 }, { "epoch": 1.7036961543688, "grad_norm": 1.209957480430603, "learning_rate": 0.0007871229107215655, "loss": 3.2965, "step": 25075 }, { "epoch": 1.704035874439462, "grad_norm": 1.1079269647598267, "learning_rate": 0.0007870804457127328, "loss": 3.4967, "step": 25080 }, { "epoch": 1.7043755945101235, "grad_norm": 1.3267030715942383, "learning_rate": 0.0007870379807039, "loss": 3.6662, "step": 25085 }, { "epoch": 1.7047153145807854, "grad_norm": 1.0032449960708618, "learning_rate": 0.0007869955156950673, "loss": 3.6167, "step": 25090 }, { "epoch": 1.7050550346514473, "grad_norm": 1.2946958541870117, "learning_rate": 0.0007869530506862346, "loss": 3.6755, "step": 25095 }, { "epoch": 1.7053947547221089, "grad_norm": 1.1642515659332275, "learning_rate": 0.0007869105856774018, "loss": 3.6394, "step": 25100 }, { "epoch": 1.7057344747927707, "grad_norm": 1.426925539970398, "learning_rate": 0.0007868681206685691, "loss": 3.6659, "step": 25105 }, { "epoch": 1.7060741948634326, "grad_norm": 0.976898729801178, "learning_rate": 0.0007868256556597364, "loss": 3.5774, "step": 25110 }, { "epoch": 1.7064139149340942, "grad_norm": 1.1787259578704834, "learning_rate": 0.0007867831906509037, "loss": 3.537, "step": 25115 }, { "epoch": 1.706753635004756, "grad_norm": 1.2691556215286255, "learning_rate": 0.000786740725642071, "loss": 3.4327, "step": 25120 }, { "epoch": 1.707093355075418, "grad_norm": 1.5942116975784302, "learning_rate": 0.0007866982606332383, "loss": 3.5954, "step": 25125 }, { "epoch": 1.7074330751460796, "grad_norm": 1.274442434310913, "learning_rate": 0.0007866557956244055, "loss": 3.9157, "step": 25130 }, { "epoch": 1.7077727952167414, "grad_norm": 1.1958403587341309, "learning_rate": 0.0007866133306155728, "loss": 3.4841, "step": 25135 }, { "epoch": 1.7081125152874033, "grad_norm": 1.0675551891326904, "learning_rate": 0.00078657086560674, "loss": 3.7632, "step": 25140 }, { "epoch": 1.708452235358065, "grad_norm": 1.781571626663208, "learning_rate": 
0.0007865284005979073, "loss": 3.6189, "step": 25145 }, { "epoch": 1.7087919554287267, "grad_norm": 1.2592705488204956, "learning_rate": 0.0007864859355890747, "loss": 3.893, "step": 25150 }, { "epoch": 1.7091316754993886, "grad_norm": 1.9648029804229736, "learning_rate": 0.0007864434705802419, "loss": 3.7145, "step": 25155 }, { "epoch": 1.7094713955700502, "grad_norm": 1.8630398511886597, "learning_rate": 0.0007864010055714092, "loss": 3.7209, "step": 25160 }, { "epoch": 1.709811115640712, "grad_norm": 1.1949024200439453, "learning_rate": 0.0007863585405625765, "loss": 3.5463, "step": 25165 }, { "epoch": 1.710150835711374, "grad_norm": 1.374454140663147, "learning_rate": 0.0007863160755537437, "loss": 3.682, "step": 25170 }, { "epoch": 1.7104905557820356, "grad_norm": 1.4707225561141968, "learning_rate": 0.0007862736105449109, "loss": 3.7616, "step": 25175 }, { "epoch": 1.7108302758526974, "grad_norm": 2.0065360069274902, "learning_rate": 0.0007862311455360783, "loss": 3.7752, "step": 25180 }, { "epoch": 1.7111699959233593, "grad_norm": 1.2051922082901, "learning_rate": 0.0007861886805272456, "loss": 3.7317, "step": 25185 }, { "epoch": 1.711509715994021, "grad_norm": 1.6090751886367798, "learning_rate": 0.0007861462155184128, "loss": 3.5543, "step": 25190 }, { "epoch": 1.7118494360646825, "grad_norm": 1.502647876739502, "learning_rate": 0.0007861037505095802, "loss": 3.7497, "step": 25195 }, { "epoch": 1.7121891561353446, "grad_norm": 1.1247425079345703, "learning_rate": 0.0007860612855007474, "loss": 3.4666, "step": 25200 }, { "epoch": 1.7125288762060062, "grad_norm": 1.3547959327697754, "learning_rate": 0.0007860188204919146, "loss": 3.4691, "step": 25205 }, { "epoch": 1.7128685962766679, "grad_norm": 1.3701566457748413, "learning_rate": 0.000785976355483082, "loss": 3.6201, "step": 25210 }, { "epoch": 1.71320831634733, "grad_norm": 1.304641842842102, "learning_rate": 0.0007859338904742492, "loss": 3.516, "step": 25215 }, { "epoch": 1.7135480364179916, 
"grad_norm": 1.2925723791122437, "learning_rate": 0.0007858914254654165, "loss": 3.6404, "step": 25220 }, { "epoch": 1.7138877564886532, "grad_norm": 1.205875277519226, "learning_rate": 0.0007858489604565839, "loss": 3.6426, "step": 25225 }, { "epoch": 1.7142274765593153, "grad_norm": 1.4978801012039185, "learning_rate": 0.0007858064954477511, "loss": 3.3613, "step": 25230 }, { "epoch": 1.714567196629977, "grad_norm": 1.4387638568878174, "learning_rate": 0.0007857640304389183, "loss": 3.5995, "step": 25235 }, { "epoch": 1.7149069167006386, "grad_norm": 2.2461094856262207, "learning_rate": 0.0007857215654300856, "loss": 4.1091, "step": 25240 }, { "epoch": 1.7152466367713004, "grad_norm": 0.9835636019706726, "learning_rate": 0.0007856791004212529, "loss": 3.5199, "step": 25245 }, { "epoch": 1.7155863568419623, "grad_norm": 1.110608458518982, "learning_rate": 0.0007856366354124201, "loss": 3.6423, "step": 25250 }, { "epoch": 1.7159260769126239, "grad_norm": 1.1750861406326294, "learning_rate": 0.0007855941704035875, "loss": 3.5474, "step": 25255 }, { "epoch": 1.7162657969832857, "grad_norm": 1.3371957540512085, "learning_rate": 0.0007855517053947548, "loss": 3.4925, "step": 25260 }, { "epoch": 1.7166055170539476, "grad_norm": 1.287442684173584, "learning_rate": 0.000785509240385922, "loss": 3.7379, "step": 25265 }, { "epoch": 1.7169452371246092, "grad_norm": 1.152247428894043, "learning_rate": 0.0007854667753770893, "loss": 3.5964, "step": 25270 }, { "epoch": 1.717284957195271, "grad_norm": 1.0952067375183105, "learning_rate": 0.0007854243103682565, "loss": 3.5086, "step": 25275 }, { "epoch": 1.717624677265933, "grad_norm": 1.5887936353683472, "learning_rate": 0.0007853818453594238, "loss": 3.6868, "step": 25280 }, { "epoch": 1.7179643973365946, "grad_norm": 1.1731857061386108, "learning_rate": 0.0007853393803505911, "loss": 3.7215, "step": 25285 }, { "epoch": 1.7183041174072564, "grad_norm": 1.1096712350845337, "learning_rate": 0.0007852969153417584, "loss": 3.6916, 
"step": 25290 }, { "epoch": 1.7186438374779183, "grad_norm": 1.446373701095581, "learning_rate": 0.0007852544503329257, "loss": 3.345, "step": 25295 }, { "epoch": 1.71898355754858, "grad_norm": 1.097629427909851, "learning_rate": 0.000785211985324093, "loss": 3.6127, "step": 25300 }, { "epoch": 1.7193232776192418, "grad_norm": 1.3311485052108765, "learning_rate": 0.0007851695203152602, "loss": 3.4152, "step": 25305 }, { "epoch": 1.7196629976899036, "grad_norm": 1.4050878286361694, "learning_rate": 0.0007851270553064275, "loss": 3.7314, "step": 25310 }, { "epoch": 1.7200027177605652, "grad_norm": 1.4513243436813354, "learning_rate": 0.0007850845902975948, "loss": 3.6773, "step": 25315 }, { "epoch": 1.720342437831227, "grad_norm": 1.2353708744049072, "learning_rate": 0.000785042125288762, "loss": 3.6075, "step": 25320 }, { "epoch": 1.720682157901889, "grad_norm": 1.3386558294296265, "learning_rate": 0.0007849996602799293, "loss": 3.5577, "step": 25325 }, { "epoch": 1.7210218779725506, "grad_norm": 1.0846165418624878, "learning_rate": 0.0007849571952710967, "loss": 3.7795, "step": 25330 }, { "epoch": 1.7213615980432124, "grad_norm": 1.8722167015075684, "learning_rate": 0.0007849147302622639, "loss": 3.4624, "step": 25335 }, { "epoch": 1.7217013181138743, "grad_norm": 1.1655840873718262, "learning_rate": 0.0007848722652534311, "loss": 3.6147, "step": 25340 }, { "epoch": 1.722041038184536, "grad_norm": 1.1649894714355469, "learning_rate": 0.0007848298002445985, "loss": 3.4594, "step": 25345 }, { "epoch": 1.7223807582551978, "grad_norm": 1.1167716979980469, "learning_rate": 0.0007847873352357657, "loss": 3.2982, "step": 25350 }, { "epoch": 1.7227204783258596, "grad_norm": 1.4002087116241455, "learning_rate": 0.0007847448702269329, "loss": 3.4698, "step": 25355 }, { "epoch": 1.7230601983965212, "grad_norm": 1.2356611490249634, "learning_rate": 0.0007847024052181004, "loss": 3.4648, "step": 25360 }, { "epoch": 1.7233999184671829, "grad_norm": 1.2046325206756592, 
"learning_rate": 0.0007846599402092676, "loss": 3.6196, "step": 25365 }, { "epoch": 1.723739638537845, "grad_norm": 1.3220444917678833, "learning_rate": 0.0007846174752004348, "loss": 3.8597, "step": 25370 }, { "epoch": 1.7240793586085066, "grad_norm": 1.2369343042373657, "learning_rate": 0.0007845750101916021, "loss": 3.525, "step": 25375 }, { "epoch": 1.7244190786791682, "grad_norm": 1.3969796895980835, "learning_rate": 0.0007845325451827694, "loss": 3.5248, "step": 25380 }, { "epoch": 1.7247587987498303, "grad_norm": 1.443158507347107, "learning_rate": 0.0007844900801739366, "loss": 3.7232, "step": 25385 }, { "epoch": 1.725098518820492, "grad_norm": 1.1024307012557983, "learning_rate": 0.000784447615165104, "loss": 3.6952, "step": 25390 }, { "epoch": 1.7254382388911536, "grad_norm": 1.8113429546356201, "learning_rate": 0.0007844051501562713, "loss": 3.6577, "step": 25395 }, { "epoch": 1.7257779589618156, "grad_norm": 1.286757230758667, "learning_rate": 0.0007843626851474386, "loss": 3.5289, "step": 25400 }, { "epoch": 1.7261176790324773, "grad_norm": 1.2638248205184937, "learning_rate": 0.0007843202201386058, "loss": 3.5726, "step": 25405 }, { "epoch": 1.726457399103139, "grad_norm": 1.2147488594055176, "learning_rate": 0.0007842777551297731, "loss": 3.6792, "step": 25410 }, { "epoch": 1.7267971191738007, "grad_norm": 1.1142475605010986, "learning_rate": 0.0007842352901209404, "loss": 3.6691, "step": 25415 }, { "epoch": 1.7271368392444626, "grad_norm": 1.3349921703338623, "learning_rate": 0.0007841928251121076, "loss": 3.8638, "step": 25420 }, { "epoch": 1.7274765593151242, "grad_norm": 1.4174643754959106, "learning_rate": 0.000784150360103275, "loss": 3.6463, "step": 25425 }, { "epoch": 1.727816279385786, "grad_norm": 1.4210187196731567, "learning_rate": 0.0007841078950944423, "loss": 3.4582, "step": 25430 }, { "epoch": 1.728155999456448, "grad_norm": 1.4690587520599365, "learning_rate": 0.0007840654300856095, "loss": 3.6167, "step": 25435 }, { "epoch": 
1.7284957195271096, "grad_norm": 1.1209617853164673, "learning_rate": 0.0007840229650767767, "loss": 3.7367, "step": 25440 }, { "epoch": 1.7288354395977714, "grad_norm": 1.1167250871658325, "learning_rate": 0.0007839805000679441, "loss": 3.5067, "step": 25445 }, { "epoch": 1.7291751596684333, "grad_norm": 1.5741794109344482, "learning_rate": 0.0007839380350591113, "loss": 3.5747, "step": 25450 }, { "epoch": 1.729514879739095, "grad_norm": 1.3184535503387451, "learning_rate": 0.0007838955700502785, "loss": 3.6832, "step": 25455 }, { "epoch": 1.7298545998097568, "grad_norm": 1.1200826168060303, "learning_rate": 0.000783853105041446, "loss": 3.9018, "step": 25460 }, { "epoch": 1.7301943198804186, "grad_norm": 1.492534875869751, "learning_rate": 0.0007838106400326132, "loss": 3.6983, "step": 25465 }, { "epoch": 1.7305340399510802, "grad_norm": 1.4401209354400635, "learning_rate": 0.0007837681750237804, "loss": 3.3984, "step": 25470 }, { "epoch": 1.730873760021742, "grad_norm": 1.3642728328704834, "learning_rate": 0.0007837257100149478, "loss": 3.5646, "step": 25475 }, { "epoch": 1.731213480092404, "grad_norm": 1.3860315084457397, "learning_rate": 0.000783683245006115, "loss": 3.4019, "step": 25480 }, { "epoch": 1.7315532001630656, "grad_norm": 1.663413643836975, "learning_rate": 0.0007836407799972822, "loss": 3.6335, "step": 25485 }, { "epoch": 1.7318929202337274, "grad_norm": 1.713498830795288, "learning_rate": 0.0007835983149884495, "loss": 3.5833, "step": 25490 }, { "epoch": 1.7322326403043893, "grad_norm": 1.3436946868896484, "learning_rate": 0.0007835558499796169, "loss": 3.443, "step": 25495 }, { "epoch": 1.732572360375051, "grad_norm": 1.8274075984954834, "learning_rate": 0.0007835133849707841, "loss": 3.6238, "step": 25500 }, { "epoch": 1.7329120804457128, "grad_norm": 1.6994224786758423, "learning_rate": 0.0007834709199619514, "loss": 3.6335, "step": 25505 }, { "epoch": 1.7332518005163746, "grad_norm": 1.3803672790527344, "learning_rate": 
0.0007834284549531187, "loss": 3.8863, "step": 25510 }, { "epoch": 1.7335915205870362, "grad_norm": 1.3953315019607544, "learning_rate": 0.0007833859899442859, "loss": 3.4501, "step": 25515 }, { "epoch": 1.733931240657698, "grad_norm": 1.1321320533752441, "learning_rate": 0.0007833435249354532, "loss": 3.5829, "step": 25520 }, { "epoch": 1.73427096072836, "grad_norm": 1.2853350639343262, "learning_rate": 0.0007833010599266204, "loss": 3.2308, "step": 25525 }, { "epoch": 1.7346106807990216, "grad_norm": 1.303183913230896, "learning_rate": 0.0007832585949177878, "loss": 3.33, "step": 25530 }, { "epoch": 1.7349504008696832, "grad_norm": 1.8278297185897827, "learning_rate": 0.0007832161299089551, "loss": 3.3768, "step": 25535 }, { "epoch": 1.7352901209403453, "grad_norm": 1.194534420967102, "learning_rate": 0.0007831736649001223, "loss": 3.5678, "step": 25540 }, { "epoch": 1.735629841011007, "grad_norm": 1.4317556619644165, "learning_rate": 0.0007831311998912896, "loss": 3.5067, "step": 25545 }, { "epoch": 1.7359695610816686, "grad_norm": 2.912815570831299, "learning_rate": 0.0007830887348824569, "loss": 3.7163, "step": 25550 }, { "epoch": 1.7363092811523306, "grad_norm": 1.357607126235962, "learning_rate": 0.0007830462698736241, "loss": 3.7661, "step": 25555 }, { "epoch": 1.7366490012229923, "grad_norm": 1.5754730701446533, "learning_rate": 0.0007830038048647913, "loss": 3.6179, "step": 25560 }, { "epoch": 1.736988721293654, "grad_norm": 1.5136301517486572, "learning_rate": 0.0007829613398559588, "loss": 3.5617, "step": 25565 }, { "epoch": 1.737328441364316, "grad_norm": 1.5935603380203247, "learning_rate": 0.000782918874847126, "loss": 3.3559, "step": 25570 }, { "epoch": 1.7376681614349776, "grad_norm": 1.4289764165878296, "learning_rate": 0.0007828764098382932, "loss": 3.5414, "step": 25575 }, { "epoch": 1.7380078815056392, "grad_norm": 1.4066189527511597, "learning_rate": 0.0007828339448294606, "loss": 3.6122, "step": 25580 }, { "epoch": 1.738347601576301, 
"grad_norm": 1.265128493309021, "learning_rate": 0.0007827914798206278, "loss": 3.4201, "step": 25585 }, { "epoch": 1.738687321646963, "grad_norm": 1.6427315473556519, "learning_rate": 0.000782749014811795, "loss": 3.8084, "step": 25590 }, { "epoch": 1.7390270417176246, "grad_norm": 1.3528270721435547, "learning_rate": 0.0007827065498029624, "loss": 3.7262, "step": 25595 }, { "epoch": 1.7393667617882864, "grad_norm": 1.941142201423645, "learning_rate": 0.0007826640847941297, "loss": 3.9426, "step": 25600 }, { "epoch": 1.7397064818589483, "grad_norm": 1.2819100618362427, "learning_rate": 0.0007826216197852969, "loss": 3.5852, "step": 25605 }, { "epoch": 1.74004620192961, "grad_norm": 1.4072110652923584, "learning_rate": 0.0007825791547764643, "loss": 3.6421, "step": 25610 }, { "epoch": 1.7403859220002718, "grad_norm": 1.2540791034698486, "learning_rate": 0.0007825366897676315, "loss": 3.7975, "step": 25615 }, { "epoch": 1.7407256420709336, "grad_norm": 1.3163280487060547, "learning_rate": 0.0007824942247587987, "loss": 3.4673, "step": 25620 }, { "epoch": 1.7410653621415952, "grad_norm": 1.4369115829467773, "learning_rate": 0.000782451759749966, "loss": 3.656, "step": 25625 }, { "epoch": 1.741405082212257, "grad_norm": 1.4356212615966797, "learning_rate": 0.0007824092947411333, "loss": 3.7272, "step": 25630 }, { "epoch": 1.741744802282919, "grad_norm": 1.3905097246170044, "learning_rate": 0.0007823668297323006, "loss": 3.5951, "step": 25635 }, { "epoch": 1.7420845223535806, "grad_norm": 1.0314180850982666, "learning_rate": 0.0007823243647234679, "loss": 3.4069, "step": 25640 }, { "epoch": 1.7424242424242424, "grad_norm": 1.274850845336914, "learning_rate": 0.0007822818997146352, "loss": 3.4315, "step": 25645 }, { "epoch": 1.7427639624949043, "grad_norm": 4.54417085647583, "learning_rate": 0.0007822394347058024, "loss": 3.3807, "step": 25650 }, { "epoch": 1.743103682565566, "grad_norm": 1.132612943649292, "learning_rate": 0.0007821969696969697, "loss": 3.6009, "step": 
25655 }, { "epoch": 1.7434434026362278, "grad_norm": 1.2076969146728516, "learning_rate": 0.000782154504688137, "loss": 3.4456, "step": 25660 }, { "epoch": 1.7437831227068896, "grad_norm": 1.6089141368865967, "learning_rate": 0.0007821120396793042, "loss": 3.5882, "step": 25665 }, { "epoch": 1.7441228427775513, "grad_norm": 1.3091233968734741, "learning_rate": 0.0007820695746704716, "loss": 3.9637, "step": 25670 }, { "epoch": 1.744462562848213, "grad_norm": 1.8544509410858154, "learning_rate": 0.0007820271096616388, "loss": 3.4264, "step": 25675 }, { "epoch": 1.744802282918875, "grad_norm": 1.20468270778656, "learning_rate": 0.0007819846446528061, "loss": 3.7122, "step": 25680 }, { "epoch": 1.7451420029895366, "grad_norm": 1.6131608486175537, "learning_rate": 0.0007819421796439734, "loss": 3.4475, "step": 25685 }, { "epoch": 1.7454817230601984, "grad_norm": 1.740373134613037, "learning_rate": 0.0007818997146351406, "loss": 3.6352, "step": 25690 }, { "epoch": 1.7458214431308603, "grad_norm": 1.1933375597000122, "learning_rate": 0.0007818572496263079, "loss": 3.6592, "step": 25695 }, { "epoch": 1.746161163201522, "grad_norm": 1.3233647346496582, "learning_rate": 0.0007818147846174752, "loss": 3.5317, "step": 25700 }, { "epoch": 1.7465008832721836, "grad_norm": 1.2922258377075195, "learning_rate": 0.0007817723196086425, "loss": 3.7152, "step": 25705 }, { "epoch": 1.7468406033428456, "grad_norm": 1.566523790359497, "learning_rate": 0.0007817298545998098, "loss": 3.3819, "step": 25710 }, { "epoch": 1.7471803234135073, "grad_norm": 1.74066162109375, "learning_rate": 0.0007816873895909771, "loss": 3.562, "step": 25715 }, { "epoch": 1.747520043484169, "grad_norm": 1.4413307905197144, "learning_rate": 0.0007816449245821443, "loss": 3.5251, "step": 25720 }, { "epoch": 1.747859763554831, "grad_norm": 1.2217137813568115, "learning_rate": 0.0007816024595733115, "loss": 3.5105, "step": 25725 }, { "epoch": 1.7481994836254926, "grad_norm": 1.1282083988189697, "learning_rate": 
0.0007815599945644789, "loss": 3.596, "step": 25730 }, { "epoch": 1.7485392036961542, "grad_norm": 1.0791341066360474, "learning_rate": 0.0007815175295556461, "loss": 3.295, "step": 25735 }, { "epoch": 1.7488789237668163, "grad_norm": 1.0642518997192383, "learning_rate": 0.0007814750645468135, "loss": 3.7021, "step": 25740 }, { "epoch": 1.749218643837478, "grad_norm": 0.9849804639816284, "learning_rate": 0.0007814325995379808, "loss": 3.286, "step": 25745 }, { "epoch": 1.7495583639081396, "grad_norm": 1.3191535472869873, "learning_rate": 0.000781390134529148, "loss": 3.7152, "step": 25750 }, { "epoch": 1.7498980839788014, "grad_norm": 2.0846426486968994, "learning_rate": 0.0007813476695203153, "loss": 3.5711, "step": 25755 }, { "epoch": 1.7502378040494633, "grad_norm": 1.9198800325393677, "learning_rate": 0.0007813052045114826, "loss": 3.6223, "step": 25760 }, { "epoch": 1.750577524120125, "grad_norm": 1.330739140510559, "learning_rate": 0.0007812627395026498, "loss": 3.5422, "step": 25765 }, { "epoch": 1.7509172441907868, "grad_norm": 1.0362247228622437, "learning_rate": 0.0007812202744938171, "loss": 3.3158, "step": 25770 }, { "epoch": 1.7512569642614486, "grad_norm": 1.2672392129898071, "learning_rate": 0.0007811778094849844, "loss": 3.6736, "step": 25775 }, { "epoch": 1.7515966843321102, "grad_norm": 1.3828771114349365, "learning_rate": 0.0007811353444761517, "loss": 3.6833, "step": 25780 }, { "epoch": 1.751936404402772, "grad_norm": 1.2782659530639648, "learning_rate": 0.000781092879467319, "loss": 3.4292, "step": 25785 }, { "epoch": 1.752276124473434, "grad_norm": 1.3161582946777344, "learning_rate": 0.0007810504144584862, "loss": 3.7641, "step": 25790 }, { "epoch": 1.7526158445440956, "grad_norm": 1.0789399147033691, "learning_rate": 0.0007810079494496535, "loss": 3.5647, "step": 25795 }, { "epoch": 1.7529555646147574, "grad_norm": 1.316758632659912, "learning_rate": 0.0007809654844408208, "loss": 3.6304, "step": 25800 }, { "epoch": 1.7532952846854193, 
"grad_norm": 1.5776411294937134, "learning_rate": 0.000780923019431988, "loss": 3.5959, "step": 25805 }, { "epoch": 1.753635004756081, "grad_norm": 7.201361179351807, "learning_rate": 0.0007808805544231554, "loss": 3.8459, "step": 25810 }, { "epoch": 1.7539747248267428, "grad_norm": 1.414959192276001, "learning_rate": 0.0007808380894143227, "loss": 3.8595, "step": 25815 }, { "epoch": 1.7543144448974046, "grad_norm": 1.7359468936920166, "learning_rate": 0.0007807956244054899, "loss": 3.8206, "step": 25820 }, { "epoch": 1.7546541649680663, "grad_norm": 1.4909160137176514, "learning_rate": 0.0007807531593966571, "loss": 3.5494, "step": 25825 }, { "epoch": 1.754993885038728, "grad_norm": 9.082356452941895, "learning_rate": 0.0007807106943878245, "loss": 3.6988, "step": 25830 }, { "epoch": 1.75533360510939, "grad_norm": 1.3098434209823608, "learning_rate": 0.0007806682293789917, "loss": 3.6597, "step": 25835 }, { "epoch": 1.7556733251800516, "grad_norm": 1.2418373823165894, "learning_rate": 0.0007806257643701589, "loss": 3.6666, "step": 25840 }, { "epoch": 1.7560130452507134, "grad_norm": 1.65934157371521, "learning_rate": 0.0007805832993613264, "loss": 3.7528, "step": 25845 }, { "epoch": 1.7563527653213753, "grad_norm": 1.3996601104736328, "learning_rate": 0.0007805408343524936, "loss": 3.5459, "step": 25850 }, { "epoch": 1.756692485392037, "grad_norm": 1.3242340087890625, "learning_rate": 0.0007804983693436608, "loss": 3.6727, "step": 25855 }, { "epoch": 1.7570322054626988, "grad_norm": 1.6202526092529297, "learning_rate": 0.0007804559043348282, "loss": 3.6051, "step": 25860 }, { "epoch": 1.7573719255333606, "grad_norm": 1.507611632347107, "learning_rate": 0.0007804134393259954, "loss": 3.6487, "step": 25865 }, { "epoch": 1.7577116456040223, "grad_norm": 1.1278033256530762, "learning_rate": 0.0007803709743171626, "loss": 3.38, "step": 25870 }, { "epoch": 1.758051365674684, "grad_norm": 2.0578629970550537, "learning_rate": 0.0007803285093083299, "loss": 3.3858, "step": 
25875 }, { "epoch": 1.758391085745346, "grad_norm": 1.526781439781189, "learning_rate": 0.0007802860442994973, "loss": 3.4197, "step": 25880 }, { "epoch": 1.7587308058160076, "grad_norm": 1.137115478515625, "learning_rate": 0.0007802435792906645, "loss": 3.6515, "step": 25885 }, { "epoch": 1.7590705258866692, "grad_norm": 1.328755497932434, "learning_rate": 0.0007802011142818318, "loss": 3.4594, "step": 25890 }, { "epoch": 1.7594102459573313, "grad_norm": 1.347240924835205, "learning_rate": 0.0007801586492729991, "loss": 3.66, "step": 25895 }, { "epoch": 1.759749966027993, "grad_norm": 1.4984464645385742, "learning_rate": 0.0007801161842641663, "loss": 3.3134, "step": 25900 }, { "epoch": 1.7600896860986546, "grad_norm": 1.8097623586654663, "learning_rate": 0.0007800737192553336, "loss": 3.4585, "step": 25905 }, { "epoch": 1.7604294061693166, "grad_norm": 1.3718715906143188, "learning_rate": 0.0007800312542465008, "loss": 3.644, "step": 25910 }, { "epoch": 1.7607691262399783, "grad_norm": 1.423666000366211, "learning_rate": 0.0007799887892376682, "loss": 3.6725, "step": 25915 }, { "epoch": 1.76110884631064, "grad_norm": 1.4614561796188354, "learning_rate": 0.0007799463242288355, "loss": 3.4742, "step": 25920 }, { "epoch": 1.7614485663813018, "grad_norm": 1.1900522708892822, "learning_rate": 0.0007799038592200027, "loss": 3.5561, "step": 25925 }, { "epoch": 1.7617882864519636, "grad_norm": 1.126562476158142, "learning_rate": 0.00077986139421117, "loss": 3.6341, "step": 25930 }, { "epoch": 1.7621280065226252, "grad_norm": 1.5302408933639526, "learning_rate": 0.0007798189292023373, "loss": 3.6437, "step": 25935 }, { "epoch": 1.762467726593287, "grad_norm": 1.5537220239639282, "learning_rate": 0.0007797764641935045, "loss": 3.4426, "step": 25940 }, { "epoch": 1.762807446663949, "grad_norm": 1.069602370262146, "learning_rate": 0.0007797339991846718, "loss": 3.9232, "step": 25945 }, { "epoch": 1.7631471667346106, "grad_norm": 1.7891063690185547, "learning_rate": 
0.0007796915341758392, "loss": 3.4839, "step": 25950 }, { "epoch": 1.7634868868052724, "grad_norm": 1.4296244382858276, "learning_rate": 0.0007796490691670064, "loss": 3.7918, "step": 25955 }, { "epoch": 1.7638266068759343, "grad_norm": 1.2433651685714722, "learning_rate": 0.0007796066041581736, "loss": 3.7365, "step": 25960 }, { "epoch": 1.764166326946596, "grad_norm": 1.1536579132080078, "learning_rate": 0.000779564139149341, "loss": 3.5141, "step": 25965 }, { "epoch": 1.7645060470172578, "grad_norm": 1.4313522577285767, "learning_rate": 0.0007795216741405082, "loss": 3.8348, "step": 25970 }, { "epoch": 1.7648457670879196, "grad_norm": 1.0441112518310547, "learning_rate": 0.0007794792091316754, "loss": 3.5275, "step": 25975 }, { "epoch": 1.7651854871585813, "grad_norm": 1.0302814245224, "learning_rate": 0.0007794367441228429, "loss": 3.5232, "step": 25980 }, { "epoch": 1.765525207229243, "grad_norm": 2.4185497760772705, "learning_rate": 0.0007793942791140101, "loss": 3.4795, "step": 25985 }, { "epoch": 1.765864927299905, "grad_norm": 1.285706639289856, "learning_rate": 0.0007793518141051773, "loss": 3.5223, "step": 25990 }, { "epoch": 1.7662046473705666, "grad_norm": 1.5333200693130493, "learning_rate": 0.0007793093490963447, "loss": 3.9526, "step": 25995 }, { "epoch": 1.7665443674412284, "grad_norm": 1.1408363580703735, "learning_rate": 0.0007792668840875119, "loss": 3.7331, "step": 26000 }, { "epoch": 1.7668840875118903, "grad_norm": 1.3409570455551147, "learning_rate": 0.0007792244190786791, "loss": 3.5958, "step": 26005 }, { "epoch": 1.767223807582552, "grad_norm": 1.337030291557312, "learning_rate": 0.0007791819540698464, "loss": 3.8019, "step": 26010 }, { "epoch": 1.7675635276532138, "grad_norm": 1.164802074432373, "learning_rate": 0.0007791394890610138, "loss": 3.5468, "step": 26015 }, { "epoch": 1.7679032477238756, "grad_norm": 1.3869786262512207, "learning_rate": 0.000779097024052181, "loss": 3.4529, "step": 26020 }, { "epoch": 1.7682429677945373, 
"grad_norm": 1.4292782545089722, "learning_rate": 0.0007790545590433483, "loss": 3.2728, "step": 26025 }, { "epoch": 1.7685826878651991, "grad_norm": 1.2716150283813477, "learning_rate": 0.0007790120940345156, "loss": 3.4162, "step": 26030 }, { "epoch": 1.768922407935861, "grad_norm": 1.4592602252960205, "learning_rate": 0.0007789696290256828, "loss": 3.5706, "step": 26035 }, { "epoch": 1.7692621280065226, "grad_norm": 1.2572548389434814, "learning_rate": 0.0007789271640168501, "loss": 3.6562, "step": 26040 }, { "epoch": 1.7696018480771842, "grad_norm": 1.3900542259216309, "learning_rate": 0.0007788846990080174, "loss": 3.5623, "step": 26045 }, { "epoch": 1.7699415681478463, "grad_norm": 1.449953317642212, "learning_rate": 0.0007788422339991847, "loss": 3.6511, "step": 26050 }, { "epoch": 1.770281288218508, "grad_norm": 1.1569468975067139, "learning_rate": 0.000778799768990352, "loss": 3.3967, "step": 26055 }, { "epoch": 1.7706210082891696, "grad_norm": 1.27316153049469, "learning_rate": 0.0007787573039815192, "loss": 3.5533, "step": 26060 }, { "epoch": 1.7709607283598316, "grad_norm": 1.2912766933441162, "learning_rate": 0.0007787233319744531, "loss": 3.5665, "step": 26065 }, { "epoch": 1.7713004484304933, "grad_norm": 1.380717396736145, "learning_rate": 0.0007786808669656203, "loss": 3.4162, "step": 26070 }, { "epoch": 1.771640168501155, "grad_norm": 1.5040440559387207, "learning_rate": 0.0007786384019567876, "loss": 3.481, "step": 26075 }, { "epoch": 1.771979888571817, "grad_norm": 1.3665655851364136, "learning_rate": 0.000778595936947955, "loss": 3.7937, "step": 26080 }, { "epoch": 1.7723196086424786, "grad_norm": 1.1784995794296265, "learning_rate": 0.0007785534719391222, "loss": 3.4532, "step": 26085 }, { "epoch": 1.7726593287131402, "grad_norm": 1.106843113899231, "learning_rate": 0.0007785110069302894, "loss": 3.6423, "step": 26090 }, { "epoch": 1.772999048783802, "grad_norm": 1.9581115245819092, "learning_rate": 0.0007784685419214568, "loss": 3.3599, 
"step": 26095 }, { "epoch": 1.773338768854464, "grad_norm": 1.284105896949768, "learning_rate": 0.000778426076912624, "loss": 3.4708, "step": 26100 }, { "epoch": 1.7736784889251256, "grad_norm": 0.9704298377037048, "learning_rate": 0.0007783836119037912, "loss": 3.6635, "step": 26105 }, { "epoch": 1.7740182089957874, "grad_norm": 1.3860692977905273, "learning_rate": 0.0007783411468949586, "loss": 3.211, "step": 26110 }, { "epoch": 1.7743579290664493, "grad_norm": 1.4018384218215942, "learning_rate": 0.0007782986818861259, "loss": 3.6104, "step": 26115 }, { "epoch": 1.774697649137111, "grad_norm": 1.4525606632232666, "learning_rate": 0.0007782562168772931, "loss": 3.6991, "step": 26120 }, { "epoch": 1.7750373692077728, "grad_norm": 1.367680311203003, "learning_rate": 0.0007782137518684604, "loss": 3.6341, "step": 26125 }, { "epoch": 1.7753770892784346, "grad_norm": 1.294614553451538, "learning_rate": 0.0007781712868596277, "loss": 3.5404, "step": 26130 }, { "epoch": 1.7757168093490963, "grad_norm": 4.835052967071533, "learning_rate": 0.0007781288218507949, "loss": 3.5797, "step": 26135 }, { "epoch": 1.7760565294197581, "grad_norm": 1.1764200925827026, "learning_rate": 0.0007780863568419622, "loss": 3.6268, "step": 26140 }, { "epoch": 1.77639624949042, "grad_norm": 1.4846360683441162, "learning_rate": 0.0007780438918331296, "loss": 3.5787, "step": 26145 }, { "epoch": 1.7767359695610816, "grad_norm": 1.2306283712387085, "learning_rate": 0.0007780014268242968, "loss": 3.398, "step": 26150 }, { "epoch": 1.7770756896317434, "grad_norm": 1.3204350471496582, "learning_rate": 0.0007779589618154641, "loss": 3.3714, "step": 26155 }, { "epoch": 1.7774154097024053, "grad_norm": 1.2139298915863037, "learning_rate": 0.0007779164968066313, "loss": 3.5462, "step": 26160 }, { "epoch": 1.777755129773067, "grad_norm": 1.9059041738510132, "learning_rate": 0.0007778740317977986, "loss": 3.676, "step": 26165 }, { "epoch": 1.7780948498437288, "grad_norm": 1.640151023864746, 
"learning_rate": 0.0007778315667889659, "loss": 3.6999, "step": 26170 }, { "epoch": 1.7784345699143906, "grad_norm": 1.3048492670059204, "learning_rate": 0.0007777891017801331, "loss": 3.3428, "step": 26175 }, { "epoch": 1.7787742899850523, "grad_norm": 1.2556705474853516, "learning_rate": 0.0007777466367713005, "loss": 3.9069, "step": 26180 }, { "epoch": 1.7791140100557141, "grad_norm": 1.1330971717834473, "learning_rate": 0.0007777041717624678, "loss": 3.6308, "step": 26185 }, { "epoch": 1.779453730126376, "grad_norm": 1.5279827117919922, "learning_rate": 0.000777661706753635, "loss": 3.5007, "step": 26190 }, { "epoch": 1.7797934501970376, "grad_norm": 1.479917049407959, "learning_rate": 0.0007776192417448022, "loss": 3.3114, "step": 26195 }, { "epoch": 1.7801331702676995, "grad_norm": 1.6157907247543335, "learning_rate": 0.0007775767767359696, "loss": 3.3801, "step": 26200 }, { "epoch": 1.7804728903383613, "grad_norm": 1.3974794149398804, "learning_rate": 0.0007775343117271368, "loss": 3.4814, "step": 26205 }, { "epoch": 1.780812610409023, "grad_norm": 1.4911726713180542, "learning_rate": 0.000777491846718304, "loss": 3.6515, "step": 26210 }, { "epoch": 1.7811523304796846, "grad_norm": 1.423449993133545, "learning_rate": 0.0007774493817094715, "loss": 3.4625, "step": 26215 }, { "epoch": 1.7814920505503467, "grad_norm": 1.2386295795440674, "learning_rate": 0.0007774069167006387, "loss": 3.561, "step": 26220 }, { "epoch": 1.7818317706210083, "grad_norm": 11.042737007141113, "learning_rate": 0.0007773644516918059, "loss": 3.6262, "step": 26225 }, { "epoch": 1.78217149069167, "grad_norm": 1.195499062538147, "learning_rate": 0.0007773219866829733, "loss": 3.7564, "step": 26230 }, { "epoch": 1.782511210762332, "grad_norm": 1.1175129413604736, "learning_rate": 0.0007772795216741405, "loss": 3.6743, "step": 26235 }, { "epoch": 1.7828509308329936, "grad_norm": 1.3427058458328247, "learning_rate": 0.0007772370566653077, "loss": 3.4173, "step": 26240 }, { "epoch": 
1.7831906509036552, "grad_norm": 1.2396063804626465, "learning_rate": 0.0007771945916564752, "loss": 3.727, "step": 26245 }, { "epoch": 1.7835303709743173, "grad_norm": 1.273775339126587, "learning_rate": 0.0007771521266476424, "loss": 3.7786, "step": 26250 }, { "epoch": 1.783870091044979, "grad_norm": 4.965933322906494, "learning_rate": 0.0007771096616388096, "loss": 3.4467, "step": 26255 }, { "epoch": 1.7842098111156406, "grad_norm": 1.4298889636993408, "learning_rate": 0.0007770671966299769, "loss": 3.5344, "step": 26260 }, { "epoch": 1.7845495311863024, "grad_norm": 1.4529101848602295, "learning_rate": 0.0007770247316211442, "loss": 3.5234, "step": 26265 }, { "epoch": 1.7848892512569643, "grad_norm": 1.1006510257720947, "learning_rate": 0.0007769822666123114, "loss": 3.695, "step": 26270 }, { "epoch": 1.785228971327626, "grad_norm": 1.418146014213562, "learning_rate": 0.0007769398016034787, "loss": 3.6241, "step": 26275 }, { "epoch": 1.7855686913982878, "grad_norm": 1.0899804830551147, "learning_rate": 0.0007768973365946461, "loss": 3.7556, "step": 26280 }, { "epoch": 1.7859084114689496, "grad_norm": 1.2053685188293457, "learning_rate": 0.0007768548715858134, "loss": 3.6736, "step": 26285 }, { "epoch": 1.7862481315396113, "grad_norm": 1.15653395652771, "learning_rate": 0.0007768124065769806, "loss": 3.6702, "step": 26290 }, { "epoch": 1.7865878516102731, "grad_norm": 1.3939517736434937, "learning_rate": 0.0007767699415681478, "loss": 3.6579, "step": 26295 }, { "epoch": 1.786927571680935, "grad_norm": 1.0180143117904663, "learning_rate": 0.0007767274765593152, "loss": 3.4505, "step": 26300 }, { "epoch": 1.7872672917515966, "grad_norm": 1.0754259824752808, "learning_rate": 0.0007766850115504824, "loss": 3.538, "step": 26305 }, { "epoch": 1.7876070118222585, "grad_norm": 1.2773793935775757, "learning_rate": 0.0007766425465416496, "loss": 3.6508, "step": 26310 }, { "epoch": 1.7879467318929203, "grad_norm": 1.2478572130203247, "learning_rate": 0.0007766000815328171, 
"loss": 3.6375, "step": 26315 }, { "epoch": 1.788286451963582, "grad_norm": 1.0374144315719604, "learning_rate": 0.0007765576165239843, "loss": 3.4386, "step": 26320 }, { "epoch": 1.7886261720342438, "grad_norm": 1.2283674478530884, "learning_rate": 0.0007765151515151515, "loss": 3.4125, "step": 26325 }, { "epoch": 1.7889658921049056, "grad_norm": 1.3726797103881836, "learning_rate": 0.0007764726865063189, "loss": 3.643, "step": 26330 }, { "epoch": 1.7893056121755673, "grad_norm": 1.1556916236877441, "learning_rate": 0.0007764302214974861, "loss": 3.4484, "step": 26335 }, { "epoch": 1.7896453322462291, "grad_norm": 1.2869371175765991, "learning_rate": 0.0007763877564886533, "loss": 3.6061, "step": 26340 }, { "epoch": 1.789985052316891, "grad_norm": 1.1191364526748657, "learning_rate": 0.0007763452914798206, "loss": 3.6158, "step": 26345 }, { "epoch": 1.7903247723875526, "grad_norm": 1.2826498746871948, "learning_rate": 0.000776302826470988, "loss": 3.9399, "step": 26350 }, { "epoch": 1.7906644924582145, "grad_norm": 1.1590174436569214, "learning_rate": 0.0007762603614621552, "loss": 3.8255, "step": 26355 }, { "epoch": 1.7910042125288763, "grad_norm": 1.1765999794006348, "learning_rate": 0.0007762178964533225, "loss": 3.6192, "step": 26360 }, { "epoch": 1.791343932599538, "grad_norm": 1.3655740022659302, "learning_rate": 0.0007761754314444898, "loss": 3.755, "step": 26365 }, { "epoch": 1.7916836526701998, "grad_norm": 1.3812618255615234, "learning_rate": 0.000776132966435657, "loss": 3.6214, "step": 26370 }, { "epoch": 1.7920233727408617, "grad_norm": 1.275230050086975, "learning_rate": 0.0007760905014268243, "loss": 3.6647, "step": 26375 }, { "epoch": 1.7923630928115233, "grad_norm": 1.1430798768997192, "learning_rate": 0.0007760480364179916, "loss": 3.8006, "step": 26380 }, { "epoch": 1.792702812882185, "grad_norm": 1.1278681755065918, "learning_rate": 0.0007760055714091589, "loss": 3.6778, "step": 26385 }, { "epoch": 1.793042532952847, "grad_norm": 
1.4852190017700195, "learning_rate": 0.0007759631064003262, "loss": 3.6946, "step": 26390 }, { "epoch": 1.7933822530235086, "grad_norm": 1.0282580852508545, "learning_rate": 0.0007759206413914934, "loss": 3.842, "step": 26395 }, { "epoch": 1.7937219730941703, "grad_norm": 1.3261173963546753, "learning_rate": 0.0007758781763826607, "loss": 3.4971, "step": 26400 }, { "epoch": 1.7940616931648323, "grad_norm": 1.0928617715835571, "learning_rate": 0.000775835711373828, "loss": 3.5802, "step": 26405 }, { "epoch": 1.794401413235494, "grad_norm": 1.281179428100586, "learning_rate": 0.0007757932463649952, "loss": 3.5419, "step": 26410 }, { "epoch": 1.7947411333061556, "grad_norm": 0.9427726864814758, "learning_rate": 0.0007757507813561625, "loss": 3.5578, "step": 26415 }, { "epoch": 1.7950808533768177, "grad_norm": 1.1384615898132324, "learning_rate": 0.0007757083163473299, "loss": 3.4641, "step": 26420 }, { "epoch": 1.7954205734474793, "grad_norm": 1.3259682655334473, "learning_rate": 0.0007756658513384971, "loss": 3.404, "step": 26425 }, { "epoch": 1.795760293518141, "grad_norm": 1.3309472799301147, "learning_rate": 0.0007756233863296644, "loss": 3.5414, "step": 26430 }, { "epoch": 1.7961000135888028, "grad_norm": 1.6518372297286987, "learning_rate": 0.0007755809213208317, "loss": 3.3731, "step": 26435 }, { "epoch": 1.7964397336594646, "grad_norm": 1.4438085556030273, "learning_rate": 0.0007755384563119989, "loss": 3.7318, "step": 26440 }, { "epoch": 1.7967794537301263, "grad_norm": 1.1063047647476196, "learning_rate": 0.0007754959913031661, "loss": 3.4114, "step": 26445 }, { "epoch": 1.7971191738007881, "grad_norm": 1.2617803812026978, "learning_rate": 0.0007754535262943335, "loss": 3.1746, "step": 26450 }, { "epoch": 1.79745889387145, "grad_norm": 1.480115532875061, "learning_rate": 0.0007754110612855008, "loss": 3.6149, "step": 26455 }, { "epoch": 1.7977986139421116, "grad_norm": 1.405578374862671, "learning_rate": 0.000775368596276668, "loss": 3.7322, "step": 26460 }, 
{ "epoch": 1.7981383340127735, "grad_norm": 1.0279381275177002, "learning_rate": 0.0007753261312678354, "loss": 3.4676, "step": 26465 }, { "epoch": 1.7984780540834353, "grad_norm": 2.724930763244629, "learning_rate": 0.0007752836662590026, "loss": 3.6518, "step": 26470 }, { "epoch": 1.798817774154097, "grad_norm": 1.3554019927978516, "learning_rate": 0.0007752412012501698, "loss": 3.5962, "step": 26475 }, { "epoch": 1.7991574942247588, "grad_norm": 1.1674426794052124, "learning_rate": 0.0007751987362413372, "loss": 3.6448, "step": 26480 }, { "epoch": 1.7994972142954206, "grad_norm": 1.4186909198760986, "learning_rate": 0.0007751562712325044, "loss": 3.5019, "step": 26485 }, { "epoch": 1.7998369343660823, "grad_norm": 1.3935116529464722, "learning_rate": 0.0007751138062236717, "loss": 3.5456, "step": 26490 }, { "epoch": 1.8001766544367441, "grad_norm": 1.0997029542922974, "learning_rate": 0.000775071341214839, "loss": 3.6186, "step": 26495 }, { "epoch": 1.800516374507406, "grad_norm": 1.150223970413208, "learning_rate": 0.0007750288762060063, "loss": 3.5826, "step": 26500 }, { "epoch": 1.8008560945780676, "grad_norm": 1.3869588375091553, "learning_rate": 0.0007749864111971735, "loss": 3.4827, "step": 26505 }, { "epoch": 1.8011958146487295, "grad_norm": 1.1224267482757568, "learning_rate": 0.0007749439461883408, "loss": 3.9106, "step": 26510 }, { "epoch": 1.8015355347193913, "grad_norm": 1.1990867853164673, "learning_rate": 0.0007749014811795081, "loss": 3.6742, "step": 26515 }, { "epoch": 1.801875254790053, "grad_norm": 1.0589615106582642, "learning_rate": 0.0007748590161706753, "loss": 3.2563, "step": 26520 }, { "epoch": 1.8022149748607148, "grad_norm": 1.323469877243042, "learning_rate": 0.0007748165511618427, "loss": 3.7469, "step": 26525 }, { "epoch": 1.8025546949313767, "grad_norm": 1.0123714208602905, "learning_rate": 0.00077477408615301, "loss": 3.4641, "step": 26530 }, { "epoch": 1.8028944150020383, "grad_norm": 1.4727416038513184, "learning_rate": 
0.0007747316211441772, "loss": 3.7721, "step": 26535 }, { "epoch": 1.8032341350727001, "grad_norm": 1.3905014991760254, "learning_rate": 0.0007746891561353445, "loss": 3.6812, "step": 26540 }, { "epoch": 1.803573855143362, "grad_norm": 1.1692825555801392, "learning_rate": 0.0007746466911265117, "loss": 3.6149, "step": 26545 }, { "epoch": 1.8039135752140236, "grad_norm": 1.4644663333892822, "learning_rate": 0.000774604226117679, "loss": 3.4503, "step": 26550 }, { "epoch": 1.8042532952846853, "grad_norm": 1.3144198656082153, "learning_rate": 0.0007745617611088463, "loss": 3.866, "step": 26555 }, { "epoch": 1.8045930153553473, "grad_norm": 1.4122105836868286, "learning_rate": 0.0007745192961000136, "loss": 3.7661, "step": 26560 }, { "epoch": 1.804932735426009, "grad_norm": 1.0663865804672241, "learning_rate": 0.0007744768310911809, "loss": 3.5065, "step": 26565 }, { "epoch": 1.8052724554966706, "grad_norm": 1.158074140548706, "learning_rate": 0.0007744343660823482, "loss": 3.6291, "step": 26570 }, { "epoch": 1.8056121755673327, "grad_norm": 1.34388267993927, "learning_rate": 0.0007743919010735154, "loss": 3.414, "step": 26575 }, { "epoch": 1.8059518956379943, "grad_norm": 1.2835031747817993, "learning_rate": 0.0007743494360646826, "loss": 3.4825, "step": 26580 }, { "epoch": 1.806291615708656, "grad_norm": 1.1468055248260498, "learning_rate": 0.00077430697105585, "loss": 3.516, "step": 26585 }, { "epoch": 1.806631335779318, "grad_norm": 1.3001049757003784, "learning_rate": 0.0007742645060470172, "loss": 3.6462, "step": 26590 }, { "epoch": 1.8069710558499796, "grad_norm": 1.2155681848526, "learning_rate": 0.0007742220410381845, "loss": 3.6355, "step": 26595 }, { "epoch": 1.8073107759206413, "grad_norm": 1.3788566589355469, "learning_rate": 0.0007741795760293519, "loss": 3.7474, "step": 26600 }, { "epoch": 1.8076504959913031, "grad_norm": 1.2060692310333252, "learning_rate": 0.0007741371110205191, "loss": 3.5128, "step": 26605 }, { "epoch": 1.807990216061965, 
"grad_norm": 1.1131447553634644, "learning_rate": 0.0007740946460116863, "loss": 3.5621, "step": 26610 }, { "epoch": 1.8083299361326266, "grad_norm": 1.0796451568603516, "learning_rate": 0.0007740521810028537, "loss": 3.5277, "step": 26615 }, { "epoch": 1.8086696562032885, "grad_norm": 1.0928261280059814, "learning_rate": 0.0007740097159940209, "loss": 3.6396, "step": 26620 }, { "epoch": 1.8090093762739503, "grad_norm": 1.2031986713409424, "learning_rate": 0.0007739672509851882, "loss": 3.4824, "step": 26625 }, { "epoch": 1.809349096344612, "grad_norm": 1.0510977506637573, "learning_rate": 0.0007739247859763556, "loss": 3.6493, "step": 26630 }, { "epoch": 1.8096888164152738, "grad_norm": 1.2147964239120483, "learning_rate": 0.0007738823209675228, "loss": 3.9261, "step": 26635 }, { "epoch": 1.8100285364859356, "grad_norm": 1.33250892162323, "learning_rate": 0.0007738398559586901, "loss": 3.5894, "step": 26640 }, { "epoch": 1.8103682565565973, "grad_norm": 1.0093915462493896, "learning_rate": 0.0007737973909498573, "loss": 3.6312, "step": 26645 }, { "epoch": 1.8107079766272591, "grad_norm": 1.0107886791229248, "learning_rate": 0.0007737549259410246, "loss": 3.6739, "step": 26650 }, { "epoch": 1.811047696697921, "grad_norm": 1.2335294485092163, "learning_rate": 0.0007737124609321919, "loss": 3.6063, "step": 26655 }, { "epoch": 1.8113874167685826, "grad_norm": 1.4043267965316772, "learning_rate": 0.0007736699959233591, "loss": 3.5478, "step": 26660 }, { "epoch": 1.8117271368392445, "grad_norm": 1.0838686227798462, "learning_rate": 0.0007736275309145265, "loss": 3.5259, "step": 26665 }, { "epoch": 1.8120668569099063, "grad_norm": 1.3999860286712646, "learning_rate": 0.0007735850659056938, "loss": 3.5164, "step": 26670 }, { "epoch": 1.812406576980568, "grad_norm": 1.3508538007736206, "learning_rate": 0.000773542600896861, "loss": 3.5443, "step": 26675 }, { "epoch": 1.8127462970512298, "grad_norm": 1.349291443824768, "learning_rate": 0.0007735001358880283, "loss": 3.3426, 
"step": 26680 }, { "epoch": 1.8130860171218917, "grad_norm": 1.1769832372665405, "learning_rate": 0.0007734576708791956, "loss": 3.4516, "step": 26685 }, { "epoch": 1.8134257371925533, "grad_norm": 1.517034649848938, "learning_rate": 0.0007734152058703628, "loss": 3.5592, "step": 26690 }, { "epoch": 1.8137654572632151, "grad_norm": 1.3815959692001343, "learning_rate": 0.00077337274086153, "loss": 4.0047, "step": 26695 }, { "epoch": 1.814105177333877, "grad_norm": 0.973362147808075, "learning_rate": 0.0007733302758526975, "loss": 3.6074, "step": 26700 }, { "epoch": 1.8144448974045386, "grad_norm": 1.2765417098999023, "learning_rate": 0.0007732878108438647, "loss": 3.4041, "step": 26705 }, { "epoch": 1.8147846174752005, "grad_norm": 1.139986515045166, "learning_rate": 0.0007732453458350319, "loss": 3.8087, "step": 26710 }, { "epoch": 1.8151243375458623, "grad_norm": 1.9075626134872437, "learning_rate": 0.0007732028808261993, "loss": 3.5565, "step": 26715 }, { "epoch": 1.815464057616524, "grad_norm": 1.4095131158828735, "learning_rate": 0.0007731604158173665, "loss": 3.532, "step": 26720 }, { "epoch": 1.8158037776871856, "grad_norm": 1.1412431001663208, "learning_rate": 0.0007731179508085337, "loss": 3.6157, "step": 26725 }, { "epoch": 1.8161434977578477, "grad_norm": 1.4008877277374268, "learning_rate": 0.0007730754857997012, "loss": 3.7608, "step": 26730 }, { "epoch": 1.8164832178285093, "grad_norm": 1.3580085039138794, "learning_rate": 0.0007730330207908684, "loss": 3.8099, "step": 26735 }, { "epoch": 1.816822937899171, "grad_norm": 1.190251111984253, "learning_rate": 0.0007729905557820356, "loss": 3.2356, "step": 26740 }, { "epoch": 1.817162657969833, "grad_norm": 0.9673405289649963, "learning_rate": 0.000772948090773203, "loss": 3.7258, "step": 26745 }, { "epoch": 1.8175023780404946, "grad_norm": 1.881677269935608, "learning_rate": 0.0007729056257643702, "loss": 3.8225, "step": 26750 }, { "epoch": 1.8178420981111563, "grad_norm": 0.9549523591995239, 
"learning_rate": 0.0007728631607555374, "loss": 3.5619, "step": 26755 }, { "epoch": 1.8181818181818183, "grad_norm": 1.8933168649673462, "learning_rate": 0.0007728206957467047, "loss": 3.5798, "step": 26760 }, { "epoch": 1.81852153825248, "grad_norm": 1.2580595016479492, "learning_rate": 0.0007727782307378721, "loss": 3.4213, "step": 26765 }, { "epoch": 1.8188612583231416, "grad_norm": 1.2493294477462769, "learning_rate": 0.0007727357657290393, "loss": 3.6746, "step": 26770 }, { "epoch": 1.8192009783938035, "grad_norm": 1.54487943649292, "learning_rate": 0.0007726933007202066, "loss": 3.7318, "step": 26775 }, { "epoch": 1.8195406984644653, "grad_norm": 1.5581835508346558, "learning_rate": 0.0007726508357113739, "loss": 3.3389, "step": 26780 }, { "epoch": 1.819880418535127, "grad_norm": 1.3381376266479492, "learning_rate": 0.0007726083707025411, "loss": 3.5491, "step": 26785 }, { "epoch": 1.8202201386057888, "grad_norm": 1.4877840280532837, "learning_rate": 0.0007725659056937084, "loss": 3.4857, "step": 26790 }, { "epoch": 1.8205598586764506, "grad_norm": 1.378201961517334, "learning_rate": 0.0007725234406848756, "loss": 3.4833, "step": 26795 }, { "epoch": 1.8208995787471123, "grad_norm": 1.2545347213745117, "learning_rate": 0.000772480975676043, "loss": 3.6413, "step": 26800 }, { "epoch": 1.8212392988177741, "grad_norm": 1.3558136224746704, "learning_rate": 0.0007724385106672103, "loss": 3.8154, "step": 26805 }, { "epoch": 1.821579018888436, "grad_norm": 1.697849154472351, "learning_rate": 0.0007723960456583775, "loss": 3.5975, "step": 26810 }, { "epoch": 1.8219187389590976, "grad_norm": 1.3237621784210205, "learning_rate": 0.0007723535806495448, "loss": 3.5537, "step": 26815 }, { "epoch": 1.8222584590297595, "grad_norm": 1.1802462339401245, "learning_rate": 0.0007723111156407121, "loss": 3.4618, "step": 26820 }, { "epoch": 1.8225981791004213, "grad_norm": 1.3712077140808105, "learning_rate": 0.0007722686506318793, "loss": 3.456, "step": 26825 }, { "epoch": 
1.822937899171083, "grad_norm": 1.272110104560852, "learning_rate": 0.0007722261856230465, "loss": 3.5678, "step": 26830 }, { "epoch": 1.8232776192417448, "grad_norm": 1.1120535135269165, "learning_rate": 0.000772183720614214, "loss": 3.5511, "step": 26835 }, { "epoch": 1.8236173393124067, "grad_norm": 1.2827774286270142, "learning_rate": 0.0007721412556053812, "loss": 3.6286, "step": 26840 }, { "epoch": 1.8239570593830683, "grad_norm": 1.4040043354034424, "learning_rate": 0.0007720987905965484, "loss": 3.6071, "step": 26845 }, { "epoch": 1.8242967794537301, "grad_norm": 1.2527680397033691, "learning_rate": 0.0007720563255877158, "loss": 3.7323, "step": 26850 }, { "epoch": 1.824636499524392, "grad_norm": 1.1675227880477905, "learning_rate": 0.000772013860578883, "loss": 3.6726, "step": 26855 }, { "epoch": 1.8249762195950536, "grad_norm": 1.4241865873336792, "learning_rate": 0.0007719713955700502, "loss": 3.4443, "step": 26860 }, { "epoch": 1.8253159396657155, "grad_norm": 1.0836495161056519, "learning_rate": 0.0007719289305612176, "loss": 3.7152, "step": 26865 }, { "epoch": 1.8256556597363773, "grad_norm": 1.4775186777114868, "learning_rate": 0.0007718864655523849, "loss": 3.674, "step": 26870 }, { "epoch": 1.825995379807039, "grad_norm": 1.6649866104125977, "learning_rate": 0.0007718440005435521, "loss": 4.0349, "step": 26875 }, { "epoch": 1.8263350998777008, "grad_norm": 1.3500049114227295, "learning_rate": 0.0007718015355347195, "loss": 3.6448, "step": 26880 }, { "epoch": 1.8266748199483627, "grad_norm": 1.1183327436447144, "learning_rate": 0.0007717590705258867, "loss": 3.5207, "step": 26885 }, { "epoch": 1.8270145400190243, "grad_norm": 1.5510685443878174, "learning_rate": 0.0007717166055170539, "loss": 3.4986, "step": 26890 }, { "epoch": 1.827354260089686, "grad_norm": 1.137037992477417, "learning_rate": 0.0007716741405082212, "loss": 3.5752, "step": 26895 }, { "epoch": 1.827693980160348, "grad_norm": 1.5449219942092896, "learning_rate": 
0.0007716316754993885, "loss": 3.4508, "step": 26900 }, { "epoch": 1.8280337002310096, "grad_norm": 1.5704814195632935, "learning_rate": 0.0007715892104905558, "loss": 3.5695, "step": 26905 }, { "epoch": 1.8283734203016713, "grad_norm": 1.162371277809143, "learning_rate": 0.0007715467454817231, "loss": 3.5361, "step": 26910 }, { "epoch": 1.8287131403723333, "grad_norm": 1.394514799118042, "learning_rate": 0.0007715042804728904, "loss": 3.7583, "step": 26915 }, { "epoch": 1.829052860442995, "grad_norm": 1.2184228897094727, "learning_rate": 0.0007714618154640576, "loss": 3.4939, "step": 26920 }, { "epoch": 1.8293925805136566, "grad_norm": 1.8947824239730835, "learning_rate": 0.0007714193504552249, "loss": 3.4796, "step": 26925 }, { "epoch": 1.8297323005843187, "grad_norm": 1.2218499183654785, "learning_rate": 0.0007713768854463921, "loss": 3.6554, "step": 26930 }, { "epoch": 1.8300720206549803, "grad_norm": 0.924363374710083, "learning_rate": 0.0007713344204375594, "loss": 3.6771, "step": 26935 }, { "epoch": 1.830411740725642, "grad_norm": 1.2417479753494263, "learning_rate": 0.0007712919554287268, "loss": 3.5291, "step": 26940 }, { "epoch": 1.8307514607963038, "grad_norm": 1.1614644527435303, "learning_rate": 0.000771249490419894, "loss": 3.5057, "step": 26945 }, { "epoch": 1.8310911808669657, "grad_norm": 1.250901222229004, "learning_rate": 0.0007712070254110613, "loss": 3.3829, "step": 26950 }, { "epoch": 1.8314309009376273, "grad_norm": 1.5804471969604492, "learning_rate": 0.0007711645604022286, "loss": 3.5097, "step": 26955 }, { "epoch": 1.8317706210082891, "grad_norm": 1.02223801612854, "learning_rate": 0.0007711220953933958, "loss": 3.5951, "step": 26960 }, { "epoch": 1.832110341078951, "grad_norm": 1.0468556880950928, "learning_rate": 0.0007710796303845632, "loss": 3.3568, "step": 26965 }, { "epoch": 1.8324500611496126, "grad_norm": 1.4633285999298096, "learning_rate": 0.0007710371653757304, "loss": 3.5914, "step": 26970 }, { "epoch": 1.8327897812202745, 
"grad_norm": 1.4375286102294922, "learning_rate": 0.0007709947003668977, "loss": 3.8943, "step": 26975 }, { "epoch": 1.8331295012909363, "grad_norm": 1.0483629703521729, "learning_rate": 0.0007709522353580651, "loss": 3.6085, "step": 26980 }, { "epoch": 1.833469221361598, "grad_norm": 1.204525351524353, "learning_rate": 0.0007709097703492323, "loss": 3.4691, "step": 26985 }, { "epoch": 1.8338089414322598, "grad_norm": 1.2572399377822876, "learning_rate": 0.0007708673053403995, "loss": 3.5297, "step": 26990 }, { "epoch": 1.8341486615029217, "grad_norm": 1.4919087886810303, "learning_rate": 0.0007708248403315668, "loss": 3.3024, "step": 26995 }, { "epoch": 1.8344883815735833, "grad_norm": 1.255413293838501, "learning_rate": 0.0007707823753227341, "loss": 3.6865, "step": 27000 }, { "epoch": 1.8348281016442451, "grad_norm": 1.5886569023132324, "learning_rate": 0.0007707399103139013, "loss": 3.3766, "step": 27005 }, { "epoch": 1.835167821714907, "grad_norm": 1.6833311319351196, "learning_rate": 0.0007706974453050687, "loss": 3.5297, "step": 27010 }, { "epoch": 1.8355075417855686, "grad_norm": 1.133723497390747, "learning_rate": 0.000770654980296236, "loss": 3.5493, "step": 27015 }, { "epoch": 1.8358472618562305, "grad_norm": 1.2983289957046509, "learning_rate": 0.0007706125152874032, "loss": 3.6294, "step": 27020 }, { "epoch": 1.8361869819268923, "grad_norm": 1.4969980716705322, "learning_rate": 0.0007705700502785705, "loss": 3.6545, "step": 27025 }, { "epoch": 1.836526701997554, "grad_norm": 1.7400565147399902, "learning_rate": 0.0007705275852697377, "loss": 3.4401, "step": 27030 }, { "epoch": 1.8368664220682158, "grad_norm": 1.0775835514068604, "learning_rate": 0.000770485120260905, "loss": 3.6397, "step": 27035 }, { "epoch": 1.8372061421388777, "grad_norm": 1.1618027687072754, "learning_rate": 0.0007704426552520723, "loss": 3.2302, "step": 27040 }, { "epoch": 1.8375458622095393, "grad_norm": 1.4577332735061646, "learning_rate": 0.0007704001902432396, "loss": 3.4974, 
"step": 27045 }, { "epoch": 1.8378855822802012, "grad_norm": 1.1136289834976196, "learning_rate": 0.0007703577252344069, "loss": 3.6077, "step": 27050 }, { "epoch": 1.838225302350863, "grad_norm": 1.2922943830490112, "learning_rate": 0.0007703152602255742, "loss": 3.534, "step": 27055 }, { "epoch": 1.8385650224215246, "grad_norm": 1.1496890783309937, "learning_rate": 0.0007702727952167414, "loss": 3.4057, "step": 27060 }, { "epoch": 1.8389047424921863, "grad_norm": 1.4107415676116943, "learning_rate": 0.0007702303302079087, "loss": 3.6087, "step": 27065 }, { "epoch": 1.8392444625628483, "grad_norm": 1.237190842628479, "learning_rate": 0.000770187865199076, "loss": 3.598, "step": 27070 }, { "epoch": 1.83958418263351, "grad_norm": 1.3535922765731812, "learning_rate": 0.0007701454001902432, "loss": 3.6607, "step": 27075 }, { "epoch": 1.8399239027041716, "grad_norm": 1.3109748363494873, "learning_rate": 0.0007701029351814105, "loss": 3.5985, "step": 27080 }, { "epoch": 1.8402636227748337, "grad_norm": 1.6493655443191528, "learning_rate": 0.0007700604701725779, "loss": 3.5174, "step": 27085 }, { "epoch": 1.8406033428454953, "grad_norm": 1.4688421487808228, "learning_rate": 0.0007700180051637451, "loss": 3.6567, "step": 27090 }, { "epoch": 1.840943062916157, "grad_norm": 1.2266325950622559, "learning_rate": 0.0007699755401549123, "loss": 3.379, "step": 27095 }, { "epoch": 1.841282782986819, "grad_norm": 1.4901938438415527, "learning_rate": 0.0007699330751460797, "loss": 3.5859, "step": 27100 }, { "epoch": 1.8416225030574807, "grad_norm": 1.2160972356796265, "learning_rate": 0.0007698906101372469, "loss": 3.4905, "step": 27105 }, { "epoch": 1.8419622231281423, "grad_norm": 1.2348300218582153, "learning_rate": 0.0007698481451284141, "loss": 3.7408, "step": 27110 }, { "epoch": 1.8423019431988041, "grad_norm": 1.26332688331604, "learning_rate": 0.0007698056801195816, "loss": 3.3826, "step": 27115 }, { "epoch": 1.842641663269466, "grad_norm": 1.182227611541748, 
"learning_rate": 0.0007697632151107488, "loss": 3.7282, "step": 27120 }, { "epoch": 1.8429813833401276, "grad_norm": 1.4993922710418701, "learning_rate": 0.000769720750101916, "loss": 3.7188, "step": 27125 }, { "epoch": 1.8433211034107895, "grad_norm": 1.3588931560516357, "learning_rate": 0.0007696782850930833, "loss": 3.6877, "step": 27130 }, { "epoch": 1.8436608234814513, "grad_norm": 1.1503851413726807, "learning_rate": 0.0007696358200842506, "loss": 3.6463, "step": 27135 }, { "epoch": 1.844000543552113, "grad_norm": 1.0833377838134766, "learning_rate": 0.0007695933550754178, "loss": 3.6608, "step": 27140 }, { "epoch": 1.8443402636227748, "grad_norm": 0.9342405200004578, "learning_rate": 0.0007695508900665851, "loss": 3.4137, "step": 27145 }, { "epoch": 1.8446799836934367, "grad_norm": 1.5317381620407104, "learning_rate": 0.0007695084250577525, "loss": 3.6988, "step": 27150 }, { "epoch": 1.8450197037640983, "grad_norm": 1.176310420036316, "learning_rate": 0.0007694659600489197, "loss": 3.6966, "step": 27155 }, { "epoch": 1.8453594238347601, "grad_norm": 1.1411482095718384, "learning_rate": 0.000769423495040087, "loss": 3.6586, "step": 27160 }, { "epoch": 1.845699143905422, "grad_norm": 1.579939603805542, "learning_rate": 0.0007693810300312543, "loss": 3.2637, "step": 27165 }, { "epoch": 1.8460388639760836, "grad_norm": 1.38289475440979, "learning_rate": 0.0007693385650224215, "loss": 3.5475, "step": 27170 }, { "epoch": 1.8463785840467455, "grad_norm": 1.3729966878890991, "learning_rate": 0.0007692961000135888, "loss": 3.7989, "step": 27175 }, { "epoch": 1.8467183041174073, "grad_norm": 1.6177645921707153, "learning_rate": 0.000769253635004756, "loss": 3.8311, "step": 27180 }, { "epoch": 1.847058024188069, "grad_norm": 1.5917199850082397, "learning_rate": 0.0007692111699959234, "loss": 3.6863, "step": 27185 }, { "epoch": 1.8473977442587308, "grad_norm": 1.2784544229507446, "learning_rate": 0.0007691687049870907, "loss": 3.9126, "step": 27190 }, { "epoch": 
1.8477374643293927, "grad_norm": 1.2121577262878418, "learning_rate": 0.0007691262399782579, "loss": 3.7739, "step": 27195 }, { "epoch": 1.8480771844000543, "grad_norm": 1.0594725608825684, "learning_rate": 0.0007690837749694252, "loss": 3.5724, "step": 27200 }, { "epoch": 1.8484169044707162, "grad_norm": 1.065691351890564, "learning_rate": 0.0007690413099605925, "loss": 3.9073, "step": 27205 }, { "epoch": 1.848756624541378, "grad_norm": 1.2114990949630737, "learning_rate": 0.0007689988449517597, "loss": 3.5177, "step": 27210 }, { "epoch": 1.8490963446120396, "grad_norm": 0.985390841960907, "learning_rate": 0.000768956379942927, "loss": 3.3339, "step": 27215 }, { "epoch": 1.8494360646827015, "grad_norm": 1.283360242843628, "learning_rate": 0.0007689139149340944, "loss": 3.811, "step": 27220 }, { "epoch": 1.8497757847533634, "grad_norm": 1.4688066244125366, "learning_rate": 0.0007688714499252616, "loss": 3.3896, "step": 27225 }, { "epoch": 1.850115504824025, "grad_norm": 1.4998786449432373, "learning_rate": 0.0007688289849164288, "loss": 3.3983, "step": 27230 }, { "epoch": 1.8504552248946866, "grad_norm": 1.041913390159607, "learning_rate": 0.0007687865199075962, "loss": 3.6366, "step": 27235 }, { "epoch": 1.8507949449653487, "grad_norm": 1.3759651184082031, "learning_rate": 0.0007687440548987634, "loss": 3.6009, "step": 27240 }, { "epoch": 1.8511346650360103, "grad_norm": 1.5749119520187378, "learning_rate": 0.0007687015898899306, "loss": 3.6015, "step": 27245 }, { "epoch": 1.851474385106672, "grad_norm": 2.4165682792663574, "learning_rate": 0.000768659124881098, "loss": 3.4996, "step": 27250 }, { "epoch": 1.851814105177334, "grad_norm": 1.3084397315979004, "learning_rate": 0.0007686166598722653, "loss": 3.4754, "step": 27255 }, { "epoch": 1.8521538252479957, "grad_norm": 1.1383206844329834, "learning_rate": 0.0007685741948634325, "loss": 3.6037, "step": 27260 }, { "epoch": 1.8524935453186573, "grad_norm": 1.332067608833313, "learning_rate": 0.0007685317298545999, 
"loss": 3.6905, "step": 27265 }, { "epoch": 1.8528332653893194, "grad_norm": 1.5110269784927368, "learning_rate": 0.0007684977578475336, "loss": 3.6513, "step": 27270 }, { "epoch": 1.853172985459981, "grad_norm": 1.4839502573013306, "learning_rate": 0.000768455292838701, "loss": 3.8773, "step": 27275 }, { "epoch": 1.8535127055306426, "grad_norm": 1.3741859197616577, "learning_rate": 0.0007684128278298682, "loss": 3.6999, "step": 27280 }, { "epoch": 1.8538524256013045, "grad_norm": 1.4411604404449463, "learning_rate": 0.0007683703628210355, "loss": 3.5854, "step": 27285 }, { "epoch": 1.8541921456719663, "grad_norm": 0.9121926426887512, "learning_rate": 0.0007683278978122028, "loss": 3.6629, "step": 27290 }, { "epoch": 1.854531865742628, "grad_norm": 1.7461163997650146, "learning_rate": 0.00076828543280337, "loss": 3.4898, "step": 27295 }, { "epoch": 1.8548715858132898, "grad_norm": 1.9408386945724487, "learning_rate": 0.0007682429677945373, "loss": 3.6263, "step": 27300 }, { "epoch": 1.8552113058839517, "grad_norm": 1.2830746173858643, "learning_rate": 0.0007682005027857046, "loss": 3.5196, "step": 27305 }, { "epoch": 1.8555510259546133, "grad_norm": 1.589985966682434, "learning_rate": 0.0007681580377768719, "loss": 3.4013, "step": 27310 }, { "epoch": 1.8558907460252752, "grad_norm": 2.2397828102111816, "learning_rate": 0.0007681155727680391, "loss": 3.8341, "step": 27315 }, { "epoch": 1.856230466095937, "grad_norm": 1.262548565864563, "learning_rate": 0.0007680731077592065, "loss": 3.4387, "step": 27320 }, { "epoch": 1.8565701861665986, "grad_norm": 1.195394515991211, "learning_rate": 0.0007680306427503737, "loss": 3.4298, "step": 27325 }, { "epoch": 1.8569099062372605, "grad_norm": 1.3813493251800537, "learning_rate": 0.0007679881777415409, "loss": 3.4139, "step": 27330 }, { "epoch": 1.8572496263079223, "grad_norm": 1.420486569404602, "learning_rate": 0.0007679457127327083, "loss": 3.527, "step": 27335 }, { "epoch": 1.857589346378584, "grad_norm": 
1.5514522790908813, "learning_rate": 0.0007679032477238755, "loss": 3.4653, "step": 27340 }, { "epoch": 1.8579290664492458, "grad_norm": 1.5657739639282227, "learning_rate": 0.0007678607827150428, "loss": 3.3949, "step": 27345 }, { "epoch": 1.8582687865199077, "grad_norm": 1.442674994468689, "learning_rate": 0.0007678183177062102, "loss": 3.5228, "step": 27350 }, { "epoch": 1.8586085065905693, "grad_norm": 1.4417861700057983, "learning_rate": 0.0007677758526973774, "loss": 3.5837, "step": 27355 }, { "epoch": 1.8589482266612312, "grad_norm": 1.1437045335769653, "learning_rate": 0.0007677333876885446, "loss": 3.5165, "step": 27360 }, { "epoch": 1.859287946731893, "grad_norm": 1.2851048707962036, "learning_rate": 0.000767690922679712, "loss": 3.5096, "step": 27365 }, { "epoch": 1.8596276668025546, "grad_norm": 1.2702115774154663, "learning_rate": 0.0007676484576708792, "loss": 3.4483, "step": 27370 }, { "epoch": 1.8599673868732165, "grad_norm": 1.8857723474502563, "learning_rate": 0.0007676059926620464, "loss": 3.593, "step": 27375 }, { "epoch": 1.8603071069438784, "grad_norm": 1.2759296894073486, "learning_rate": 0.0007675635276532138, "loss": 3.443, "step": 27380 }, { "epoch": 1.86064682701454, "grad_norm": 1.3786485195159912, "learning_rate": 0.0007675210626443811, "loss": 3.5072, "step": 27385 }, { "epoch": 1.8609865470852018, "grad_norm": 1.4137381315231323, "learning_rate": 0.0007674785976355483, "loss": 3.3283, "step": 27390 }, { "epoch": 1.8613262671558637, "grad_norm": 1.2924138307571411, "learning_rate": 0.0007674361326267156, "loss": 3.6782, "step": 27395 }, { "epoch": 1.8616659872265253, "grad_norm": 1.8150190114974976, "learning_rate": 0.0007673936676178829, "loss": 3.848, "step": 27400 }, { "epoch": 1.862005707297187, "grad_norm": 1.5644659996032715, "learning_rate": 0.0007673512026090501, "loss": 3.399, "step": 27405 }, { "epoch": 1.862345427367849, "grad_norm": 1.3417084217071533, "learning_rate": 0.0007673087376002174, "loss": 3.7889, "step": 27410 }, 
{ "epoch": 1.8626851474385107, "grad_norm": 1.3148434162139893, "learning_rate": 0.0007672662725913847, "loss": 3.8144, "step": 27415 }, { "epoch": 1.8630248675091723, "grad_norm": 1.36183762550354, "learning_rate": 0.000767223807582552, "loss": 3.7569, "step": 27420 }, { "epoch": 1.8633645875798344, "grad_norm": 1.1733989715576172, "learning_rate": 0.0007671813425737193, "loss": 3.6949, "step": 27425 }, { "epoch": 1.863704307650496, "grad_norm": 1.3686009645462036, "learning_rate": 0.0007671388775648865, "loss": 3.7265, "step": 27430 }, { "epoch": 1.8640440277211576, "grad_norm": 1.014806866645813, "learning_rate": 0.0007670964125560538, "loss": 3.6254, "step": 27435 }, { "epoch": 1.8643837477918197, "grad_norm": 1.5851805210113525, "learning_rate": 0.0007670539475472211, "loss": 3.7684, "step": 27440 }, { "epoch": 1.8647234678624813, "grad_norm": 1.411867618560791, "learning_rate": 0.0007670114825383883, "loss": 3.6032, "step": 27445 }, { "epoch": 1.865063187933143, "grad_norm": 1.499597191810608, "learning_rate": 0.0007669690175295557, "loss": 3.3935, "step": 27450 }, { "epoch": 1.8654029080038048, "grad_norm": 1.3957639932632446, "learning_rate": 0.000766926552520723, "loss": 3.3942, "step": 27455 }, { "epoch": 1.8657426280744667, "grad_norm": 1.2168205976486206, "learning_rate": 0.0007668840875118902, "loss": 3.6299, "step": 27460 }, { "epoch": 1.8660823481451283, "grad_norm": 1.7990809679031372, "learning_rate": 0.0007668416225030574, "loss": 3.3675, "step": 27465 }, { "epoch": 1.8664220682157902, "grad_norm": 1.4863035678863525, "learning_rate": 0.0007667991574942248, "loss": 3.5192, "step": 27470 }, { "epoch": 1.866761788286452, "grad_norm": 1.1375138759613037, "learning_rate": 0.000766756692485392, "loss": 3.3592, "step": 27475 }, { "epoch": 1.8671015083571136, "grad_norm": 1.4694032669067383, "learning_rate": 0.0007667142274765592, "loss": 3.6777, "step": 27480 }, { "epoch": 1.8674412284277755, "grad_norm": 1.1544198989868164, "learning_rate": 
0.0007666717624677267, "loss": 3.4346, "step": 27485 }, { "epoch": 1.8677809484984373, "grad_norm": 1.5520405769348145, "learning_rate": 0.0007666292974588939, "loss": 3.511, "step": 27490 }, { "epoch": 1.868120668569099, "grad_norm": 2.00580096244812, "learning_rate": 0.0007665868324500611, "loss": 3.8243, "step": 27495 }, { "epoch": 1.8684603886397608, "grad_norm": 1.7116119861602783, "learning_rate": 0.0007665443674412285, "loss": 3.4285, "step": 27500 }, { "epoch": 1.8688001087104227, "grad_norm": 1.2644950151443481, "learning_rate": 0.0007665019024323957, "loss": 3.757, "step": 27505 }, { "epoch": 1.8691398287810843, "grad_norm": 1.7437559366226196, "learning_rate": 0.000766459437423563, "loss": 3.3808, "step": 27510 }, { "epoch": 1.8694795488517462, "grad_norm": 1.2968615293502808, "learning_rate": 0.0007664169724147302, "loss": 3.6408, "step": 27515 }, { "epoch": 1.869819268922408, "grad_norm": 1.214922308921814, "learning_rate": 0.0007663745074058976, "loss": 3.3717, "step": 27520 }, { "epoch": 1.8701589889930696, "grad_norm": 1.8295470476150513, "learning_rate": 0.0007663320423970649, "loss": 3.5204, "step": 27525 }, { "epoch": 1.8704987090637315, "grad_norm": 1.0161670446395874, "learning_rate": 0.0007662895773882321, "loss": 3.725, "step": 27530 }, { "epoch": 1.8708384291343934, "grad_norm": 1.4248592853546143, "learning_rate": 0.0007662471123793994, "loss": 3.2424, "step": 27535 }, { "epoch": 1.871178149205055, "grad_norm": 1.3434702157974243, "learning_rate": 0.0007662046473705667, "loss": 3.8284, "step": 27540 }, { "epoch": 1.8715178692757168, "grad_norm": 1.1371835470199585, "learning_rate": 0.0007661621823617339, "loss": 3.5985, "step": 27545 }, { "epoch": 1.8718575893463787, "grad_norm": 1.1678199768066406, "learning_rate": 0.0007661197173529011, "loss": 3.8405, "step": 27550 }, { "epoch": 1.8721973094170403, "grad_norm": 1.4255861043930054, "learning_rate": 0.0007660772523440686, "loss": 3.5608, "step": 27555 }, { "epoch": 1.8725370294877022, 
"grad_norm": 1.4313468933105469, "learning_rate": 0.0007660347873352358, "loss": 3.148, "step": 27560 }, { "epoch": 1.872876749558364, "grad_norm": 1.2657195329666138, "learning_rate": 0.000765992322326403, "loss": 3.6488, "step": 27565 }, { "epoch": 1.8732164696290257, "grad_norm": 1.1884350776672363, "learning_rate": 0.0007659498573175704, "loss": 3.4579, "step": 27570 }, { "epoch": 1.8735561896996873, "grad_norm": 1.379087209701538, "learning_rate": 0.0007659073923087376, "loss": 3.7333, "step": 27575 }, { "epoch": 1.8738959097703494, "grad_norm": 1.641357421875, "learning_rate": 0.0007658649272999048, "loss": 3.7159, "step": 27580 }, { "epoch": 1.874235629841011, "grad_norm": 1.167484998703003, "learning_rate": 0.0007658224622910723, "loss": 3.5825, "step": 27585 }, { "epoch": 1.8745753499116726, "grad_norm": 0.9785139560699463, "learning_rate": 0.0007657799972822395, "loss": 3.4396, "step": 27590 }, { "epoch": 1.8749150699823347, "grad_norm": 1.3817058801651, "learning_rate": 0.0007657375322734067, "loss": 3.5127, "step": 27595 }, { "epoch": 1.8752547900529963, "grad_norm": 1.4198931455612183, "learning_rate": 0.0007656950672645741, "loss": 3.4342, "step": 27600 }, { "epoch": 1.875594510123658, "grad_norm": 1.637352705001831, "learning_rate": 0.0007656526022557413, "loss": 3.7706, "step": 27605 }, { "epoch": 1.87593423019432, "grad_norm": 1.2232520580291748, "learning_rate": 0.0007656101372469085, "loss": 3.4756, "step": 27610 }, { "epoch": 1.8762739502649817, "grad_norm": 1.189229130744934, "learning_rate": 0.0007655676722380758, "loss": 3.7497, "step": 27615 }, { "epoch": 1.8766136703356433, "grad_norm": 1.3521384000778198, "learning_rate": 0.0007655252072292432, "loss": 3.5918, "step": 27620 }, { "epoch": 1.8769533904063052, "grad_norm": 1.396863341331482, "learning_rate": 0.0007654827422204104, "loss": 3.7336, "step": 27625 }, { "epoch": 1.877293110476967, "grad_norm": 2.5177719593048096, "learning_rate": 0.0007654402772115777, "loss": 3.6504, "step": 
27630 }, { "epoch": 1.8776328305476286, "grad_norm": 1.2737030982971191, "learning_rate": 0.000765397812202745, "loss": 3.2363, "step": 27635 }, { "epoch": 1.8779725506182905, "grad_norm": 1.0794095993041992, "learning_rate": 0.0007653553471939122, "loss": 3.6427, "step": 27640 }, { "epoch": 1.8783122706889523, "grad_norm": 1.2565425634384155, "learning_rate": 0.0007653128821850795, "loss": 3.3084, "step": 27645 }, { "epoch": 1.878651990759614, "grad_norm": 1.6568268537521362, "learning_rate": 0.0007652704171762468, "loss": 3.4399, "step": 27650 }, { "epoch": 1.8789917108302758, "grad_norm": 1.101045846939087, "learning_rate": 0.0007652279521674141, "loss": 3.6016, "step": 27655 }, { "epoch": 1.8793314309009377, "grad_norm": 1.153241515159607, "learning_rate": 0.0007651854871585814, "loss": 3.6524, "step": 27660 }, { "epoch": 1.8796711509715993, "grad_norm": 1.689009189605713, "learning_rate": 0.0007651430221497486, "loss": 3.6763, "step": 27665 }, { "epoch": 1.8800108710422612, "grad_norm": 1.2642607688903809, "learning_rate": 0.0007651005571409159, "loss": 3.5562, "step": 27670 }, { "epoch": 1.880350591112923, "grad_norm": 1.388503074645996, "learning_rate": 0.0007650580921320832, "loss": 3.4536, "step": 27675 }, { "epoch": 1.8806903111835847, "grad_norm": 1.3025381565093994, "learning_rate": 0.0007650156271232504, "loss": 3.7336, "step": 27680 }, { "epoch": 1.8810300312542465, "grad_norm": 1.2342548370361328, "learning_rate": 0.0007649731621144177, "loss": 3.4816, "step": 27685 }, { "epoch": 1.8813697513249084, "grad_norm": 1.0989567041397095, "learning_rate": 0.0007649306971055851, "loss": 3.5293, "step": 27690 }, { "epoch": 1.88170947139557, "grad_norm": 1.4753282070159912, "learning_rate": 0.0007648882320967523, "loss": 3.7954, "step": 27695 }, { "epoch": 1.8820491914662318, "grad_norm": 1.4609777927398682, "learning_rate": 0.0007648457670879196, "loss": 3.4202, "step": 27700 }, { "epoch": 1.8823889115368937, "grad_norm": 1.4121716022491455, "learning_rate": 
0.0007648033020790869, "loss": 3.4949, "step": 27705 }, { "epoch": 1.8827286316075553, "grad_norm": 1.453943133354187, "learning_rate": 0.0007647608370702541, "loss": 3.5971, "step": 27710 }, { "epoch": 1.8830683516782172, "grad_norm": 1.2839056253433228, "learning_rate": 0.0007647183720614213, "loss": 3.707, "step": 27715 }, { "epoch": 1.883408071748879, "grad_norm": 2.119719982147217, "learning_rate": 0.0007646759070525887, "loss": 3.533, "step": 27720 }, { "epoch": 1.8837477918195407, "grad_norm": 1.084376573562622, "learning_rate": 0.000764633442043756, "loss": 3.6141, "step": 27725 }, { "epoch": 1.8840875118902025, "grad_norm": 1.3776742219924927, "learning_rate": 0.0007645909770349232, "loss": 3.5564, "step": 27730 }, { "epoch": 1.8844272319608644, "grad_norm": 1.371148943901062, "learning_rate": 0.0007645485120260906, "loss": 3.6521, "step": 27735 }, { "epoch": 1.884766952031526, "grad_norm": 1.5491524934768677, "learning_rate": 0.0007645060470172578, "loss": 3.7319, "step": 27740 }, { "epoch": 1.8851066721021879, "grad_norm": 1.1582716703414917, "learning_rate": 0.000764463582008425, "loss": 3.4711, "step": 27745 }, { "epoch": 1.8854463921728497, "grad_norm": 0.9755056500434875, "learning_rate": 0.0007644211169995924, "loss": 3.4743, "step": 27750 }, { "epoch": 1.8857861122435113, "grad_norm": 1.1355526447296143, "learning_rate": 0.0007643786519907596, "loss": 3.4367, "step": 27755 }, { "epoch": 1.886125832314173, "grad_norm": 1.37941575050354, "learning_rate": 0.0007643361869819269, "loss": 3.6846, "step": 27760 }, { "epoch": 1.886465552384835, "grad_norm": 1.2860674858093262, "learning_rate": 0.0007642937219730942, "loss": 3.5877, "step": 27765 }, { "epoch": 1.8868052724554967, "grad_norm": 1.4847660064697266, "learning_rate": 0.0007642512569642615, "loss": 3.6774, "step": 27770 }, { "epoch": 1.8871449925261583, "grad_norm": 2.9132370948791504, "learning_rate": 0.0007642087919554287, "loss": 3.3425, "step": 27775 }, { "epoch": 1.8874847125968204, 
"grad_norm": 0.9739795327186584, "learning_rate": 0.000764166326946596, "loss": 3.4469, "step": 27780 }, { "epoch": 1.887824432667482, "grad_norm": 1.7296925783157349, "learning_rate": 0.0007641238619377633, "loss": 3.4067, "step": 27785 }, { "epoch": 1.8881641527381436, "grad_norm": 1.1963096857070923, "learning_rate": 0.0007640813969289305, "loss": 3.7206, "step": 27790 }, { "epoch": 1.8885038728088055, "grad_norm": 1.1631301641464233, "learning_rate": 0.0007640389319200979, "loss": 3.5513, "step": 27795 }, { "epoch": 1.8888435928794673, "grad_norm": 1.3537527322769165, "learning_rate": 0.0007639964669112652, "loss": 3.5197, "step": 27800 }, { "epoch": 1.889183312950129, "grad_norm": 1.1702563762664795, "learning_rate": 0.0007639540019024324, "loss": 3.5404, "step": 27805 }, { "epoch": 1.8895230330207908, "grad_norm": 1.3458991050720215, "learning_rate": 0.0007639115368935997, "loss": 3.7942, "step": 27810 }, { "epoch": 1.8898627530914527, "grad_norm": 1.8603978157043457, "learning_rate": 0.0007638690718847669, "loss": 3.5036, "step": 27815 }, { "epoch": 1.8902024731621143, "grad_norm": 1.189590573310852, "learning_rate": 0.0007638266068759342, "loss": 3.4953, "step": 27820 }, { "epoch": 1.8905421932327762, "grad_norm": 1.0317715406417847, "learning_rate": 0.0007637841418671015, "loss": 3.5571, "step": 27825 }, { "epoch": 1.890881913303438, "grad_norm": 1.3991056680679321, "learning_rate": 0.0007637416768582688, "loss": 3.9368, "step": 27830 }, { "epoch": 1.8912216333740997, "grad_norm": 1.5201003551483154, "learning_rate": 0.0007636992118494361, "loss": 3.7389, "step": 27835 }, { "epoch": 1.8915613534447615, "grad_norm": 1.1997509002685547, "learning_rate": 0.0007636567468406034, "loss": 3.3799, "step": 27840 }, { "epoch": 1.8919010735154234, "grad_norm": 1.2621963024139404, "learning_rate": 0.0007636142818317706, "loss": 3.4894, "step": 27845 }, { "epoch": 1.892240793586085, "grad_norm": 1.434952974319458, "learning_rate": 0.000763571816822938, "loss": 3.5027, 
"step": 27850 }, { "epoch": 1.8925805136567468, "grad_norm": 2.784034013748169, "learning_rate": 0.0007635293518141052, "loss": 3.8973, "step": 27855 }, { "epoch": 1.8929202337274087, "grad_norm": 1.1772072315216064, "learning_rate": 0.0007634868868052724, "loss": 3.8099, "step": 27860 }, { "epoch": 1.8932599537980703, "grad_norm": 1.5566729307174683, "learning_rate": 0.0007634444217964398, "loss": 3.3581, "step": 27865 }, { "epoch": 1.8935996738687322, "grad_norm": 1.3568609952926636, "learning_rate": 0.0007634019567876071, "loss": 3.882, "step": 27870 }, { "epoch": 1.893939393939394, "grad_norm": 1.3241468667984009, "learning_rate": 0.0007633594917787743, "loss": 3.7179, "step": 27875 }, { "epoch": 1.8942791140100557, "grad_norm": 1.1998475790023804, "learning_rate": 0.0007633170267699416, "loss": 3.7087, "step": 27880 }, { "epoch": 1.8946188340807175, "grad_norm": 1.3486846685409546, "learning_rate": 0.0007632745617611089, "loss": 3.3987, "step": 27885 }, { "epoch": 1.8949585541513794, "grad_norm": 1.0145058631896973, "learning_rate": 0.0007632320967522761, "loss": 3.518, "step": 27890 }, { "epoch": 1.895298274222041, "grad_norm": 1.4505821466445923, "learning_rate": 0.0007631896317434434, "loss": 3.7332, "step": 27895 }, { "epoch": 1.8956379942927029, "grad_norm": 1.4421004056930542, "learning_rate": 0.0007631471667346108, "loss": 3.6489, "step": 27900 }, { "epoch": 1.8959777143633647, "grad_norm": 1.5545779466629028, "learning_rate": 0.000763104701725778, "loss": 3.4251, "step": 27905 }, { "epoch": 1.8963174344340263, "grad_norm": 1.2008670568466187, "learning_rate": 0.0007630622367169453, "loss": 3.6285, "step": 27910 }, { "epoch": 1.8966571545046882, "grad_norm": 1.199836254119873, "learning_rate": 0.0007630197717081125, "loss": 3.5432, "step": 27915 }, { "epoch": 1.89699687457535, "grad_norm": 1.2203850746154785, "learning_rate": 0.0007629773066992798, "loss": 3.6401, "step": 27920 }, { "epoch": 1.8973365946460117, "grad_norm": 1.9400707483291626, 
"learning_rate": 0.0007629348416904471, "loss": 3.6298, "step": 27925 }, { "epoch": 1.8976763147166733, "grad_norm": 1.2189035415649414, "learning_rate": 0.0007628923766816143, "loss": 3.6452, "step": 27930 }, { "epoch": 1.8980160347873354, "grad_norm": 3.3120052814483643, "learning_rate": 0.0007628499116727817, "loss": 3.5678, "step": 27935 }, { "epoch": 1.898355754857997, "grad_norm": 1.2242563962936401, "learning_rate": 0.000762807446663949, "loss": 3.4546, "step": 27940 }, { "epoch": 1.8986954749286586, "grad_norm": 1.5606440305709839, "learning_rate": 0.0007627649816551162, "loss": 3.4214, "step": 27945 }, { "epoch": 1.8990351949993207, "grad_norm": 1.0920206308364868, "learning_rate": 0.0007627225166462834, "loss": 3.5132, "step": 27950 }, { "epoch": 1.8993749150699824, "grad_norm": 1.3988549709320068, "learning_rate": 0.0007626800516374508, "loss": 3.4083, "step": 27955 }, { "epoch": 1.899714635140644, "grad_norm": 1.1867121458053589, "learning_rate": 0.000762637586628618, "loss": 3.5239, "step": 27960 }, { "epoch": 1.9000543552113058, "grad_norm": 1.5775915384292603, "learning_rate": 0.0007625951216197852, "loss": 3.705, "step": 27965 }, { "epoch": 1.9003940752819677, "grad_norm": 1.1803520917892456, "learning_rate": 0.0007625526566109527, "loss": 3.7182, "step": 27970 }, { "epoch": 1.9007337953526293, "grad_norm": 1.2292336225509644, "learning_rate": 0.0007625101916021199, "loss": 3.4937, "step": 27975 }, { "epoch": 1.9010735154232912, "grad_norm": 1.0144842863082886, "learning_rate": 0.0007624677265932871, "loss": 3.6182, "step": 27980 }, { "epoch": 1.901413235493953, "grad_norm": 1.2526512145996094, "learning_rate": 0.0007624252615844545, "loss": 3.913, "step": 27985 }, { "epoch": 1.9017529555646147, "grad_norm": 1.673080325126648, "learning_rate": 0.0007623827965756217, "loss": 3.5514, "step": 27990 }, { "epoch": 1.9020926756352765, "grad_norm": 1.610991358757019, "learning_rate": 0.0007623403315667889, "loss": 3.4404, "step": 27995 }, { "epoch": 
1.9024323957059384, "grad_norm": 4.830174446105957, "learning_rate": 0.0007622978665579562, "loss": 3.5856, "step": 28000 }, { "epoch": 1.9027721157766, "grad_norm": 1.3576754331588745, "learning_rate": 0.0007622554015491236, "loss": 3.6201, "step": 28005 }, { "epoch": 1.9031118358472618, "grad_norm": 1.344722032546997, "learning_rate": 0.0007622129365402908, "loss": 3.8433, "step": 28010 }, { "epoch": 1.9034515559179237, "grad_norm": 1.5276950597763062, "learning_rate": 0.0007621704715314581, "loss": 3.4245, "step": 28015 }, { "epoch": 1.9037912759885853, "grad_norm": 1.2373151779174805, "learning_rate": 0.0007621280065226254, "loss": 3.7122, "step": 28020 }, { "epoch": 1.9041309960592472, "grad_norm": 1.1486377716064453, "learning_rate": 0.0007620855415137926, "loss": 3.6666, "step": 28025 }, { "epoch": 1.904470716129909, "grad_norm": 1.085014820098877, "learning_rate": 0.0007620430765049599, "loss": 3.6796, "step": 28030 }, { "epoch": 1.9048104362005707, "grad_norm": 1.3478577136993408, "learning_rate": 0.0007620006114961272, "loss": 3.5805, "step": 28035 }, { "epoch": 1.9051501562712325, "grad_norm": 1.6992751359939575, "learning_rate": 0.0007619581464872945, "loss": 3.5676, "step": 28040 }, { "epoch": 1.9054898763418944, "grad_norm": 1.193070411682129, "learning_rate": 0.0007619156814784618, "loss": 3.8268, "step": 28045 }, { "epoch": 1.905829596412556, "grad_norm": 1.3636658191680908, "learning_rate": 0.000761873216469629, "loss": 3.4204, "step": 28050 }, { "epoch": 1.9061693164832179, "grad_norm": 1.37433660030365, "learning_rate": 0.0007618307514607963, "loss": 3.5351, "step": 28055 }, { "epoch": 1.9065090365538797, "grad_norm": 1.144229769706726, "learning_rate": 0.0007617882864519636, "loss": 3.7591, "step": 28060 }, { "epoch": 1.9068487566245413, "grad_norm": 1.4456316232681274, "learning_rate": 0.0007617458214431308, "loss": 3.3851, "step": 28065 }, { "epoch": 1.9071884766952032, "grad_norm": 1.3036731481552124, "learning_rate": 0.0007617033564342981, 
"loss": 3.4563, "step": 28070 }, { "epoch": 1.907528196765865, "grad_norm": 1.2747868299484253, "learning_rate": 0.0007616608914254655, "loss": 3.6445, "step": 28075 }, { "epoch": 1.9078679168365267, "grad_norm": 1.171810269355774, "learning_rate": 0.0007616184264166327, "loss": 3.4799, "step": 28080 }, { "epoch": 1.9082076369071885, "grad_norm": 1.4040284156799316, "learning_rate": 0.0007615759614078, "loss": 3.5089, "step": 28085 }, { "epoch": 1.9085473569778504, "grad_norm": 1.4091153144836426, "learning_rate": 0.0007615334963989673, "loss": 3.8037, "step": 28090 }, { "epoch": 1.908887077048512, "grad_norm": 1.2551617622375488, "learning_rate": 0.0007614910313901345, "loss": 3.4327, "step": 28095 }, { "epoch": 1.9092267971191736, "grad_norm": 1.2548648118972778, "learning_rate": 0.0007614485663813017, "loss": 3.4406, "step": 28100 }, { "epoch": 1.9095665171898357, "grad_norm": 1.1845661401748657, "learning_rate": 0.0007614061013724691, "loss": 3.5265, "step": 28105 }, { "epoch": 1.9099062372604974, "grad_norm": 1.4998265504837036, "learning_rate": 0.0007613636363636364, "loss": 3.6795, "step": 28110 }, { "epoch": 1.910245957331159, "grad_norm": 1.5152034759521484, "learning_rate": 0.0007613211713548036, "loss": 3.5705, "step": 28115 }, { "epoch": 1.910585677401821, "grad_norm": 1.1685367822647095, "learning_rate": 0.000761278706345971, "loss": 3.7894, "step": 28120 }, { "epoch": 1.9109253974724827, "grad_norm": 1.367883563041687, "learning_rate": 0.0007612362413371382, "loss": 3.8697, "step": 28125 }, { "epoch": 1.9112651175431443, "grad_norm": 1.4886422157287598, "learning_rate": 0.0007611937763283054, "loss": 3.5703, "step": 28130 }, { "epoch": 1.9116048376138062, "grad_norm": 1.7596673965454102, "learning_rate": 0.0007611513113194728, "loss": 3.4141, "step": 28135 }, { "epoch": 1.911944557684468, "grad_norm": 1.403745174407959, "learning_rate": 0.00076110884631064, "loss": 3.9178, "step": 28140 }, { "epoch": 1.9122842777551297, "grad_norm": 
1.1759172677993774, "learning_rate": 0.0007610663813018073, "loss": 3.633, "step": 28145 }, { "epoch": 1.9126239978257915, "grad_norm": 1.2867847681045532, "learning_rate": 0.0007610239162929746, "loss": 3.5523, "step": 28150 }, { "epoch": 1.9129637178964534, "grad_norm": 1.4405813217163086, "learning_rate": 0.0007609814512841419, "loss": 3.7455, "step": 28155 }, { "epoch": 1.913303437967115, "grad_norm": 1.6915100812911987, "learning_rate": 0.0007609389862753091, "loss": 3.7414, "step": 28160 }, { "epoch": 1.9136431580377768, "grad_norm": 1.3321120738983154, "learning_rate": 0.0007608965212664764, "loss": 3.829, "step": 28165 }, { "epoch": 1.9139828781084387, "grad_norm": 1.661009669303894, "learning_rate": 0.0007608540562576437, "loss": 3.5578, "step": 28170 }, { "epoch": 1.9143225981791003, "grad_norm": 1.5212514400482178, "learning_rate": 0.0007608115912488109, "loss": 3.3736, "step": 28175 }, { "epoch": 1.9146623182497622, "grad_norm": 1.4497175216674805, "learning_rate": 0.0007607691262399783, "loss": 3.6949, "step": 28180 }, { "epoch": 1.915002038320424, "grad_norm": 1.5014632940292358, "learning_rate": 0.0007607266612311456, "loss": 3.5654, "step": 28185 }, { "epoch": 1.9153417583910857, "grad_norm": 1.232796549797058, "learning_rate": 0.0007606841962223129, "loss": 3.5644, "step": 28190 }, { "epoch": 1.9156814784617475, "grad_norm": 1.0848920345306396, "learning_rate": 0.0007606417312134801, "loss": 3.4396, "step": 28195 }, { "epoch": 1.9160211985324094, "grad_norm": 1.2382618188858032, "learning_rate": 0.0007605992662046473, "loss": 3.9604, "step": 28200 }, { "epoch": 1.916360918603071, "grad_norm": 1.806638240814209, "learning_rate": 0.0007605568011958147, "loss": 3.585, "step": 28205 }, { "epoch": 1.9167006386737329, "grad_norm": 0.9937152862548828, "learning_rate": 0.000760514336186982, "loss": 3.6412, "step": 28210 }, { "epoch": 1.9170403587443947, "grad_norm": 1.3575471639633179, "learning_rate": 0.0007604718711781492, "loss": 3.4868, "step": 28215 
}, { "epoch": 1.9173800788150563, "grad_norm": 1.6146478652954102, "learning_rate": 0.0007604294061693166, "loss": 3.5803, "step": 28220 }, { "epoch": 1.9177197988857182, "grad_norm": 2.0401129722595215, "learning_rate": 0.0007603869411604838, "loss": 3.3401, "step": 28225 }, { "epoch": 1.91805951895638, "grad_norm": 1.67330801486969, "learning_rate": 0.000760344476151651, "loss": 3.284, "step": 28230 }, { "epoch": 1.9183992390270417, "grad_norm": 1.2954946756362915, "learning_rate": 0.0007603020111428184, "loss": 3.6271, "step": 28235 }, { "epoch": 1.9187389590977035, "grad_norm": 1.085976243019104, "learning_rate": 0.0007602595461339856, "loss": 3.6851, "step": 28240 }, { "epoch": 1.9190786791683654, "grad_norm": 1.376865029335022, "learning_rate": 0.0007602170811251529, "loss": 3.5137, "step": 28245 }, { "epoch": 1.919418399239027, "grad_norm": 1.7830466032028198, "learning_rate": 0.0007601746161163203, "loss": 3.6447, "step": 28250 }, { "epoch": 1.9197581193096889, "grad_norm": 1.65817391872406, "learning_rate": 0.0007601321511074875, "loss": 3.7239, "step": 28255 }, { "epoch": 1.9200978393803507, "grad_norm": 1.2071176767349243, "learning_rate": 0.0007600896860986547, "loss": 3.6091, "step": 28260 }, { "epoch": 1.9204375594510124, "grad_norm": 1.6997333765029907, "learning_rate": 0.000760047221089822, "loss": 3.7221, "step": 28265 }, { "epoch": 1.920777279521674, "grad_norm": 1.3312550783157349, "learning_rate": 0.0007600047560809893, "loss": 3.748, "step": 28270 }, { "epoch": 1.921116999592336, "grad_norm": 1.2399585247039795, "learning_rate": 0.0007599622910721565, "loss": 3.67, "step": 28275 }, { "epoch": 1.9214567196629977, "grad_norm": 1.4629894495010376, "learning_rate": 0.0007599198260633239, "loss": 3.413, "step": 28280 }, { "epoch": 1.9217964397336593, "grad_norm": 1.4293911457061768, "learning_rate": 0.0007598773610544912, "loss": 3.666, "step": 28285 }, { "epoch": 1.9221361598043214, "grad_norm": 2.4739463329315186, "learning_rate": 
0.0007598348960456584, "loss": 3.7186, "step": 28290 }, { "epoch": 1.922475879874983, "grad_norm": 1.2815006971359253, "learning_rate": 0.0007597924310368257, "loss": 3.5227, "step": 28295 }, { "epoch": 1.9228155999456447, "grad_norm": 1.190210223197937, "learning_rate": 0.0007597499660279929, "loss": 3.7083, "step": 28300 }, { "epoch": 1.9231553200163065, "grad_norm": 1.0036288499832153, "learning_rate": 0.0007597075010191602, "loss": 3.6871, "step": 28305 }, { "epoch": 1.9234950400869684, "grad_norm": 1.1513994932174683, "learning_rate": 0.0007596650360103275, "loss": 3.6595, "step": 28310 }, { "epoch": 1.92383476015763, "grad_norm": 1.098750352859497, "learning_rate": 0.0007596225710014948, "loss": 3.8201, "step": 28315 }, { "epoch": 1.9241744802282919, "grad_norm": 1.4122252464294434, "learning_rate": 0.0007595801059926621, "loss": 3.6699, "step": 28320 }, { "epoch": 1.9245142002989537, "grad_norm": 1.212867259979248, "learning_rate": 0.0007595376409838294, "loss": 3.577, "step": 28325 }, { "epoch": 1.9248539203696153, "grad_norm": 1.4788587093353271, "learning_rate": 0.0007594951759749966, "loss": 3.5594, "step": 28330 }, { "epoch": 1.9251936404402772, "grad_norm": 1.3234461545944214, "learning_rate": 0.0007594527109661638, "loss": 3.5776, "step": 28335 }, { "epoch": 1.925533360510939, "grad_norm": 2.490586996078491, "learning_rate": 0.0007594102459573312, "loss": 3.6611, "step": 28340 }, { "epoch": 1.9258730805816007, "grad_norm": 1.4530364274978638, "learning_rate": 0.0007593677809484984, "loss": 3.4932, "step": 28345 }, { "epoch": 1.9262128006522625, "grad_norm": 1.313302755355835, "learning_rate": 0.0007593253159396657, "loss": 3.5383, "step": 28350 }, { "epoch": 1.9265525207229244, "grad_norm": 1.2307863235473633, "learning_rate": 0.0007592828509308331, "loss": 3.4233, "step": 28355 }, { "epoch": 1.926892240793586, "grad_norm": 10.432571411132812, "learning_rate": 0.0007592403859220003, "loss": 3.4786, "step": 28360 }, { "epoch": 1.9272319608642479, 
"grad_norm": 1.1684823036193848, "learning_rate": 0.0007591979209131675, "loss": 3.646, "step": 28365 }, { "epoch": 1.9275716809349097, "grad_norm": 1.062642216682434, "learning_rate": 0.0007591554559043349, "loss": 3.4929, "step": 28370 }, { "epoch": 1.9279114010055713, "grad_norm": 1.3510469198226929, "learning_rate": 0.0007591129908955021, "loss": 3.5621, "step": 28375 }, { "epoch": 1.9282511210762332, "grad_norm": 1.608886957168579, "learning_rate": 0.0007590705258866693, "loss": 3.5678, "step": 28380 }, { "epoch": 1.928590841146895, "grad_norm": 1.0444023609161377, "learning_rate": 0.0007590280608778368, "loss": 3.4031, "step": 28385 }, { "epoch": 1.9289305612175567, "grad_norm": 2.0368168354034424, "learning_rate": 0.000758985595869004, "loss": 3.7117, "step": 28390 }, { "epoch": 1.9292702812882185, "grad_norm": 3.884028673171997, "learning_rate": 0.0007589431308601712, "loss": 3.6927, "step": 28395 }, { "epoch": 1.9296100013588804, "grad_norm": 1.4730007648468018, "learning_rate": 0.0007589006658513385, "loss": 3.7451, "step": 28400 }, { "epoch": 1.929949721429542, "grad_norm": 1.4488847255706787, "learning_rate": 0.0007588582008425058, "loss": 3.5998, "step": 28405 }, { "epoch": 1.9302894415002039, "grad_norm": 1.1106761693954468, "learning_rate": 0.000758815735833673, "loss": 3.5743, "step": 28410 }, { "epoch": 1.9306291615708657, "grad_norm": 1.3997644186019897, "learning_rate": 0.0007587732708248403, "loss": 3.285, "step": 28415 }, { "epoch": 1.9309688816415274, "grad_norm": 1.2314002513885498, "learning_rate": 0.0007587308058160077, "loss": 3.794, "step": 28420 }, { "epoch": 1.9313086017121892, "grad_norm": 1.532016396522522, "learning_rate": 0.0007586883408071749, "loss": 3.5591, "step": 28425 }, { "epoch": 1.931648321782851, "grad_norm": 1.315860629081726, "learning_rate": 0.0007586458757983422, "loss": 3.5581, "step": 28430 }, { "epoch": 1.9319880418535127, "grad_norm": 1.8915544748306274, "learning_rate": 0.0007586034107895095, "loss": 3.6258, 
"step": 28435 }, { "epoch": 1.9323277619241743, "grad_norm": 1.353171944618225, "learning_rate": 0.0007585609457806767, "loss": 3.529, "step": 28440 }, { "epoch": 1.9326674819948364, "grad_norm": 1.2694591283798218, "learning_rate": 0.000758518480771844, "loss": 3.5322, "step": 28445 }, { "epoch": 1.933007202065498, "grad_norm": 1.5167649984359741, "learning_rate": 0.0007584760157630112, "loss": 3.73, "step": 28450 }, { "epoch": 1.9333469221361597, "grad_norm": 1.2906426191329956, "learning_rate": 0.0007584335507541786, "loss": 3.6363, "step": 28455 }, { "epoch": 1.9336866422068217, "grad_norm": 0.9262884855270386, "learning_rate": 0.0007583910857453459, "loss": 3.6578, "step": 28460 }, { "epoch": 1.9340263622774834, "grad_norm": 1.2656148672103882, "learning_rate": 0.0007583486207365131, "loss": 3.664, "step": 28465 }, { "epoch": 1.934366082348145, "grad_norm": 1.0996557474136353, "learning_rate": 0.0007583061557276804, "loss": 3.3775, "step": 28470 }, { "epoch": 1.9347058024188069, "grad_norm": 1.6145812273025513, "learning_rate": 0.0007582636907188477, "loss": 3.7912, "step": 28475 }, { "epoch": 1.9350455224894687, "grad_norm": 1.1835569143295288, "learning_rate": 0.0007582212257100149, "loss": 3.697, "step": 28480 }, { "epoch": 1.9353852425601303, "grad_norm": 1.765262484550476, "learning_rate": 0.0007581787607011821, "loss": 3.5596, "step": 28485 }, { "epoch": 1.9357249626307922, "grad_norm": 1.080094814300537, "learning_rate": 0.0007581362956923496, "loss": 3.3159, "step": 28490 }, { "epoch": 1.936064682701454, "grad_norm": 1.5276150703430176, "learning_rate": 0.0007580938306835168, "loss": 3.6013, "step": 28495 }, { "epoch": 1.9364044027721157, "grad_norm": 1.514375925064087, "learning_rate": 0.000758051365674684, "loss": 3.6824, "step": 28500 }, { "epoch": 1.9367441228427775, "grad_norm": 1.8479008674621582, "learning_rate": 0.0007580089006658514, "loss": 3.7128, "step": 28505 }, { "epoch": 1.9370838429134394, "grad_norm": 1.2291151285171509, 
"learning_rate": 0.0007579664356570186, "loss": 3.5379, "step": 28510 }, { "epoch": 1.937423562984101, "grad_norm": 1.2250677347183228, "learning_rate": 0.0007579239706481858, "loss": 3.6583, "step": 28515 }, { "epoch": 1.9377632830547629, "grad_norm": 1.4680548906326294, "learning_rate": 0.0007578815056393532, "loss": 3.5436, "step": 28520 }, { "epoch": 1.9381030031254247, "grad_norm": 1.1839853525161743, "learning_rate": 0.0007578390406305205, "loss": 3.6463, "step": 28525 }, { "epoch": 1.9384427231960863, "grad_norm": 1.3631924390792847, "learning_rate": 0.0007577965756216878, "loss": 3.6005, "step": 28530 }, { "epoch": 1.9387824432667482, "grad_norm": 1.1541751623153687, "learning_rate": 0.000757754110612855, "loss": 3.5078, "step": 28535 }, { "epoch": 1.93912216333741, "grad_norm": 1.6889036893844604, "learning_rate": 0.0007577116456040223, "loss": 3.7196, "step": 28540 }, { "epoch": 1.9394618834080717, "grad_norm": 1.366836428642273, "learning_rate": 0.0007576691805951896, "loss": 3.623, "step": 28545 }, { "epoch": 1.9398016034787335, "grad_norm": 1.2910611629486084, "learning_rate": 0.0007576267155863568, "loss": 3.6046, "step": 28550 }, { "epoch": 1.9401413235493954, "grad_norm": 1.2651054859161377, "learning_rate": 0.0007575842505775241, "loss": 3.5543, "step": 28555 }, { "epoch": 1.940481043620057, "grad_norm": 1.674460768699646, "learning_rate": 0.0007575417855686915, "loss": 3.7605, "step": 28560 }, { "epoch": 1.9408207636907189, "grad_norm": 1.1078964471817017, "learning_rate": 0.0007574993205598587, "loss": 3.5423, "step": 28565 }, { "epoch": 1.9411604837613807, "grad_norm": 1.5020190477371216, "learning_rate": 0.000757456855551026, "loss": 3.6688, "step": 28570 }, { "epoch": 1.9415002038320424, "grad_norm": 1.1346182823181152, "learning_rate": 0.0007574143905421933, "loss": 3.3507, "step": 28575 }, { "epoch": 1.9418399239027042, "grad_norm": 1.5497475862503052, "learning_rate": 0.0007573719255333605, "loss": 3.6583, "step": 28580 }, { "epoch": 
1.942179643973366, "grad_norm": 0.9813948273658752, "learning_rate": 0.0007573294605245277, "loss": 3.8001, "step": 28585 }, { "epoch": 1.9425193640440277, "grad_norm": 1.2530146837234497, "learning_rate": 0.0007572869955156951, "loss": 3.613, "step": 28590 }, { "epoch": 1.9428590841146895, "grad_norm": 1.2330132722854614, "learning_rate": 0.0007572445305068624, "loss": 3.5441, "step": 28595 }, { "epoch": 1.9431988041853514, "grad_norm": 1.7078090906143188, "learning_rate": 0.0007572020654980296, "loss": 3.2117, "step": 28600 }, { "epoch": 1.943538524256013, "grad_norm": 1.8904333114624023, "learning_rate": 0.000757159600489197, "loss": 3.5402, "step": 28605 }, { "epoch": 1.9438782443266747, "grad_norm": 1.6604256629943848, "learning_rate": 0.0007571171354803642, "loss": 3.4149, "step": 28610 }, { "epoch": 1.9442179643973367, "grad_norm": 1.3123551607131958, "learning_rate": 0.0007570746704715314, "loss": 3.5693, "step": 28615 }, { "epoch": 1.9445576844679984, "grad_norm": 1.7975660562515259, "learning_rate": 0.0007570322054626988, "loss": 3.1559, "step": 28620 }, { "epoch": 1.94489740453866, "grad_norm": 1.2104562520980835, "learning_rate": 0.000756989740453866, "loss": 3.4529, "step": 28625 }, { "epoch": 1.945237124609322, "grad_norm": 1.2824417352676392, "learning_rate": 0.0007569472754450333, "loss": 3.5412, "step": 28630 }, { "epoch": 1.9455768446799837, "grad_norm": 1.1210616827011108, "learning_rate": 0.0007569048104362007, "loss": 3.5235, "step": 28635 }, { "epoch": 1.9459165647506453, "grad_norm": 4.974826812744141, "learning_rate": 0.0007568623454273679, "loss": 3.4515, "step": 28640 }, { "epoch": 1.9462562848213072, "grad_norm": 1.2829108238220215, "learning_rate": 0.0007568198804185351, "loss": 3.5238, "step": 28645 }, { "epoch": 1.946596004891969, "grad_norm": 1.4007076025009155, "learning_rate": 0.0007567774154097024, "loss": 3.2982, "step": 28650 }, { "epoch": 1.9469357249626307, "grad_norm": 1.4026731252670288, "learning_rate": 
0.0007567349504008697, "loss": 3.691, "step": 28655 }, { "epoch": 1.9472754450332925, "grad_norm": 1.472905158996582, "learning_rate": 0.0007566924853920369, "loss": 3.8507, "step": 28660 }, { "epoch": 1.9476151651039544, "grad_norm": 1.8006609678268433, "learning_rate": 0.0007566500203832043, "loss": 3.4348, "step": 28665 }, { "epoch": 1.947954885174616, "grad_norm": 1.9174102544784546, "learning_rate": 0.0007566075553743716, "loss": 3.4918, "step": 28670 }, { "epoch": 1.9482946052452779, "grad_norm": 1.6477458477020264, "learning_rate": 0.0007565650903655388, "loss": 3.3883, "step": 28675 }, { "epoch": 1.9486343253159397, "grad_norm": 1.1220839023590088, "learning_rate": 0.0007565226253567061, "loss": 3.8582, "step": 28680 }, { "epoch": 1.9489740453866014, "grad_norm": 1.4650548696517944, "learning_rate": 0.0007564801603478733, "loss": 3.7436, "step": 28685 }, { "epoch": 1.9493137654572632, "grad_norm": 1.0889599323272705, "learning_rate": 0.0007564376953390406, "loss": 3.6315, "step": 28690 }, { "epoch": 1.949653485527925, "grad_norm": 1.134151577949524, "learning_rate": 0.000756395230330208, "loss": 3.6466, "step": 28695 }, { "epoch": 1.9499932055985867, "grad_norm": 1.4829497337341309, "learning_rate": 0.0007563527653213752, "loss": 3.5981, "step": 28700 }, { "epoch": 1.9503329256692485, "grad_norm": 1.1393455266952515, "learning_rate": 0.0007563103003125425, "loss": 3.5339, "step": 28705 }, { "epoch": 1.9506726457399104, "grad_norm": 1.5881011486053467, "learning_rate": 0.0007562678353037098, "loss": 3.8482, "step": 28710 }, { "epoch": 1.951012365810572, "grad_norm": 1.4047255516052246, "learning_rate": 0.000756225370294877, "loss": 3.6481, "step": 28715 }, { "epoch": 1.9513520858812339, "grad_norm": 1.1850553750991821, "learning_rate": 0.0007561829052860443, "loss": 3.5971, "step": 28720 }, { "epoch": 1.9516918059518957, "grad_norm": 1.2619614601135254, "learning_rate": 0.0007561404402772116, "loss": 3.645, "step": 28725 }, { "epoch": 1.9520315260225574, 
"grad_norm": 1.5943583250045776, "learning_rate": 0.0007560979752683789, "loss": 3.6497, "step": 28730 }, { "epoch": 1.9523712460932192, "grad_norm": 1.3450250625610352, "learning_rate": 0.0007560555102595461, "loss": 3.7637, "step": 28735 }, { "epoch": 1.952710966163881, "grad_norm": 1.2413653135299683, "learning_rate": 0.0007560130452507135, "loss": 3.7226, "step": 28740 }, { "epoch": 1.9530506862345427, "grad_norm": 1.6443407535552979, "learning_rate": 0.0007559705802418807, "loss": 3.8278, "step": 28745 }, { "epoch": 1.9533904063052046, "grad_norm": 1.245180606842041, "learning_rate": 0.0007559281152330479, "loss": 3.5793, "step": 28750 }, { "epoch": 1.9537301263758664, "grad_norm": 1.3917138576507568, "learning_rate": 0.0007558856502242153, "loss": 3.6415, "step": 28755 }, { "epoch": 1.954069846446528, "grad_norm": 1.4174578189849854, "learning_rate": 0.0007558431852153825, "loss": 3.6836, "step": 28760 }, { "epoch": 1.95440956651719, "grad_norm": 1.8685438632965088, "learning_rate": 0.0007558007202065498, "loss": 3.7455, "step": 28765 }, { "epoch": 1.9547492865878517, "grad_norm": 2.2216055393218994, "learning_rate": 0.0007557582551977172, "loss": 3.7335, "step": 28770 }, { "epoch": 1.9550890066585134, "grad_norm": 1.508205533027649, "learning_rate": 0.0007557157901888844, "loss": 3.5626, "step": 28775 }, { "epoch": 1.955428726729175, "grad_norm": 1.2953487634658813, "learning_rate": 0.0007556733251800516, "loss": 3.7464, "step": 28780 }, { "epoch": 1.955768446799837, "grad_norm": 1.6368764638900757, "learning_rate": 0.000755630860171219, "loss": 3.5905, "step": 28785 }, { "epoch": 1.9561081668704987, "grad_norm": 1.0506319999694824, "learning_rate": 0.0007555883951623862, "loss": 3.7642, "step": 28790 }, { "epoch": 1.9564478869411603, "grad_norm": 1.3904024362564087, "learning_rate": 0.0007555459301535534, "loss": 3.5604, "step": 28795 }, { "epoch": 1.9567876070118224, "grad_norm": 1.1011172533035278, "learning_rate": 0.0007555034651447208, "loss": 3.6019, 
"step": 28800 }, { "epoch": 1.957127327082484, "grad_norm": 1.3036549091339111, "learning_rate": 0.0007554610001358881, "loss": 3.3006, "step": 28805 }, { "epoch": 1.9574670471531457, "grad_norm": 1.3956279754638672, "learning_rate": 0.0007554185351270553, "loss": 3.6067, "step": 28810 }, { "epoch": 1.9578067672238075, "grad_norm": 1.5106816291809082, "learning_rate": 0.0007553760701182226, "loss": 3.548, "step": 28815 }, { "epoch": 1.9581464872944694, "grad_norm": 1.2729506492614746, "learning_rate": 0.0007553336051093899, "loss": 3.6084, "step": 28820 }, { "epoch": 1.958486207365131, "grad_norm": 1.2057831287384033, "learning_rate": 0.0007552911401005571, "loss": 3.4817, "step": 28825 }, { "epoch": 1.9588259274357929, "grad_norm": 1.948310136795044, "learning_rate": 0.0007552486750917244, "loss": 3.4402, "step": 28830 }, { "epoch": 1.9591656475064547, "grad_norm": 2.4224815368652344, "learning_rate": 0.0007552062100828917, "loss": 3.4079, "step": 28835 }, { "epoch": 1.9595053675771164, "grad_norm": 1.3121289014816284, "learning_rate": 0.000755163745074059, "loss": 3.6652, "step": 28840 }, { "epoch": 1.9598450876477782, "grad_norm": 1.214752197265625, "learning_rate": 0.0007551212800652263, "loss": 3.6887, "step": 28845 }, { "epoch": 1.96018480771844, "grad_norm": 1.542224407196045, "learning_rate": 0.0007550788150563935, "loss": 3.5712, "step": 28850 }, { "epoch": 1.9605245277891017, "grad_norm": 1.47307288646698, "learning_rate": 0.0007550363500475608, "loss": 3.5797, "step": 28855 }, { "epoch": 1.9608642478597635, "grad_norm": 1.2372432947158813, "learning_rate": 0.0007549938850387281, "loss": 3.7493, "step": 28860 }, { "epoch": 1.9612039679304254, "grad_norm": 1.0417758226394653, "learning_rate": 0.0007549514200298953, "loss": 3.5744, "step": 28865 }, { "epoch": 1.961543688001087, "grad_norm": 1.3564425706863403, "learning_rate": 0.0007549089550210628, "loss": 3.5363, "step": 28870 }, { "epoch": 1.9618834080717489, "grad_norm": 1.2304741144180298, 
"learning_rate": 0.00075486649001223, "loss": 3.7634, "step": 28875 }, { "epoch": 1.9622231281424107, "grad_norm": 1.021981120109558, "learning_rate": 0.0007548240250033972, "loss": 3.5121, "step": 28880 }, { "epoch": 1.9625628482130724, "grad_norm": 1.326138973236084, "learning_rate": 0.0007547815599945645, "loss": 3.3371, "step": 28885 }, { "epoch": 1.9629025682837342, "grad_norm": 2.102325916290283, "learning_rate": 0.0007547390949857318, "loss": 3.5632, "step": 28890 }, { "epoch": 1.963242288354396, "grad_norm": 1.5008925199508667, "learning_rate": 0.000754696629976899, "loss": 3.5012, "step": 28895 }, { "epoch": 1.9635820084250577, "grad_norm": 1.4792096614837646, "learning_rate": 0.0007546541649680663, "loss": 3.5075, "step": 28900 }, { "epoch": 1.9639217284957196, "grad_norm": 1.6631975173950195, "learning_rate": 0.0007546116999592337, "loss": 3.7407, "step": 28905 }, { "epoch": 1.9642614485663814, "grad_norm": 1.1632524728775024, "learning_rate": 0.0007545692349504009, "loss": 3.4135, "step": 28910 }, { "epoch": 1.964601168637043, "grad_norm": 1.184127688407898, "learning_rate": 0.0007545267699415682, "loss": 3.4907, "step": 28915 }, { "epoch": 1.964940888707705, "grad_norm": 1.2181119918823242, "learning_rate": 0.0007544843049327355, "loss": 3.7436, "step": 28920 }, { "epoch": 1.9652806087783667, "grad_norm": 1.1243464946746826, "learning_rate": 0.0007544418399239027, "loss": 3.4881, "step": 28925 }, { "epoch": 1.9656203288490284, "grad_norm": 5.317780494689941, "learning_rate": 0.00075439937491507, "loss": 3.597, "step": 28930 }, { "epoch": 1.9659600489196902, "grad_norm": 1.3757359981536865, "learning_rate": 0.0007543569099062372, "loss": 3.4815, "step": 28935 }, { "epoch": 1.966299768990352, "grad_norm": 1.1538158655166626, "learning_rate": 0.0007543144448974046, "loss": 3.5477, "step": 28940 }, { "epoch": 1.9666394890610137, "grad_norm": 1.1033189296722412, "learning_rate": 0.0007542719798885719, "loss": 3.7359, "step": 28945 }, { "epoch": 
1.9669792091316753, "grad_norm": 1.6132632493972778, "learning_rate": 0.0007542295148797391, "loss": 3.8413, "step": 28950 }, { "epoch": 1.9673189292023374, "grad_norm": 1.6171281337738037, "learning_rate": 0.0007541870498709064, "loss": 3.3858, "step": 28955 }, { "epoch": 1.967658649272999, "grad_norm": 2.1048083305358887, "learning_rate": 0.0007541445848620737, "loss": 3.3589, "step": 28960 }, { "epoch": 1.9679983693436607, "grad_norm": 1.690613865852356, "learning_rate": 0.0007541021198532409, "loss": 3.7071, "step": 28965 }, { "epoch": 1.9683380894143228, "grad_norm": 1.4003772735595703, "learning_rate": 0.0007540596548444081, "loss": 3.619, "step": 28970 }, { "epoch": 1.9686778094849844, "grad_norm": 1.495640754699707, "learning_rate": 0.0007540171898355756, "loss": 3.3627, "step": 28975 }, { "epoch": 1.969017529555646, "grad_norm": 1.3121525049209595, "learning_rate": 0.0007539747248267428, "loss": 3.3591, "step": 28980 }, { "epoch": 1.9693572496263079, "grad_norm": 1.599245309829712, "learning_rate": 0.00075393225981791, "loss": 3.524, "step": 28985 }, { "epoch": 1.9696969696969697, "grad_norm": 1.3732377290725708, "learning_rate": 0.0007538897948090774, "loss": 3.6016, "step": 28990 }, { "epoch": 1.9700366897676314, "grad_norm": 1.1532701253890991, "learning_rate": 0.0007538473298002446, "loss": 3.3552, "step": 28995 }, { "epoch": 1.9703764098382932, "grad_norm": 1.2016342878341675, "learning_rate": 0.0007538048647914118, "loss": 3.6206, "step": 29000 }, { "epoch": 1.970716129908955, "grad_norm": 1.2252012491226196, "learning_rate": 0.0007537623997825792, "loss": 3.5781, "step": 29005 }, { "epoch": 1.9710558499796167, "grad_norm": 1.1938836574554443, "learning_rate": 0.0007537199347737465, "loss": 3.6527, "step": 29010 }, { "epoch": 1.9713955700502785, "grad_norm": 1.368644118309021, "learning_rate": 0.0007536774697649137, "loss": 3.6441, "step": 29015 }, { "epoch": 1.9717352901209404, "grad_norm": 1.226202130317688, "learning_rate": 0.0007536350047560811, 
"loss": 3.708, "step": 29020 }, { "epoch": 1.972075010191602, "grad_norm": 1.308038353919983, "learning_rate": 0.0007535925397472483, "loss": 3.6341, "step": 29025 }, { "epoch": 1.9724147302622639, "grad_norm": 1.8388530015945435, "learning_rate": 0.0007535500747384155, "loss": 3.8788, "step": 29030 }, { "epoch": 1.9727544503329257, "grad_norm": 1.334166407585144, "learning_rate": 0.0007535076097295828, "loss": 3.6286, "step": 29035 }, { "epoch": 1.9730941704035874, "grad_norm": 1.5745614767074585, "learning_rate": 0.0007534651447207501, "loss": 3.5493, "step": 29040 }, { "epoch": 1.9734338904742492, "grad_norm": 1.4143160581588745, "learning_rate": 0.0007534226797119174, "loss": 3.5005, "step": 29045 }, { "epoch": 1.973773610544911, "grad_norm": 1.477574348449707, "learning_rate": 0.0007533802147030847, "loss": 3.643, "step": 29050 }, { "epoch": 1.9741133306155727, "grad_norm": 1.5691989660263062, "learning_rate": 0.000753337749694252, "loss": 3.8371, "step": 29055 }, { "epoch": 1.9744530506862346, "grad_norm": 1.3024300336837769, "learning_rate": 0.0007532952846854192, "loss": 3.6086, "step": 29060 }, { "epoch": 1.9747927707568964, "grad_norm": 1.4040038585662842, "learning_rate": 0.0007532528196765865, "loss": 3.9159, "step": 29065 }, { "epoch": 1.975132490827558, "grad_norm": 1.219994068145752, "learning_rate": 0.0007532103546677538, "loss": 3.475, "step": 29070 }, { "epoch": 1.97547221089822, "grad_norm": 1.6674556732177734, "learning_rate": 0.000753167889658921, "loss": 3.4574, "step": 29075 }, { "epoch": 1.9758119309688817, "grad_norm": 1.4014803171157837, "learning_rate": 0.0007531254246500884, "loss": 3.5328, "step": 29080 }, { "epoch": 1.9761516510395434, "grad_norm": 1.178475260734558, "learning_rate": 0.0007530829596412556, "loss": 3.6998, "step": 29085 }, { "epoch": 1.9764913711102052, "grad_norm": 1.3716973066329956, "learning_rate": 0.0007530404946324229, "loss": 3.6193, "step": 29090 }, { "epoch": 1.976831091180867, "grad_norm": 1.2671544551849365, 
"learning_rate": 0.0007529980296235902, "loss": 3.6309, "step": 29095 }, { "epoch": 1.9771708112515287, "grad_norm": 1.2686740159988403, "learning_rate": 0.0007529555646147574, "loss": 3.2895, "step": 29100 }, { "epoch": 1.9775105313221906, "grad_norm": 1.0198419094085693, "learning_rate": 0.0007529130996059247, "loss": 3.6885, "step": 29105 }, { "epoch": 1.9778502513928524, "grad_norm": 1.491534948348999, "learning_rate": 0.000752870634597092, "loss": 3.3698, "step": 29110 }, { "epoch": 1.978189971463514, "grad_norm": 1.1778501272201538, "learning_rate": 0.0007528281695882593, "loss": 3.5485, "step": 29115 }, { "epoch": 1.9785296915341757, "grad_norm": 1.3158024549484253, "learning_rate": 0.0007527857045794266, "loss": 3.684, "step": 29120 }, { "epoch": 1.9788694116048378, "grad_norm": 1.1348161697387695, "learning_rate": 0.0007527432395705939, "loss": 3.8133, "step": 29125 }, { "epoch": 1.9792091316754994, "grad_norm": 1.4543663263320923, "learning_rate": 0.0007527007745617611, "loss": 3.7223, "step": 29130 }, { "epoch": 1.979548851746161, "grad_norm": 1.2061508893966675, "learning_rate": 0.0007526583095529283, "loss": 3.6187, "step": 29135 }, { "epoch": 1.979888571816823, "grad_norm": 1.8013943433761597, "learning_rate": 0.0007526158445440957, "loss": 3.4028, "step": 29140 }, { "epoch": 1.9802282918874847, "grad_norm": 1.1002118587493896, "learning_rate": 0.0007525733795352629, "loss": 3.5207, "step": 29145 }, { "epoch": 1.9805680119581464, "grad_norm": 1.2808104753494263, "learning_rate": 0.0007525309145264302, "loss": 3.5856, "step": 29150 }, { "epoch": 1.9809077320288082, "grad_norm": 1.1422561407089233, "learning_rate": 0.0007524884495175976, "loss": 3.5705, "step": 29155 }, { "epoch": 1.98124745209947, "grad_norm": 1.7027077674865723, "learning_rate": 0.0007524459845087648, "loss": 3.4875, "step": 29160 }, { "epoch": 1.9815871721701317, "grad_norm": 1.5115300416946411, "learning_rate": 0.000752403519499932, "loss": 3.4764, "step": 29165 }, { "epoch": 
1.9819268922407935, "grad_norm": 1.502153992652893, "learning_rate": 0.0007523610544910994, "loss": 3.7269, "step": 29170 }, { "epoch": 1.9822666123114554, "grad_norm": 1.266229271888733, "learning_rate": 0.0007523185894822666, "loss": 3.8373, "step": 29175 }, { "epoch": 1.982606332382117, "grad_norm": 1.4827851057052612, "learning_rate": 0.0007522761244734338, "loss": 3.477, "step": 29180 }, { "epoch": 1.9829460524527789, "grad_norm": 1.2405390739440918, "learning_rate": 0.0007522336594646012, "loss": 3.6906, "step": 29185 }, { "epoch": 1.9832857725234407, "grad_norm": 1.2582043409347534, "learning_rate": 0.0007521911944557685, "loss": 3.7437, "step": 29190 }, { "epoch": 1.9836254925941024, "grad_norm": 1.5370073318481445, "learning_rate": 0.0007521487294469357, "loss": 3.6917, "step": 29195 }, { "epoch": 1.9839652126647642, "grad_norm": 1.888142466545105, "learning_rate": 0.000752106264438103, "loss": 3.7983, "step": 29200 }, { "epoch": 1.984304932735426, "grad_norm": 1.4325298070907593, "learning_rate": 0.0007520637994292703, "loss": 3.6219, "step": 29205 }, { "epoch": 1.9846446528060877, "grad_norm": 1.2218527793884277, "learning_rate": 0.0007520213344204376, "loss": 3.3221, "step": 29210 }, { "epoch": 1.9849843728767496, "grad_norm": 1.3365731239318848, "learning_rate": 0.0007519788694116048, "loss": 3.7688, "step": 29215 }, { "epoch": 1.9853240929474114, "grad_norm": 1.6230741739273071, "learning_rate": 0.0007519364044027722, "loss": 3.3823, "step": 29220 }, { "epoch": 1.985663813018073, "grad_norm": 1.330345630645752, "learning_rate": 0.0007518939393939395, "loss": 3.6671, "step": 29225 }, { "epoch": 1.986003533088735, "grad_norm": 1.5379490852355957, "learning_rate": 0.0007518514743851067, "loss": 3.4912, "step": 29230 }, { "epoch": 1.9863432531593967, "grad_norm": 1.2196117639541626, "learning_rate": 0.0007518090093762739, "loss": 3.4989, "step": 29235 }, { "epoch": 1.9866829732300584, "grad_norm": 1.4984126091003418, "learning_rate": 
0.0007517665443674413, "loss": 3.4414, "step": 29240 }, { "epoch": 1.9870226933007202, "grad_norm": 1.377055287361145, "learning_rate": 0.0007517240793586085, "loss": 3.6909, "step": 29245 }, { "epoch": 1.987362413371382, "grad_norm": 1.2835586071014404, "learning_rate": 0.0007516816143497757, "loss": 3.7274, "step": 29250 }, { "epoch": 1.9877021334420437, "grad_norm": 2.0093424320220947, "learning_rate": 0.0007516391493409432, "loss": 3.6387, "step": 29255 }, { "epoch": 1.9880418535127056, "grad_norm": 1.268905758857727, "learning_rate": 0.0007515966843321104, "loss": 3.7715, "step": 29260 }, { "epoch": 1.9883815735833674, "grad_norm": 1.420393943786621, "learning_rate": 0.0007515542193232776, "loss": 3.3977, "step": 29265 }, { "epoch": 1.988721293654029, "grad_norm": 1.3749369382858276, "learning_rate": 0.000751511754314445, "loss": 3.5497, "step": 29270 }, { "epoch": 1.989061013724691, "grad_norm": 1.3263784646987915, "learning_rate": 0.0007514692893056122, "loss": 3.3318, "step": 29275 }, { "epoch": 1.9894007337953528, "grad_norm": 1.0090664625167847, "learning_rate": 0.0007514268242967794, "loss": 3.5632, "step": 29280 }, { "epoch": 1.9897404538660144, "grad_norm": 1.6370471715927124, "learning_rate": 0.0007513843592879468, "loss": 3.5524, "step": 29285 }, { "epoch": 1.990080173936676, "grad_norm": 1.1075894832611084, "learning_rate": 0.0007513418942791141, "loss": 3.5292, "step": 29290 }, { "epoch": 1.990419894007338, "grad_norm": 1.1456706523895264, "learning_rate": 0.0007512994292702813, "loss": 3.5914, "step": 29295 }, { "epoch": 1.9907596140779997, "grad_norm": 1.1384601593017578, "learning_rate": 0.0007512569642614486, "loss": 3.6667, "step": 29300 }, { "epoch": 1.9910993341486614, "grad_norm": 1.3031785488128662, "learning_rate": 0.0007512144992526159, "loss": 3.5811, "step": 29305 }, { "epoch": 1.9914390542193234, "grad_norm": 0.9898591041564941, "learning_rate": 0.0007511720342437831, "loss": 3.6519, "step": 29310 }, { "epoch": 1.991778774289985, 
"grad_norm": 1.0638514757156372, "learning_rate": 0.0007511295692349504, "loss": 3.5291, "step": 29315 }, { "epoch": 1.9921184943606467, "grad_norm": 1.0804272890090942, "learning_rate": 0.0007510871042261178, "loss": 3.5154, "step": 29320 }, { "epoch": 1.9924582144313085, "grad_norm": 1.446519374847412, "learning_rate": 0.000751044639217285, "loss": 3.7419, "step": 29325 }, { "epoch": 1.9927979345019704, "grad_norm": 1.0386358499526978, "learning_rate": 0.0007510021742084523, "loss": 3.3626, "step": 29330 }, { "epoch": 1.993137654572632, "grad_norm": 1.305772304534912, "learning_rate": 0.0007509597091996195, "loss": 3.5401, "step": 29335 }, { "epoch": 1.9934773746432939, "grad_norm": 1.205942988395691, "learning_rate": 0.0007509172441907868, "loss": 3.7807, "step": 29340 }, { "epoch": 1.9938170947139557, "grad_norm": 1.8120753765106201, "learning_rate": 0.0007508747791819541, "loss": 3.3649, "step": 29345 }, { "epoch": 1.9941568147846174, "grad_norm": 1.4776380062103271, "learning_rate": 0.0007508323141731213, "loss": 3.7844, "step": 29350 }, { "epoch": 1.9944965348552792, "grad_norm": 1.210106372833252, "learning_rate": 0.0007507898491642887, "loss": 3.3962, "step": 29355 }, { "epoch": 1.994836254925941, "grad_norm": 1.9425239562988281, "learning_rate": 0.000750747384155456, "loss": 3.703, "step": 29360 }, { "epoch": 1.9951759749966027, "grad_norm": 1.2065372467041016, "learning_rate": 0.0007507049191466232, "loss": 3.5974, "step": 29365 }, { "epoch": 1.9955156950672646, "grad_norm": 1.3202189207077026, "learning_rate": 0.0007506624541377904, "loss": 3.581, "step": 29370 }, { "epoch": 1.9958554151379264, "grad_norm": 1.3786475658416748, "learning_rate": 0.0007506199891289578, "loss": 3.6487, "step": 29375 }, { "epoch": 1.996195135208588, "grad_norm": 1.091187596321106, "learning_rate": 0.000750577524120125, "loss": 3.8196, "step": 29380 }, { "epoch": 1.99653485527925, "grad_norm": 1.9320824146270752, "learning_rate": 0.0007505350591112922, "loss": 3.6103, "step": 
29385 }, { "epoch": 1.9968745753499118, "grad_norm": 1.3614803552627563, "learning_rate": 0.0007504925941024597, "loss": 3.4078, "step": 29390 }, { "epoch": 1.9972142954205734, "grad_norm": 1.2625110149383545, "learning_rate": 0.0007504501290936269, "loss": 3.7337, "step": 29395 }, { "epoch": 1.9975540154912352, "grad_norm": 1.1605491638183594, "learning_rate": 0.0007504076640847941, "loss": 3.7361, "step": 29400 }, { "epoch": 1.997893735561897, "grad_norm": 1.3709025382995605, "learning_rate": 0.0007503651990759615, "loss": 3.6565, "step": 29405 }, { "epoch": 1.9982334556325587, "grad_norm": 1.197245717048645, "learning_rate": 0.0007503227340671287, "loss": 3.6399, "step": 29410 }, { "epoch": 1.9985731757032206, "grad_norm": 1.4533069133758545, "learning_rate": 0.0007502802690582959, "loss": 3.5067, "step": 29415 }, { "epoch": 1.9989128957738824, "grad_norm": 1.3246190547943115, "learning_rate": 0.0007502378040494632, "loss": 3.6772, "step": 29420 }, { "epoch": 1.999252615844544, "grad_norm": 1.380786418914795, "learning_rate": 0.0007501953390406306, "loss": 3.6217, "step": 29425 }, { "epoch": 1.999592335915206, "grad_norm": 1.148766040802002, "learning_rate": 0.0007501528740317978, "loss": 3.531, "step": 29430 }, { "epoch": 1.9999320559858678, "grad_norm": 1.0815565586090088, "learning_rate": 0.0007501104090229651, "loss": 3.6725, "step": 29435 }, { "epoch": 2.0, "eval_bertscore": { "f1": 0.8412870100122376, "precision": 0.8428162400638634, "recall": 0.8405212142148883 }, "eval_bleu_4": 0.0173233746214528, "eval_exact_match": 9.690861517588914e-05, "eval_loss": 3.4906246662139893, "eval_meteor": 0.09412700196448416, "eval_rouge": { "rouge1": 0.1279762562858631, "rouge2": 0.01807488684995727, "rougeL": 0.11101985606807296, "rougeLsum": 0.11107225279813082 }, "eval_runtime": 1933.104, "eval_samples_per_second": 5.338, "eval_steps_per_second": 0.667, "step": 29436 }, { "epoch": 2.0002717760565294, "grad_norm": 1.57707679271698, "learning_rate": 
0.0007500679440141324, "loss": 3.5014, "step": 29440 }, { "epoch": 2.000611496127191, "grad_norm": 1.1596956253051758, "learning_rate": 0.0007500254790052996, "loss": 3.5579, "step": 29445 }, { "epoch": 2.000951216197853, "grad_norm": 1.2409700155258179, "learning_rate": 0.0007499830139964669, "loss": 3.5573, "step": 29450 }, { "epoch": 2.0012909362685147, "grad_norm": 1.208603858947754, "learning_rate": 0.0007499405489876342, "loss": 3.5289, "step": 29455 }, { "epoch": 2.0016306563391764, "grad_norm": 1.6701786518096924, "learning_rate": 0.0007498980839788015, "loss": 3.4361, "step": 29460 }, { "epoch": 2.0019703764098384, "grad_norm": 1.6765633821487427, "learning_rate": 0.0007498556189699688, "loss": 3.3327, "step": 29465 }, { "epoch": 2.0023100964805, "grad_norm": 1.7767382860183716, "learning_rate": 0.000749813153961136, "loss": 3.3339, "step": 29470 }, { "epoch": 2.0026498165511617, "grad_norm": 1.4846662282943726, "learning_rate": 0.0007497706889523033, "loss": 3.3772, "step": 29475 }, { "epoch": 2.0029895366218238, "grad_norm": 1.4760130643844604, "learning_rate": 0.0007497282239434706, "loss": 3.4205, "step": 29480 }, { "epoch": 2.0033292566924854, "grad_norm": 1.4483377933502197, "learning_rate": 0.0007496857589346378, "loss": 3.3859, "step": 29485 }, { "epoch": 2.003668976763147, "grad_norm": 1.3571035861968994, "learning_rate": 0.0007496432939258051, "loss": 3.5959, "step": 29490 }, { "epoch": 2.004008696833809, "grad_norm": 1.0177562236785889, "learning_rate": 0.0007496008289169725, "loss": 3.6852, "step": 29495 }, { "epoch": 2.0043484169044707, "grad_norm": 1.1377463340759277, "learning_rate": 0.0007495583639081397, "loss": 3.1785, "step": 29500 }, { "epoch": 2.0046881369751324, "grad_norm": 1.813329815864563, "learning_rate": 0.000749515898899307, "loss": 3.5524, "step": 29505 }, { "epoch": 2.0050278570457944, "grad_norm": 1.4510287046432495, "learning_rate": 0.0007494734338904743, "loss": 3.7821, "step": 29510 }, { "epoch": 2.005367577116456, 
"grad_norm": 1.1337552070617676, "learning_rate": 0.0007494309688816415, "loss": 3.4857, "step": 29515 }, { "epoch": 2.0057072971871177, "grad_norm": 1.8433629274368286, "learning_rate": 0.0007493885038728087, "loss": 3.2975, "step": 29520 }, { "epoch": 2.00604701725778, "grad_norm": 1.9592212438583374, "learning_rate": 0.0007493460388639761, "loss": 3.6141, "step": 29525 }, { "epoch": 2.0063867373284414, "grad_norm": 1.127496600151062, "learning_rate": 0.0007493035738551434, "loss": 3.5918, "step": 29530 }, { "epoch": 2.006726457399103, "grad_norm": 2.045349359512329, "learning_rate": 0.0007492611088463106, "loss": 3.6604, "step": 29535 }, { "epoch": 2.007066177469765, "grad_norm": 1.2609457969665527, "learning_rate": 0.000749218643837478, "loss": 3.568, "step": 29540 }, { "epoch": 2.0074058975404268, "grad_norm": 1.3625060319900513, "learning_rate": 0.0007491761788286452, "loss": 3.8469, "step": 29545 }, { "epoch": 2.0077456176110884, "grad_norm": 2.654179096221924, "learning_rate": 0.0007491337138198124, "loss": 3.1623, "step": 29550 }, { "epoch": 2.0080853376817505, "grad_norm": 1.3953447341918945, "learning_rate": 0.0007490912488109798, "loss": 3.5972, "step": 29555 }, { "epoch": 2.008425057752412, "grad_norm": 1.3904074430465698, "learning_rate": 0.000749048783802147, "loss": 3.2929, "step": 29560 }, { "epoch": 2.0087647778230737, "grad_norm": 1.2628405094146729, "learning_rate": 0.0007490063187933144, "loss": 3.4326, "step": 29565 }, { "epoch": 2.0091044978937354, "grad_norm": 1.5770435333251953, "learning_rate": 0.0007489723467862481, "loss": 3.579, "step": 29570 }, { "epoch": 2.0094442179643974, "grad_norm": 1.0151751041412354, "learning_rate": 0.0007489298817774154, "loss": 3.5095, "step": 29575 }, { "epoch": 2.009783938035059, "grad_norm": 2.1715495586395264, "learning_rate": 0.0007488874167685827, "loss": 3.6184, "step": 29580 }, { "epoch": 2.0101236581057207, "grad_norm": 1.0364094972610474, "learning_rate": 0.00074884495175975, "loss": 3.5139, "step": 
29585 }, { "epoch": 2.0104633781763828, "grad_norm": 1.3772906064987183, "learning_rate": 0.0007488024867509173, "loss": 3.5441, "step": 29590 }, { "epoch": 2.0108030982470444, "grad_norm": 1.427634358406067, "learning_rate": 0.0007487600217420846, "loss": 3.5755, "step": 29595 }, { "epoch": 2.011142818317706, "grad_norm": 1.415955662727356, "learning_rate": 0.0007487175567332518, "loss": 3.3634, "step": 29600 }, { "epoch": 2.011482538388368, "grad_norm": 1.339700698852539, "learning_rate": 0.000748675091724419, "loss": 3.4175, "step": 29605 }, { "epoch": 2.0118222584590297, "grad_norm": 1.2122597694396973, "learning_rate": 0.0007486326267155864, "loss": 3.3894, "step": 29610 }, { "epoch": 2.0121619785296914, "grad_norm": 1.4320666790008545, "learning_rate": 0.0007485901617067536, "loss": 3.666, "step": 29615 }, { "epoch": 2.0125016986003534, "grad_norm": 1.3782567977905273, "learning_rate": 0.0007485476966979209, "loss": 3.5117, "step": 29620 }, { "epoch": 2.012841418671015, "grad_norm": 1.2731754779815674, "learning_rate": 0.0007485052316890883, "loss": 3.36, "step": 29625 }, { "epoch": 2.0131811387416767, "grad_norm": 1.5008772611618042, "learning_rate": 0.0007484627666802555, "loss": 3.6143, "step": 29630 }, { "epoch": 2.0135208588123388, "grad_norm": 1.1573688983917236, "learning_rate": 0.0007484203016714227, "loss": 3.5042, "step": 29635 }, { "epoch": 2.0138605788830004, "grad_norm": 2.1006951332092285, "learning_rate": 0.0007483778366625901, "loss": 3.2718, "step": 29640 }, { "epoch": 2.014200298953662, "grad_norm": 1.195431113243103, "learning_rate": 0.0007483353716537573, "loss": 3.4794, "step": 29645 }, { "epoch": 2.014540019024324, "grad_norm": 1.1724225282669067, "learning_rate": 0.0007482929066449245, "loss": 3.5258, "step": 29650 }, { "epoch": 2.0148797390949857, "grad_norm": 2.0899856090545654, "learning_rate": 0.000748250441636092, "loss": 3.7855, "step": 29655 }, { "epoch": 2.0152194591656474, "grad_norm": 1.2368133068084717, "learning_rate": 
0.0007482079766272592, "loss": 3.6886, "step": 29660 }, { "epoch": 2.0155591792363095, "grad_norm": 1.2361571788787842, "learning_rate": 0.0007481655116184264, "loss": 3.608, "step": 29665 }, { "epoch": 2.015898899306971, "grad_norm": 12.716486930847168, "learning_rate": 0.0007481230466095937, "loss": 3.416, "step": 29670 }, { "epoch": 2.0162386193776327, "grad_norm": 1.4803471565246582, "learning_rate": 0.000748080581600761, "loss": 3.144, "step": 29675 }, { "epoch": 2.016578339448295, "grad_norm": 1.4482117891311646, "learning_rate": 0.0007480381165919282, "loss": 3.7472, "step": 29680 }, { "epoch": 2.0169180595189564, "grad_norm": 2.0395522117614746, "learning_rate": 0.0007479956515830955, "loss": 3.4695, "step": 29685 }, { "epoch": 2.017257779589618, "grad_norm": 1.2436726093292236, "learning_rate": 0.0007479531865742629, "loss": 3.7038, "step": 29690 }, { "epoch": 2.01759749966028, "grad_norm": 1.078078269958496, "learning_rate": 0.0007479107215654301, "loss": 3.6907, "step": 29695 }, { "epoch": 2.0179372197309418, "grad_norm": 1.7238996028900146, "learning_rate": 0.0007478682565565974, "loss": 3.4877, "step": 29700 }, { "epoch": 2.0182769398016034, "grad_norm": 1.2157593965530396, "learning_rate": 0.0007478257915477646, "loss": 3.3936, "step": 29705 }, { "epoch": 2.0186166598722655, "grad_norm": 1.7838157415390015, "learning_rate": 0.0007477833265389319, "loss": 3.6616, "step": 29710 }, { "epoch": 2.018956379942927, "grad_norm": 1.5893349647521973, "learning_rate": 0.0007477408615300992, "loss": 3.5305, "step": 29715 }, { "epoch": 2.0192961000135887, "grad_norm": 1.2405493259429932, "learning_rate": 0.0007476983965212664, "loss": 3.3635, "step": 29720 }, { "epoch": 2.0196358200842504, "grad_norm": 1.4456918239593506, "learning_rate": 0.0007476559315124338, "loss": 3.4994, "step": 29725 }, { "epoch": 2.0199755401549124, "grad_norm": 1.655396819114685, "learning_rate": 0.0007476134665036011, "loss": 3.2284, "step": 29730 }, { "epoch": 2.020315260225574, 
"grad_norm": 1.819385290145874, "learning_rate": 0.0007475710014947683, "loss": 3.3552, "step": 29735 }, { "epoch": 2.0206549802962357, "grad_norm": 4.984613418579102, "learning_rate": 0.0007475285364859356, "loss": 3.3904, "step": 29740 }, { "epoch": 2.0209947003668978, "grad_norm": 1.7589837312698364, "learning_rate": 0.0007474860714771029, "loss": 3.3431, "step": 29745 }, { "epoch": 2.0213344204375594, "grad_norm": 1.650583028793335, "learning_rate": 0.0007474436064682701, "loss": 3.4016, "step": 29750 }, { "epoch": 2.021674140508221, "grad_norm": 1.2643438577651978, "learning_rate": 0.0007474011414594373, "loss": 3.6046, "step": 29755 }, { "epoch": 2.022013860578883, "grad_norm": 1.148240566253662, "learning_rate": 0.0007473586764506048, "loss": 3.6873, "step": 29760 }, { "epoch": 2.0223535806495447, "grad_norm": 1.3282378911972046, "learning_rate": 0.000747316211441772, "loss": 3.5607, "step": 29765 }, { "epoch": 2.0226933007202064, "grad_norm": 1.5512443780899048, "learning_rate": 0.0007472737464329393, "loss": 3.3639, "step": 29770 }, { "epoch": 2.0230330207908684, "grad_norm": 1.3404473066329956, "learning_rate": 0.0007472312814241066, "loss": 3.4676, "step": 29775 }, { "epoch": 2.02337274086153, "grad_norm": 1.7048628330230713, "learning_rate": 0.0007471888164152738, "loss": 3.5326, "step": 29780 }, { "epoch": 2.0237124609321917, "grad_norm": 1.2357597351074219, "learning_rate": 0.0007471463514064411, "loss": 3.4425, "step": 29785 }, { "epoch": 2.024052181002854, "grad_norm": 1.171563744544983, "learning_rate": 0.0007471038863976084, "loss": 3.5556, "step": 29790 }, { "epoch": 2.0243919010735154, "grad_norm": 1.6448118686676025, "learning_rate": 0.0007470614213887757, "loss": 3.5038, "step": 29795 }, { "epoch": 2.024731621144177, "grad_norm": 1.1706453561782837, "learning_rate": 0.000747018956379943, "loss": 3.858, "step": 29800 }, { "epoch": 2.025071341214839, "grad_norm": 1.3812415599822998, "learning_rate": 0.0007469764913711102, "loss": 3.641, "step": 
29805 }, { "epoch": 2.0254110612855007, "grad_norm": 1.1672043800354004, "learning_rate": 0.0007469340263622775, "loss": 3.6017, "step": 29810 }, { "epoch": 2.0257507813561624, "grad_norm": 1.419199824333191, "learning_rate": 0.0007468915613534448, "loss": 3.5098, "step": 29815 }, { "epoch": 2.0260905014268245, "grad_norm": 1.2179969549179077, "learning_rate": 0.000746849096344612, "loss": 3.4459, "step": 29820 }, { "epoch": 2.026430221497486, "grad_norm": 1.272379755973816, "learning_rate": 0.0007468066313357793, "loss": 3.518, "step": 29825 }, { "epoch": 2.0267699415681477, "grad_norm": 1.2504795789718628, "learning_rate": 0.0007467641663269467, "loss": 3.3968, "step": 29830 }, { "epoch": 2.02710966163881, "grad_norm": 1.4023877382278442, "learning_rate": 0.0007467217013181139, "loss": 3.384, "step": 29835 }, { "epoch": 2.0274493817094714, "grad_norm": 1.7819534540176392, "learning_rate": 0.0007466792363092812, "loss": 3.7673, "step": 29840 }, { "epoch": 2.027789101780133, "grad_norm": 1.2934945821762085, "learning_rate": 0.0007466367713004485, "loss": 3.3799, "step": 29845 }, { "epoch": 2.028128821850795, "grad_norm": 1.1187372207641602, "learning_rate": 0.0007465943062916157, "loss": 3.4888, "step": 29850 }, { "epoch": 2.0284685419214568, "grad_norm": 1.2988008260726929, "learning_rate": 0.0007465518412827829, "loss": 3.7531, "step": 29855 }, { "epoch": 2.0288082619921184, "grad_norm": 0.9804505109786987, "learning_rate": 0.0007465093762739503, "loss": 3.5047, "step": 29860 }, { "epoch": 2.0291479820627805, "grad_norm": 1.7999647855758667, "learning_rate": 0.0007464669112651176, "loss": 3.626, "step": 29865 }, { "epoch": 2.029487702133442, "grad_norm": 1.505845546722412, "learning_rate": 0.0007464244462562848, "loss": 3.5929, "step": 29870 }, { "epoch": 2.0298274222041037, "grad_norm": 1.243972659111023, "learning_rate": 0.0007463819812474522, "loss": 3.6514, "step": 29875 }, { "epoch": 2.030167142274766, "grad_norm": 1.4952049255371094, "learning_rate": 
0.0007463395162386194, "loss": 3.6692, "step": 29880 }, { "epoch": 2.0305068623454274, "grad_norm": 1.6813937425613403, "learning_rate": 0.0007462970512297866, "loss": 3.5538, "step": 29885 }, { "epoch": 2.030846582416089, "grad_norm": 1.3668086528778076, "learning_rate": 0.000746254586220954, "loss": 3.4663, "step": 29890 }, { "epoch": 2.031186302486751, "grad_norm": 1.9973468780517578, "learning_rate": 0.0007462121212121212, "loss": 3.2158, "step": 29895 }, { "epoch": 2.0315260225574128, "grad_norm": 1.2721668481826782, "learning_rate": 0.0007461696562032885, "loss": 3.2016, "step": 29900 }, { "epoch": 2.0318657426280744, "grad_norm": 1.462807059288025, "learning_rate": 0.0007461271911944559, "loss": 3.775, "step": 29905 }, { "epoch": 2.032205462698736, "grad_norm": 0.9847255349159241, "learning_rate": 0.0007460847261856231, "loss": 3.723, "step": 29910 }, { "epoch": 2.032545182769398, "grad_norm": 1.3915059566497803, "learning_rate": 0.0007460422611767903, "loss": 3.4789, "step": 29915 }, { "epoch": 2.0328849028400597, "grad_norm": 1.2934497594833374, "learning_rate": 0.0007459997961679576, "loss": 3.4339, "step": 29920 }, { "epoch": 2.0332246229107214, "grad_norm": 1.413925051689148, "learning_rate": 0.0007459573311591249, "loss": 3.5728, "step": 29925 }, { "epoch": 2.0335643429813834, "grad_norm": 1.113311529159546, "learning_rate": 0.0007459148661502921, "loss": 3.5173, "step": 29930 }, { "epoch": 2.033904063052045, "grad_norm": 1.6624795198440552, "learning_rate": 0.0007458724011414595, "loss": 3.337, "step": 29935 }, { "epoch": 2.0342437831227067, "grad_norm": 1.579744815826416, "learning_rate": 0.0007458299361326268, "loss": 3.7536, "step": 29940 }, { "epoch": 2.034583503193369, "grad_norm": 1.3195654153823853, "learning_rate": 0.000745787471123794, "loss": 3.4643, "step": 29945 }, { "epoch": 2.0349232232640304, "grad_norm": 1.003969430923462, "learning_rate": 0.0007457450061149613, "loss": 3.4141, "step": 29950 }, { "epoch": 2.035262943334692, 
"grad_norm": 2.9104654788970947, "learning_rate": 0.0007457025411061285, "loss": 3.5543, "step": 29955 }, { "epoch": 2.035602663405354, "grad_norm": 1.0067172050476074, "learning_rate": 0.0007456600760972958, "loss": 3.4364, "step": 29960 }, { "epoch": 2.0359423834760157, "grad_norm": 1.2161234617233276, "learning_rate": 0.0007456176110884631, "loss": 3.3054, "step": 29965 }, { "epoch": 2.0362821035466774, "grad_norm": 1.763232707977295, "learning_rate": 0.0007455751460796304, "loss": 3.5782, "step": 29970 }, { "epoch": 2.0366218236173395, "grad_norm": 1.2804118394851685, "learning_rate": 0.0007455326810707977, "loss": 3.684, "step": 29975 }, { "epoch": 2.036961543688001, "grad_norm": 1.7139289379119873, "learning_rate": 0.000745490216061965, "loss": 3.3243, "step": 29980 }, { "epoch": 2.0373012637586627, "grad_norm": 1.315490484237671, "learning_rate": 0.0007454477510531322, "loss": 3.483, "step": 29985 }, { "epoch": 2.037640983829325, "grad_norm": 1.128893494606018, "learning_rate": 0.0007454052860442994, "loss": 3.6632, "step": 29990 }, { "epoch": 2.0379807038999864, "grad_norm": 1.1087201833724976, "learning_rate": 0.0007453628210354668, "loss": 3.7233, "step": 29995 }, { "epoch": 2.038320423970648, "grad_norm": 1.334999918937683, "learning_rate": 0.000745320356026634, "loss": 3.6834, "step": 30000 }, { "epoch": 2.03866014404131, "grad_norm": 1.1175240278244019, "learning_rate": 0.0007452778910178013, "loss": 3.6631, "step": 30005 }, { "epoch": 2.0389998641119718, "grad_norm": 1.399648904800415, "learning_rate": 0.0007452354260089687, "loss": 3.8784, "step": 30010 }, { "epoch": 2.0393395841826334, "grad_norm": 1.3676406145095825, "learning_rate": 0.0007451929610001359, "loss": 3.7488, "step": 30015 }, { "epoch": 2.0396793042532955, "grad_norm": 1.1969823837280273, "learning_rate": 0.0007451504959913031, "loss": 3.6193, "step": 30020 }, { "epoch": 2.040019024323957, "grad_norm": 1.5229716300964355, "learning_rate": 0.0007451080309824705, "loss": 3.5146, "step": 
30025 }, { "epoch": 2.0403587443946187, "grad_norm": 1.0848770141601562, "learning_rate": 0.0007450655659736377, "loss": 3.6598, "step": 30030 }, { "epoch": 2.040698464465281, "grad_norm": 1.2815297842025757, "learning_rate": 0.0007450231009648049, "loss": 3.5621, "step": 30035 }, { "epoch": 2.0410381845359424, "grad_norm": 1.5338608026504517, "learning_rate": 0.0007449806359559724, "loss": 3.5096, "step": 30040 }, { "epoch": 2.041377904606604, "grad_norm": 1.3827122449874878, "learning_rate": 0.0007449381709471396, "loss": 3.4308, "step": 30045 }, { "epoch": 2.041717624677266, "grad_norm": 1.6246618032455444, "learning_rate": 0.0007448957059383068, "loss": 3.441, "step": 30050 }, { "epoch": 2.0420573447479278, "grad_norm": 1.201721429824829, "learning_rate": 0.0007448532409294741, "loss": 3.2897, "step": 30055 }, { "epoch": 2.0423970648185894, "grad_norm": 1.6550407409667969, "learning_rate": 0.0007448107759206414, "loss": 3.571, "step": 30060 }, { "epoch": 2.042736784889251, "grad_norm": 1.2898982763290405, "learning_rate": 0.0007447683109118086, "loss": 3.4021, "step": 30065 }, { "epoch": 2.043076504959913, "grad_norm": 1.088631510734558, "learning_rate": 0.000744725845902976, "loss": 3.7831, "step": 30070 }, { "epoch": 2.0434162250305747, "grad_norm": 1.3035163879394531, "learning_rate": 0.0007446833808941433, "loss": 3.2919, "step": 30075 }, { "epoch": 2.0437559451012364, "grad_norm": 1.1989396810531616, "learning_rate": 0.0007446409158853105, "loss": 3.479, "step": 30080 }, { "epoch": 2.0440956651718984, "grad_norm": 1.4574049711227417, "learning_rate": 0.0007445984508764778, "loss": 3.6131, "step": 30085 }, { "epoch": 2.04443538524256, "grad_norm": 1.0920767784118652, "learning_rate": 0.000744555985867645, "loss": 3.5419, "step": 30090 }, { "epoch": 2.0447751053132217, "grad_norm": 1.206813097000122, "learning_rate": 0.0007445135208588123, "loss": 3.3882, "step": 30095 }, { "epoch": 2.045114825383884, "grad_norm": 1.6370137929916382, "learning_rate": 
0.0007444710558499796, "loss": 3.3997, "step": 30100 }, { "epoch": 2.0454545454545454, "grad_norm": 6.256786346435547, "learning_rate": 0.0007444285908411469, "loss": 3.6507, "step": 30105 }, { "epoch": 2.045794265525207, "grad_norm": 1.7587968111038208, "learning_rate": 0.0007443861258323143, "loss": 3.492, "step": 30110 }, { "epoch": 2.046133985595869, "grad_norm": 1.1625210046768188, "learning_rate": 0.0007443436608234815, "loss": 3.6756, "step": 30115 }, { "epoch": 2.0464737056665308, "grad_norm": 1.2199639081954956, "learning_rate": 0.0007443011958146487, "loss": 3.4744, "step": 30120 }, { "epoch": 2.0468134257371924, "grad_norm": 1.2043287754058838, "learning_rate": 0.0007442587308058161, "loss": 3.4329, "step": 30125 }, { "epoch": 2.0471531458078545, "grad_norm": 1.3202966451644897, "learning_rate": 0.0007442162657969833, "loss": 3.4298, "step": 30130 }, { "epoch": 2.047492865878516, "grad_norm": 1.2608058452606201, "learning_rate": 0.0007441738007881505, "loss": 3.4831, "step": 30135 }, { "epoch": 2.0478325859491777, "grad_norm": 1.1045913696289062, "learning_rate": 0.000744131335779318, "loss": 3.6691, "step": 30140 }, { "epoch": 2.04817230601984, "grad_norm": 1.7213078737258911, "learning_rate": 0.0007440888707704852, "loss": 3.6413, "step": 30145 }, { "epoch": 2.0485120260905014, "grad_norm": 1.3824043273925781, "learning_rate": 0.0007440464057616524, "loss": 3.3934, "step": 30150 }, { "epoch": 2.048851746161163, "grad_norm": 1.169324278831482, "learning_rate": 0.0007440039407528197, "loss": 3.3644, "step": 30155 }, { "epoch": 2.049191466231825, "grad_norm": 1.1451835632324219, "learning_rate": 0.000743961475743987, "loss": 3.6447, "step": 30160 }, { "epoch": 2.0495311863024868, "grad_norm": 1.3531306982040405, "learning_rate": 0.0007439190107351542, "loss": 3.5027, "step": 30165 }, { "epoch": 2.0498709063731484, "grad_norm": 1.3818846940994263, "learning_rate": 0.0007438765457263215, "loss": 3.4445, "step": 30170 }, { "epoch": 2.0502106264438105, 
"grad_norm": 1.2349791526794434, "learning_rate": 0.0007438340807174889, "loss": 3.4572, "step": 30175 }, { "epoch": 2.050550346514472, "grad_norm": 1.1618850231170654, "learning_rate": 0.0007437916157086561, "loss": 3.777, "step": 30180 }, { "epoch": 2.0508900665851337, "grad_norm": 1.5903204679489136, "learning_rate": 0.0007437491506998234, "loss": 3.2712, "step": 30185 }, { "epoch": 2.051229786655796, "grad_norm": 1.320170521736145, "learning_rate": 0.0007437066856909907, "loss": 3.3131, "step": 30190 }, { "epoch": 2.0515695067264574, "grad_norm": 1.294203519821167, "learning_rate": 0.0007436642206821579, "loss": 3.3034, "step": 30195 }, { "epoch": 2.051909226797119, "grad_norm": 1.3900668621063232, "learning_rate": 0.0007436217556733252, "loss": 3.838, "step": 30200 }, { "epoch": 2.052248946867781, "grad_norm": 1.0007684230804443, "learning_rate": 0.0007435792906644924, "loss": 3.5856, "step": 30205 }, { "epoch": 2.0525886669384428, "grad_norm": 2.219305992126465, "learning_rate": 0.0007435368256556598, "loss": 3.4719, "step": 30210 }, { "epoch": 2.0529283870091044, "grad_norm": 1.4686471223831177, "learning_rate": 0.0007434943606468271, "loss": 3.524, "step": 30215 }, { "epoch": 2.0532681070797665, "grad_norm": 1.181159257888794, "learning_rate": 0.0007434518956379943, "loss": 3.7343, "step": 30220 }, { "epoch": 2.053607827150428, "grad_norm": 1.322489857673645, "learning_rate": 0.0007434094306291616, "loss": 3.3431, "step": 30225 }, { "epoch": 2.0539475472210897, "grad_norm": 1.5807993412017822, "learning_rate": 0.0007433669656203289, "loss": 3.2565, "step": 30230 }, { "epoch": 2.054287267291752, "grad_norm": 1.2356736660003662, "learning_rate": 0.0007433245006114961, "loss": 3.5845, "step": 30235 }, { "epoch": 2.0546269873624134, "grad_norm": 1.2326589822769165, "learning_rate": 0.0007432820356026633, "loss": 3.5354, "step": 30240 }, { "epoch": 2.054966707433075, "grad_norm": 1.3593754768371582, "learning_rate": 0.0007432395705938308, "loss": 3.5811, "step": 
30245 }, { "epoch": 2.0553064275037367, "grad_norm": 1.4300789833068848, "learning_rate": 0.000743197105584998, "loss": 3.495, "step": 30250 }, { "epoch": 2.055646147574399, "grad_norm": 1.0403907299041748, "learning_rate": 0.0007431546405761652, "loss": 3.7102, "step": 30255 }, { "epoch": 2.0559858676450604, "grad_norm": 1.2464396953582764, "learning_rate": 0.0007431121755673326, "loss": 3.7311, "step": 30260 }, { "epoch": 2.056325587715722, "grad_norm": 1.3633068799972534, "learning_rate": 0.0007430697105584998, "loss": 3.6506, "step": 30265 }, { "epoch": 2.056665307786384, "grad_norm": 1.3215041160583496, "learning_rate": 0.000743027245549667, "loss": 3.2963, "step": 30270 }, { "epoch": 2.0570050278570458, "grad_norm": 1.344878911972046, "learning_rate": 0.0007429847805408344, "loss": 3.6294, "step": 30275 }, { "epoch": 2.0573447479277074, "grad_norm": 1.304709792137146, "learning_rate": 0.0007429423155320017, "loss": 3.2093, "step": 30280 }, { "epoch": 2.0576844679983695, "grad_norm": 1.2676206827163696, "learning_rate": 0.0007428998505231689, "loss": 3.4405, "step": 30285 }, { "epoch": 2.058024188069031, "grad_norm": 1.1424229145050049, "learning_rate": 0.0007428573855143363, "loss": 3.6743, "step": 30290 }, { "epoch": 2.0583639081396927, "grad_norm": 1.5402874946594238, "learning_rate": 0.0007428149205055035, "loss": 3.446, "step": 30295 }, { "epoch": 2.058703628210355, "grad_norm": 1.7184685468673706, "learning_rate": 0.0007427724554966707, "loss": 3.6518, "step": 30300 }, { "epoch": 2.0590433482810164, "grad_norm": 1.4084534645080566, "learning_rate": 0.000742729990487838, "loss": 3.6107, "step": 30305 }, { "epoch": 2.059383068351678, "grad_norm": 1.5115283727645874, "learning_rate": 0.0007426875254790053, "loss": 3.5292, "step": 30310 }, { "epoch": 2.05972278842234, "grad_norm": 1.2656302452087402, "learning_rate": 0.0007426450604701726, "loss": 3.5227, "step": 30315 }, { "epoch": 2.0600625084930018, "grad_norm": 1.2312157154083252, "learning_rate": 
0.0007426025954613399, "loss": 3.5791, "step": 30320 }, { "epoch": 2.0604022285636634, "grad_norm": 1.2206357717514038, "learning_rate": 0.0007425601304525072, "loss": 3.6401, "step": 30325 }, { "epoch": 2.0607419486343255, "grad_norm": 1.3245288133621216, "learning_rate": 0.0007425176654436744, "loss": 3.4231, "step": 30330 }, { "epoch": 2.061081668704987, "grad_norm": 1.2526295185089111, "learning_rate": 0.0007424752004348417, "loss": 3.4871, "step": 30335 }, { "epoch": 2.0614213887756487, "grad_norm": 1.2472389936447144, "learning_rate": 0.000742432735426009, "loss": 3.501, "step": 30340 }, { "epoch": 2.061761108846311, "grad_norm": 1.391599416732788, "learning_rate": 0.0007423902704171762, "loss": 3.3441, "step": 30345 }, { "epoch": 2.0621008289169724, "grad_norm": 1.2686874866485596, "learning_rate": 0.0007423478054083436, "loss": 3.6002, "step": 30350 }, { "epoch": 2.062440548987634, "grad_norm": 1.259413242340088, "learning_rate": 0.0007423053403995108, "loss": 3.5719, "step": 30355 }, { "epoch": 2.062780269058296, "grad_norm": 1.3954434394836426, "learning_rate": 0.0007422628753906781, "loss": 3.6146, "step": 30360 }, { "epoch": 2.0631199891289578, "grad_norm": 1.168460726737976, "learning_rate": 0.0007422204103818454, "loss": 3.2369, "step": 30365 }, { "epoch": 2.0634597091996194, "grad_norm": 1.5042662620544434, "learning_rate": 0.0007421779453730126, "loss": 3.36, "step": 30370 }, { "epoch": 2.0637994292702815, "grad_norm": 1.2924938201904297, "learning_rate": 0.0007421354803641799, "loss": 3.3631, "step": 30375 }, { "epoch": 2.064139149340943, "grad_norm": 1.0499851703643799, "learning_rate": 0.0007420930153553472, "loss": 3.433, "step": 30380 }, { "epoch": 2.0644788694116047, "grad_norm": 1.1955397129058838, "learning_rate": 0.0007420505503465145, "loss": 3.5245, "step": 30385 }, { "epoch": 2.064818589482267, "grad_norm": 1.6468724012374878, "learning_rate": 0.0007420080853376817, "loss": 3.6024, "step": 30390 }, { "epoch": 2.0651583095529285, 
"grad_norm": 2.359675884246826, "learning_rate": 0.0007419656203288491, "loss": 3.6325, "step": 30395 }, { "epoch": 2.06549802962359, "grad_norm": 2.749616861343384, "learning_rate": 0.0007419231553200163, "loss": 3.3309, "step": 30400 }, { "epoch": 2.0658377496942517, "grad_norm": 1.2979121208190918, "learning_rate": 0.0007418806903111835, "loss": 3.3918, "step": 30405 }, { "epoch": 2.066177469764914, "grad_norm": 1.2206381559371948, "learning_rate": 0.0007418382253023509, "loss": 3.4886, "step": 30410 }, { "epoch": 2.0665171898355754, "grad_norm": 0.9165639877319336, "learning_rate": 0.0007417957602935181, "loss": 3.361, "step": 30415 }, { "epoch": 2.066856909906237, "grad_norm": 1.665771245956421, "learning_rate": 0.0007417532952846854, "loss": 3.3177, "step": 30420 }, { "epoch": 2.067196629976899, "grad_norm": 1.2062759399414062, "learning_rate": 0.0007417108302758528, "loss": 3.5063, "step": 30425 }, { "epoch": 2.0675363500475608, "grad_norm": 1.1761183738708496, "learning_rate": 0.00074166836526702, "loss": 3.6001, "step": 30430 }, { "epoch": 2.0678760701182224, "grad_norm": 0.898933470249176, "learning_rate": 0.0007416259002581872, "loss": 3.4927, "step": 30435 }, { "epoch": 2.0682157901888845, "grad_norm": 1.1130560636520386, "learning_rate": 0.0007415834352493545, "loss": 3.7166, "step": 30440 }, { "epoch": 2.068555510259546, "grad_norm": 1.5092813968658447, "learning_rate": 0.0007415409702405218, "loss": 3.5777, "step": 30445 }, { "epoch": 2.0688952303302077, "grad_norm": 1.145058512687683, "learning_rate": 0.0007414985052316891, "loss": 3.6519, "step": 30450 }, { "epoch": 2.06923495040087, "grad_norm": 1.1977133750915527, "learning_rate": 0.0007414560402228564, "loss": 3.3248, "step": 30455 }, { "epoch": 2.0695746704715314, "grad_norm": 1.3599740266799927, "learning_rate": 0.0007414135752140237, "loss": 3.6406, "step": 30460 }, { "epoch": 2.069914390542193, "grad_norm": 1.4824907779693604, "learning_rate": 0.000741371110205191, "loss": 3.5698, "step": 
30465 }, { "epoch": 2.070254110612855, "grad_norm": 1.2807629108428955, "learning_rate": 0.0007413286451963582, "loss": 3.5133, "step": 30470 }, { "epoch": 2.0705938306835168, "grad_norm": 1.503057837486267, "learning_rate": 0.0007412861801875255, "loss": 3.3638, "step": 30475 }, { "epoch": 2.0709335507541784, "grad_norm": 1.780987024307251, "learning_rate": 0.0007412437151786928, "loss": 3.5448, "step": 30480 }, { "epoch": 2.0712732708248405, "grad_norm": 1.2839913368225098, "learning_rate": 0.00074120125016986, "loss": 3.3071, "step": 30485 }, { "epoch": 2.071612990895502, "grad_norm": 1.2782232761383057, "learning_rate": 0.0007411587851610273, "loss": 3.5364, "step": 30490 }, { "epoch": 2.0719527109661637, "grad_norm": 1.336558222770691, "learning_rate": 0.0007411163201521947, "loss": 3.6183, "step": 30495 }, { "epoch": 2.072292431036826, "grad_norm": 1.1220903396606445, "learning_rate": 0.0007410738551433619, "loss": 3.2378, "step": 30500 }, { "epoch": 2.0726321511074874, "grad_norm": 1.18820059299469, "learning_rate": 0.0007410313901345291, "loss": 3.626, "step": 30505 }, { "epoch": 2.072971871178149, "grad_norm": 1.297839879989624, "learning_rate": 0.0007409889251256965, "loss": 3.4616, "step": 30510 }, { "epoch": 2.073311591248811, "grad_norm": 4.544674873352051, "learning_rate": 0.0007409464601168637, "loss": 3.424, "step": 30515 }, { "epoch": 2.073651311319473, "grad_norm": 1.3114315271377563, "learning_rate": 0.0007409039951080309, "loss": 3.528, "step": 30520 }, { "epoch": 2.0739910313901344, "grad_norm": 1.1613507270812988, "learning_rate": 0.0007408615300991984, "loss": 3.783, "step": 30525 }, { "epoch": 2.0743307514607965, "grad_norm": 1.2009695768356323, "learning_rate": 0.0007408190650903656, "loss": 3.5286, "step": 30530 }, { "epoch": 2.074670471531458, "grad_norm": 1.3448830842971802, "learning_rate": 0.0007407766000815328, "loss": 3.3136, "step": 30535 }, { "epoch": 2.0750101916021197, "grad_norm": 1.2780011892318726, "learning_rate": 
0.0007407341350727001, "loss": 3.2969, "step": 30540 }, { "epoch": 2.075349911672782, "grad_norm": 1.127830982208252, "learning_rate": 0.0007406916700638674, "loss": 3.4816, "step": 30545 }, { "epoch": 2.0756896317434435, "grad_norm": 1.7824183702468872, "learning_rate": 0.0007406492050550346, "loss": 3.4481, "step": 30550 }, { "epoch": 2.076029351814105, "grad_norm": 1.346697211265564, "learning_rate": 0.0007406067400462019, "loss": 3.5847, "step": 30555 }, { "epoch": 2.076369071884767, "grad_norm": 1.6172970533370972, "learning_rate": 0.0007405642750373693, "loss": 3.4594, "step": 30560 }, { "epoch": 2.076708791955429, "grad_norm": 1.3994426727294922, "learning_rate": 0.0007405218100285365, "loss": 3.5669, "step": 30565 }, { "epoch": 2.0770485120260904, "grad_norm": 1.7836041450500488, "learning_rate": 0.0007404793450197038, "loss": 3.3591, "step": 30570 }, { "epoch": 2.0773882320967525, "grad_norm": 1.1102162599563599, "learning_rate": 0.0007404368800108711, "loss": 3.518, "step": 30575 }, { "epoch": 2.077727952167414, "grad_norm": 1.3102850914001465, "learning_rate": 0.0007403944150020383, "loss": 3.6133, "step": 30580 }, { "epoch": 2.0780676722380758, "grad_norm": 1.34432053565979, "learning_rate": 0.0007403519499932056, "loss": 3.4231, "step": 30585 }, { "epoch": 2.0784073923087374, "grad_norm": 1.4595011472702026, "learning_rate": 0.0007403094849843728, "loss": 3.3798, "step": 30590 }, { "epoch": 2.0787471123793995, "grad_norm": 1.244360089302063, "learning_rate": 0.0007402670199755402, "loss": 3.3494, "step": 30595 }, { "epoch": 2.079086832450061, "grad_norm": 1.4664419889450073, "learning_rate": 0.0007402245549667075, "loss": 3.5889, "step": 30600 }, { "epoch": 2.0794265525207227, "grad_norm": 1.7012743949890137, "learning_rate": 0.0007401820899578747, "loss": 3.7504, "step": 30605 }, { "epoch": 2.079766272591385, "grad_norm": 1.1053619384765625, "learning_rate": 0.000740139624949042, "loss": 3.7189, "step": 30610 }, { "epoch": 2.0801059926620464, 
"grad_norm": 1.3730181455612183, "learning_rate": 0.0007400971599402093, "loss": 3.5593, "step": 30615 }, { "epoch": 2.080445712732708, "grad_norm": 1.4137836694717407, "learning_rate": 0.0007400546949313765, "loss": 3.3335, "step": 30620 }, { "epoch": 2.08078543280337, "grad_norm": 1.2646602392196655, "learning_rate": 0.0007400122299225437, "loss": 3.6849, "step": 30625 }, { "epoch": 2.0811251528740318, "grad_norm": 1.1046366691589355, "learning_rate": 0.0007399697649137112, "loss": 3.5734, "step": 30630 }, { "epoch": 2.0814648729446934, "grad_norm": 1.4732698202133179, "learning_rate": 0.0007399272999048784, "loss": 3.4523, "step": 30635 }, { "epoch": 2.0818045930153555, "grad_norm": 1.3852787017822266, "learning_rate": 0.0007398848348960456, "loss": 3.5721, "step": 30640 }, { "epoch": 2.082144313086017, "grad_norm": 1.6897554397583008, "learning_rate": 0.000739842369887213, "loss": 3.8103, "step": 30645 }, { "epoch": 2.0824840331566787, "grad_norm": 1.5193785429000854, "learning_rate": 0.0007397999048783802, "loss": 3.6826, "step": 30650 }, { "epoch": 2.082823753227341, "grad_norm": 1.0962848663330078, "learning_rate": 0.0007397574398695474, "loss": 3.7172, "step": 30655 }, { "epoch": 2.0831634732980024, "grad_norm": 1.514077067375183, "learning_rate": 0.0007397149748607149, "loss": 3.5975, "step": 30660 }, { "epoch": 2.083503193368664, "grad_norm": 2.499358654022217, "learning_rate": 0.0007396725098518821, "loss": 3.5035, "step": 30665 }, { "epoch": 2.083842913439326, "grad_norm": 1.536672830581665, "learning_rate": 0.0007396300448430493, "loss": 3.537, "step": 30670 }, { "epoch": 2.084182633509988, "grad_norm": 1.272104263305664, "learning_rate": 0.0007395875798342167, "loss": 3.4902, "step": 30675 }, { "epoch": 2.0845223535806494, "grad_norm": 1.4972598552703857, "learning_rate": 0.0007395451148253839, "loss": 3.4642, "step": 30680 }, { "epoch": 2.0848620736513115, "grad_norm": 1.5727736949920654, "learning_rate": 0.0007395026498165511, "loss": 3.59, "step": 
30685 }, { "epoch": 2.085201793721973, "grad_norm": 1.1599712371826172, "learning_rate": 0.0007394601848077184, "loss": 3.386, "step": 30690 }, { "epoch": 2.0855415137926347, "grad_norm": 1.2811235189437866, "learning_rate": 0.0007394177197988858, "loss": 3.5549, "step": 30695 }, { "epoch": 2.085881233863297, "grad_norm": 1.4174455404281616, "learning_rate": 0.000739375254790053, "loss": 3.3453, "step": 30700 }, { "epoch": 2.0862209539339585, "grad_norm": 1.0903782844543457, "learning_rate": 0.0007393327897812203, "loss": 3.4634, "step": 30705 }, { "epoch": 2.08656067400462, "grad_norm": 1.5268497467041016, "learning_rate": 0.0007392903247723876, "loss": 3.52, "step": 30710 }, { "epoch": 2.086900394075282, "grad_norm": 1.2509785890579224, "learning_rate": 0.0007392478597635548, "loss": 3.4897, "step": 30715 }, { "epoch": 2.087240114145944, "grad_norm": 1.1698925495147705, "learning_rate": 0.0007392053947547221, "loss": 3.6008, "step": 30720 }, { "epoch": 2.0875798342166054, "grad_norm": 1.127776861190796, "learning_rate": 0.0007391629297458893, "loss": 3.8497, "step": 30725 }, { "epoch": 2.0879195542872675, "grad_norm": 1.0990346670150757, "learning_rate": 0.0007391204647370567, "loss": 3.6092, "step": 30730 }, { "epoch": 2.088259274357929, "grad_norm": 1.3238672018051147, "learning_rate": 0.000739077999728224, "loss": 3.4525, "step": 30735 }, { "epoch": 2.0885989944285908, "grad_norm": 1.0521851778030396, "learning_rate": 0.0007390355347193912, "loss": 3.4391, "step": 30740 }, { "epoch": 2.0889387144992524, "grad_norm": 1.0473856925964355, "learning_rate": 0.0007389930697105585, "loss": 3.5187, "step": 30745 }, { "epoch": 2.0892784345699145, "grad_norm": 1.1544691324234009, "learning_rate": 0.0007389506047017258, "loss": 3.6809, "step": 30750 }, { "epoch": 2.089618154640576, "grad_norm": 1.1725132465362549, "learning_rate": 0.000738908139692893, "loss": 3.7209, "step": 30755 }, { "epoch": 2.0899578747112377, "grad_norm": 1.476753830909729, "learning_rate": 
0.0007388656746840603, "loss": 3.4037, "step": 30760 }, { "epoch": 2.0902975947819, "grad_norm": 1.0160808563232422, "learning_rate": 0.0007388232096752277, "loss": 4.0534, "step": 30765 }, { "epoch": 2.0906373148525614, "grad_norm": 1.0357316732406616, "learning_rate": 0.0007387807446663949, "loss": 3.5885, "step": 30770 }, { "epoch": 2.090977034923223, "grad_norm": 0.9843354225158691, "learning_rate": 0.0007387382796575621, "loss": 3.3927, "step": 30775 }, { "epoch": 2.091316754993885, "grad_norm": 1.1097471714019775, "learning_rate": 0.0007386958146487295, "loss": 3.5389, "step": 30780 }, { "epoch": 2.0916564750645468, "grad_norm": 1.3542940616607666, "learning_rate": 0.0007386533496398967, "loss": 3.5836, "step": 30785 }, { "epoch": 2.0919961951352084, "grad_norm": 1.1613004207611084, "learning_rate": 0.000738610884631064, "loss": 3.6088, "step": 30790 }, { "epoch": 2.0923359152058705, "grad_norm": 1.8342130184173584, "learning_rate": 0.0007385684196222313, "loss": 3.6869, "step": 30795 }, { "epoch": 2.092675635276532, "grad_norm": 1.0449433326721191, "learning_rate": 0.0007385259546133986, "loss": 3.5628, "step": 30800 }, { "epoch": 2.0930153553471937, "grad_norm": 2.232717752456665, "learning_rate": 0.0007384834896045659, "loss": 3.4906, "step": 30805 }, { "epoch": 2.093355075417856, "grad_norm": 1.5453674793243408, "learning_rate": 0.0007384410245957332, "loss": 3.4968, "step": 30810 }, { "epoch": 2.0936947954885174, "grad_norm": 1.5003721714019775, "learning_rate": 0.0007383985595869004, "loss": 3.6556, "step": 30815 }, { "epoch": 2.094034515559179, "grad_norm": 1.1687421798706055, "learning_rate": 0.0007383560945780677, "loss": 3.463, "step": 30820 }, { "epoch": 2.094374235629841, "grad_norm": 1.1923805475234985, "learning_rate": 0.000738313629569235, "loss": 3.5078, "step": 30825 }, { "epoch": 2.094713955700503, "grad_norm": 1.4429473876953125, "learning_rate": 0.0007382711645604022, "loss": 3.5578, "step": 30830 }, { "epoch": 2.0950536757711644, 
"grad_norm": 1.240106463432312, "learning_rate": 0.0007382286995515696, "loss": 3.4854, "step": 30835 }, { "epoch": 2.0953933958418265, "grad_norm": 3.0902838706970215, "learning_rate": 0.0007381862345427368, "loss": 3.5719, "step": 30840 }, { "epoch": 2.095733115912488, "grad_norm": 1.3782179355621338, "learning_rate": 0.0007381437695339041, "loss": 3.6328, "step": 30845 }, { "epoch": 2.0960728359831498, "grad_norm": 1.0474432706832886, "learning_rate": 0.0007381013045250714, "loss": 3.7247, "step": 30850 }, { "epoch": 2.096412556053812, "grad_norm": 1.458800196647644, "learning_rate": 0.0007380588395162386, "loss": 3.5556, "step": 30855 }, { "epoch": 2.0967522761244735, "grad_norm": 1.290332555770874, "learning_rate": 0.0007380163745074059, "loss": 3.6617, "step": 30860 }, { "epoch": 2.097091996195135, "grad_norm": 1.4560106992721558, "learning_rate": 0.0007379739094985732, "loss": 3.259, "step": 30865 }, { "epoch": 2.097431716265797, "grad_norm": 1.2255209684371948, "learning_rate": 0.0007379314444897405, "loss": 3.6779, "step": 30870 }, { "epoch": 2.097771436336459, "grad_norm": 1.0315830707550049, "learning_rate": 0.0007378889794809078, "loss": 3.63, "step": 30875 }, { "epoch": 2.0981111564071204, "grad_norm": 1.419600248336792, "learning_rate": 0.0007378465144720751, "loss": 3.3147, "step": 30880 }, { "epoch": 2.0984508764777825, "grad_norm": 1.3887310028076172, "learning_rate": 0.0007378040494632423, "loss": 3.6859, "step": 30885 }, { "epoch": 2.098790596548444, "grad_norm": 1.1906429529190063, "learning_rate": 0.0007377615844544095, "loss": 3.6957, "step": 30890 }, { "epoch": 2.0991303166191058, "grad_norm": 1.4501134157180786, "learning_rate": 0.0007377191194455769, "loss": 3.4146, "step": 30895 }, { "epoch": 2.099470036689768, "grad_norm": 1.422751784324646, "learning_rate": 0.0007376766544367441, "loss": 3.5884, "step": 30900 }, { "epoch": 2.0998097567604295, "grad_norm": 1.3795543909072876, "learning_rate": 0.0007376341894279114, "loss": 3.8701, "step": 
30905 }, { "epoch": 2.100149476831091, "grad_norm": 1.3465982675552368, "learning_rate": 0.0007375917244190788, "loss": 3.5602, "step": 30910 }, { "epoch": 2.100489196901753, "grad_norm": 3.104445219039917, "learning_rate": 0.000737549259410246, "loss": 3.6054, "step": 30915 }, { "epoch": 2.100828916972415, "grad_norm": 1.9794492721557617, "learning_rate": 0.0007375067944014132, "loss": 3.2438, "step": 30920 }, { "epoch": 2.1011686370430764, "grad_norm": 1.6262844800949097, "learning_rate": 0.0007374643293925806, "loss": 3.7321, "step": 30925 }, { "epoch": 2.101508357113738, "grad_norm": 1.1977416276931763, "learning_rate": 0.0007374218643837478, "loss": 3.4219, "step": 30930 }, { "epoch": 2.1018480771844, "grad_norm": 1.4119267463684082, "learning_rate": 0.000737379399374915, "loss": 3.5277, "step": 30935 }, { "epoch": 2.1021877972550618, "grad_norm": 1.0167783498764038, "learning_rate": 0.0007373369343660824, "loss": 3.6423, "step": 30940 }, { "epoch": 2.1025275173257234, "grad_norm": 1.7915012836456299, "learning_rate": 0.0007372944693572497, "loss": 3.175, "step": 30945 }, { "epoch": 2.1028672373963855, "grad_norm": 1.4695342779159546, "learning_rate": 0.0007372520043484169, "loss": 3.3903, "step": 30950 }, { "epoch": 2.103206957467047, "grad_norm": 1.894014596939087, "learning_rate": 0.0007372095393395842, "loss": 3.625, "step": 30955 }, { "epoch": 2.1035466775377087, "grad_norm": 1.6722623109817505, "learning_rate": 0.0007371670743307515, "loss": 3.8051, "step": 30960 }, { "epoch": 2.103886397608371, "grad_norm": 1.2998801469802856, "learning_rate": 0.0007371246093219187, "loss": 3.4956, "step": 30965 }, { "epoch": 2.1042261176790324, "grad_norm": 1.259463906288147, "learning_rate": 0.000737082144313086, "loss": 3.2865, "step": 30970 }, { "epoch": 2.104565837749694, "grad_norm": 1.3835316896438599, "learning_rate": 0.0007370396793042534, "loss": 3.5009, "step": 30975 }, { "epoch": 2.104905557820356, "grad_norm": 1.2091950178146362, "learning_rate": 
0.0007369972142954206, "loss": 3.3872, "step": 30980 }, { "epoch": 2.105245277891018, "grad_norm": 1.2116912603378296, "learning_rate": 0.0007369547492865879, "loss": 3.4397, "step": 30985 }, { "epoch": 2.1055849979616794, "grad_norm": 1.5892963409423828, "learning_rate": 0.0007369122842777551, "loss": 3.5398, "step": 30990 }, { "epoch": 2.1059247180323415, "grad_norm": 1.206973671913147, "learning_rate": 0.0007368698192689224, "loss": 3.7683, "step": 30995 }, { "epoch": 2.106264438103003, "grad_norm": 1.3299305438995361, "learning_rate": 0.0007368273542600897, "loss": 3.6678, "step": 31000 }, { "epoch": 2.1066041581736648, "grad_norm": 1.1370826959609985, "learning_rate": 0.0007367848892512569, "loss": 3.5027, "step": 31005 }, { "epoch": 2.106943878244327, "grad_norm": 1.3387519121170044, "learning_rate": 0.0007367424242424243, "loss": 3.5262, "step": 31010 }, { "epoch": 2.1072835983149885, "grad_norm": 1.0531114339828491, "learning_rate": 0.0007366999592335916, "loss": 3.745, "step": 31015 }, { "epoch": 2.10762331838565, "grad_norm": 1.0861103534698486, "learning_rate": 0.0007366574942247588, "loss": 3.51, "step": 31020 }, { "epoch": 2.107963038456312, "grad_norm": 1.2831658124923706, "learning_rate": 0.000736615029215926, "loss": 3.5426, "step": 31025 }, { "epoch": 2.108302758526974, "grad_norm": 1.520253300666809, "learning_rate": 0.0007365725642070934, "loss": 3.6061, "step": 31030 }, { "epoch": 2.1086424785976354, "grad_norm": 1.6594432592391968, "learning_rate": 0.0007365300991982606, "loss": 3.3816, "step": 31035 }, { "epoch": 2.1089821986682975, "grad_norm": 1.3587483167648315, "learning_rate": 0.0007364876341894278, "loss": 3.4993, "step": 31040 }, { "epoch": 2.109321918738959, "grad_norm": 1.0824953317642212, "learning_rate": 0.0007364451691805953, "loss": 3.6672, "step": 31045 }, { "epoch": 2.1096616388096208, "grad_norm": 1.7767930030822754, "learning_rate": 0.0007364027041717625, "loss": 3.5557, "step": 31050 }, { "epoch": 2.110001358880283, 
"grad_norm": 1.243147611618042, "learning_rate": 0.0007363602391629297, "loss": 3.7843, "step": 31055 }, { "epoch": 2.1103410789509445, "grad_norm": 1.2769103050231934, "learning_rate": 0.0007363177741540971, "loss": 3.5168, "step": 31060 }, { "epoch": 2.110680799021606, "grad_norm": 1.8604645729064941, "learning_rate": 0.0007362753091452643, "loss": 3.599, "step": 31065 }, { "epoch": 2.111020519092268, "grad_norm": 1.3915965557098389, "learning_rate": 0.0007362328441364315, "loss": 3.5975, "step": 31070 }, { "epoch": 2.11136023916293, "grad_norm": 1.058197259902954, "learning_rate": 0.0007361903791275988, "loss": 3.8127, "step": 31075 }, { "epoch": 2.1116999592335914, "grad_norm": 1.3919144868850708, "learning_rate": 0.0007361479141187662, "loss": 3.4106, "step": 31080 }, { "epoch": 2.112039679304253, "grad_norm": 2.3059744834899902, "learning_rate": 0.0007361054491099334, "loss": 3.4203, "step": 31085 }, { "epoch": 2.112379399374915, "grad_norm": 1.3494987487792969, "learning_rate": 0.0007360629841011007, "loss": 3.3867, "step": 31090 }, { "epoch": 2.1127191194455768, "grad_norm": 1.030698537826538, "learning_rate": 0.000736020519092268, "loss": 3.4678, "step": 31095 }, { "epoch": 2.1130588395162384, "grad_norm": 1.0878065824508667, "learning_rate": 0.0007359780540834352, "loss": 3.276, "step": 31100 }, { "epoch": 2.1133985595869005, "grad_norm": 1.4367218017578125, "learning_rate": 0.0007359355890746025, "loss": 3.6187, "step": 31105 }, { "epoch": 2.113738279657562, "grad_norm": 1.5923399925231934, "learning_rate": 0.0007358931240657698, "loss": 3.6981, "step": 31110 }, { "epoch": 2.1140779997282237, "grad_norm": 1.1084554195404053, "learning_rate": 0.0007358506590569371, "loss": 3.3351, "step": 31115 }, { "epoch": 2.114417719798886, "grad_norm": 1.111324667930603, "learning_rate": 0.0007358081940481044, "loss": 3.4742, "step": 31120 }, { "epoch": 2.1147574398695475, "grad_norm": 1.573406457901001, "learning_rate": 0.0007357657290392716, "loss": 3.7386, "step": 
31125 }, { "epoch": 2.115097159940209, "grad_norm": 1.2818058729171753, "learning_rate": 0.000735723264030439, "loss": 3.4029, "step": 31130 }, { "epoch": 2.115436880010871, "grad_norm": 1.6118664741516113, "learning_rate": 0.0007356807990216062, "loss": 3.2381, "step": 31135 }, { "epoch": 2.115776600081533, "grad_norm": 1.4600114822387695, "learning_rate": 0.0007356383340127734, "loss": 3.2857, "step": 31140 }, { "epoch": 2.1161163201521944, "grad_norm": 1.8122349977493286, "learning_rate": 0.0007355958690039408, "loss": 3.7832, "step": 31145 }, { "epoch": 2.1164560402228565, "grad_norm": 1.584443211555481, "learning_rate": 0.0007355534039951081, "loss": 3.9253, "step": 31150 }, { "epoch": 2.116795760293518, "grad_norm": 1.1939679384231567, "learning_rate": 0.0007355109389862753, "loss": 3.3177, "step": 31155 }, { "epoch": 2.1171354803641798, "grad_norm": 1.347859263420105, "learning_rate": 0.0007354684739774427, "loss": 3.2905, "step": 31160 }, { "epoch": 2.117475200434842, "grad_norm": 1.0615178346633911, "learning_rate": 0.0007354260089686099, "loss": 3.6622, "step": 31165 }, { "epoch": 2.1178149205055035, "grad_norm": 1.5131583213806152, "learning_rate": 0.0007353835439597771, "loss": 3.4736, "step": 31170 }, { "epoch": 2.118154640576165, "grad_norm": 2.5566301345825195, "learning_rate": 0.0007353410789509444, "loss": 3.6553, "step": 31175 }, { "epoch": 2.118494360646827, "grad_norm": 1.3970283269882202, "learning_rate": 0.0007352986139421117, "loss": 3.5879, "step": 31180 }, { "epoch": 2.118834080717489, "grad_norm": 1.151605486869812, "learning_rate": 0.000735256148933279, "loss": 3.4634, "step": 31185 }, { "epoch": 2.1191738007881504, "grad_norm": 1.267061471939087, "learning_rate": 0.0007352136839244463, "loss": 3.4287, "step": 31190 }, { "epoch": 2.1195135208588125, "grad_norm": 1.3249139785766602, "learning_rate": 0.0007351712189156136, "loss": 3.431, "step": 31195 }, { "epoch": 2.119853240929474, "grad_norm": 1.3186211585998535, "learning_rate": 
0.0007351287539067808, "loss": 3.6928, "step": 31200 }, { "epoch": 2.1201929610001358, "grad_norm": 1.476728081703186, "learning_rate": 0.0007350862888979481, "loss": 3.7018, "step": 31205 }, { "epoch": 2.120532681070798, "grad_norm": 1.4299085140228271, "learning_rate": 0.0007350438238891154, "loss": 3.4553, "step": 31210 }, { "epoch": 2.1208724011414595, "grad_norm": 1.5277849435806274, "learning_rate": 0.0007350013588802826, "loss": 3.6949, "step": 31215 }, { "epoch": 2.121212121212121, "grad_norm": 1.519430160522461, "learning_rate": 0.00073495889387145, "loss": 3.5501, "step": 31220 }, { "epoch": 2.121551841282783, "grad_norm": 1.6707732677459717, "learning_rate": 0.0007349164288626172, "loss": 3.4919, "step": 31225 }, { "epoch": 2.121891561353445, "grad_norm": 4.57146692276001, "learning_rate": 0.0007348739638537845, "loss": 3.5796, "step": 31230 }, { "epoch": 2.1222312814241064, "grad_norm": 1.681418776512146, "learning_rate": 0.0007348314988449518, "loss": 3.4831, "step": 31235 }, { "epoch": 2.1225710014947685, "grad_norm": 1.3851436376571655, "learning_rate": 0.000734789033836119, "loss": 3.5442, "step": 31240 }, { "epoch": 2.12291072156543, "grad_norm": 1.216661810874939, "learning_rate": 0.0007347465688272863, "loss": 3.6112, "step": 31245 }, { "epoch": 2.123250441636092, "grad_norm": 1.279902458190918, "learning_rate": 0.0007347041038184537, "loss": 3.6568, "step": 31250 }, { "epoch": 2.123590161706754, "grad_norm": 2.4099531173706055, "learning_rate": 0.0007346616388096209, "loss": 3.6628, "step": 31255 }, { "epoch": 2.1239298817774155, "grad_norm": 1.1040658950805664, "learning_rate": 0.0007346191738007882, "loss": 3.4962, "step": 31260 }, { "epoch": 2.124269601848077, "grad_norm": 1.4655786752700806, "learning_rate": 0.0007345767087919555, "loss": 3.248, "step": 31265 }, { "epoch": 2.1246093219187387, "grad_norm": 1.451086401939392, "learning_rate": 0.0007345342437831227, "loss": 3.4063, "step": 31270 }, { "epoch": 2.124949041989401, "grad_norm": 
1.476006269454956, "learning_rate": 0.0007344917787742899, "loss": 3.2395, "step": 31275 }, { "epoch": 2.1252887620600625, "grad_norm": 1.1654952764511108, "learning_rate": 0.0007344493137654573, "loss": 3.5346, "step": 31280 }, { "epoch": 2.125628482130724, "grad_norm": 1.2009793519973755, "learning_rate": 0.0007344068487566246, "loss": 3.6271, "step": 31285 }, { "epoch": 2.125968202201386, "grad_norm": 1.5308265686035156, "learning_rate": 0.0007343643837477918, "loss": 3.6016, "step": 31290 }, { "epoch": 2.126307922272048, "grad_norm": 1.5364195108413696, "learning_rate": 0.0007343219187389592, "loss": 3.0734, "step": 31295 }, { "epoch": 2.1266476423427094, "grad_norm": 1.1811902523040771, "learning_rate": 0.0007342794537301264, "loss": 3.3639, "step": 31300 }, { "epoch": 2.1269873624133715, "grad_norm": 1.734483242034912, "learning_rate": 0.0007342369887212936, "loss": 3.3675, "step": 31305 }, { "epoch": 2.127327082484033, "grad_norm": 1.1231868267059326, "learning_rate": 0.000734194523712461, "loss": 3.3865, "step": 31310 }, { "epoch": 2.1276668025546948, "grad_norm": 1.3975874185562134, "learning_rate": 0.0007341520587036282, "loss": 3.4816, "step": 31315 }, { "epoch": 2.128006522625357, "grad_norm": 1.5554382801055908, "learning_rate": 0.0007341095936947955, "loss": 3.4325, "step": 31320 }, { "epoch": 2.1283462426960185, "grad_norm": 2.4629712104797363, "learning_rate": 0.0007340671286859629, "loss": 3.4196, "step": 31325 }, { "epoch": 2.12868596276668, "grad_norm": 1.533933401107788, "learning_rate": 0.0007340246636771301, "loss": 3.5254, "step": 31330 }, { "epoch": 2.129025682837342, "grad_norm": 1.9173308610916138, "learning_rate": 0.0007339821986682973, "loss": 3.6553, "step": 31335 }, { "epoch": 2.129365402908004, "grad_norm": 1.2328873872756958, "learning_rate": 0.0007339397336594646, "loss": 3.5395, "step": 31340 }, { "epoch": 2.1297051229786654, "grad_norm": 1.8244836330413818, "learning_rate": 0.0007338972686506319, "loss": 3.5513, "step": 31345 }, { 
"epoch": 2.1300448430493275, "grad_norm": 1.1746333837509155, "learning_rate": 0.0007338548036417991, "loss": 3.6052, "step": 31350 }, { "epoch": 2.130384563119989, "grad_norm": 1.4135231971740723, "learning_rate": 0.0007338123386329665, "loss": 3.5371, "step": 31355 }, { "epoch": 2.1307242831906508, "grad_norm": 1.3690011501312256, "learning_rate": 0.0007337698736241338, "loss": 3.3297, "step": 31360 }, { "epoch": 2.131064003261313, "grad_norm": 1.334261178970337, "learning_rate": 0.000733727408615301, "loss": 3.4593, "step": 31365 }, { "epoch": 2.1314037233319745, "grad_norm": 1.1294000148773193, "learning_rate": 0.0007336849436064683, "loss": 3.587, "step": 31370 }, { "epoch": 2.131743443402636, "grad_norm": 1.4828041791915894, "learning_rate": 0.0007336424785976355, "loss": 3.5224, "step": 31375 }, { "epoch": 2.132083163473298, "grad_norm": 1.593181848526001, "learning_rate": 0.0007336000135888028, "loss": 3.4836, "step": 31380 }, { "epoch": 2.13242288354396, "grad_norm": 1.6349881887435913, "learning_rate": 0.0007335575485799701, "loss": 3.6391, "step": 31385 }, { "epoch": 2.1327626036146214, "grad_norm": 2.0677051544189453, "learning_rate": 0.0007335150835711374, "loss": 3.6952, "step": 31390 }, { "epoch": 2.1331023236852835, "grad_norm": 1.8693615198135376, "learning_rate": 0.0007334726185623047, "loss": 3.346, "step": 31395 }, { "epoch": 2.133442043755945, "grad_norm": 1.2992089986801147, "learning_rate": 0.000733430153553472, "loss": 3.6363, "step": 31400 }, { "epoch": 2.133781763826607, "grad_norm": 1.2639001607894897, "learning_rate": 0.0007333876885446392, "loss": 3.3915, "step": 31405 }, { "epoch": 2.134121483897269, "grad_norm": 1.198630928993225, "learning_rate": 0.0007333452235358064, "loss": 3.657, "step": 31410 }, { "epoch": 2.1344612039679305, "grad_norm": 1.7918894290924072, "learning_rate": 0.0007333027585269738, "loss": 3.5018, "step": 31415 }, { "epoch": 2.134800924038592, "grad_norm": 1.337140440940857, "learning_rate": 0.000733260293518141, 
"loss": 3.6293, "step": 31420 }, { "epoch": 2.1351406441092537, "grad_norm": 1.3995124101638794, "learning_rate": 0.0007332178285093083, "loss": 3.6926, "step": 31425 }, { "epoch": 2.135480364179916, "grad_norm": 1.497120976448059, "learning_rate": 0.0007331753635004757, "loss": 3.6473, "step": 31430 }, { "epoch": 2.1358200842505775, "grad_norm": 1.4772894382476807, "learning_rate": 0.0007331328984916429, "loss": 3.4276, "step": 31435 }, { "epoch": 2.136159804321239, "grad_norm": 1.2660785913467407, "learning_rate": 0.0007330904334828101, "loss": 3.5554, "step": 31440 }, { "epoch": 2.136499524391901, "grad_norm": 1.1796268224716187, "learning_rate": 0.0007330479684739775, "loss": 3.2265, "step": 31445 }, { "epoch": 2.136839244462563, "grad_norm": 1.2636078596115112, "learning_rate": 0.0007330055034651447, "loss": 3.4606, "step": 31450 }, { "epoch": 2.1371789645332244, "grad_norm": 1.3945190906524658, "learning_rate": 0.0007329630384563119, "loss": 3.4943, "step": 31455 }, { "epoch": 2.1375186846038865, "grad_norm": 1.227278709411621, "learning_rate": 0.0007329205734474794, "loss": 3.4724, "step": 31460 }, { "epoch": 2.137858404674548, "grad_norm": 1.3202300071716309, "learning_rate": 0.0007328781084386466, "loss": 3.4068, "step": 31465 }, { "epoch": 2.1381981247452098, "grad_norm": 1.5224651098251343, "learning_rate": 0.0007328356434298139, "loss": 3.2423, "step": 31470 }, { "epoch": 2.138537844815872, "grad_norm": 1.4070591926574707, "learning_rate": 0.0007327931784209811, "loss": 3.3559, "step": 31475 }, { "epoch": 2.1388775648865335, "grad_norm": 1.4879881143569946, "learning_rate": 0.0007327507134121484, "loss": 3.3514, "step": 31480 }, { "epoch": 2.139217284957195, "grad_norm": 1.3080092668533325, "learning_rate": 0.0007327082484033157, "loss": 3.5384, "step": 31485 }, { "epoch": 2.139557005027857, "grad_norm": 2.632699966430664, "learning_rate": 0.0007326657833944829, "loss": 3.6481, "step": 31490 }, { "epoch": 2.139896725098519, "grad_norm": 
1.7297991514205933, "learning_rate": 0.0007326233183856503, "loss": 3.4303, "step": 31495 }, { "epoch": 2.1402364451691804, "grad_norm": 1.1870893239974976, "learning_rate": 0.0007325808533768176, "loss": 3.3238, "step": 31500 }, { "epoch": 2.1405761652398425, "grad_norm": 1.2225130796432495, "learning_rate": 0.0007325383883679848, "loss": 3.4324, "step": 31505 }, { "epoch": 2.140915885310504, "grad_norm": 1.5194956064224243, "learning_rate": 0.000732495923359152, "loss": 3.4758, "step": 31510 }, { "epoch": 2.1412556053811658, "grad_norm": 1.3301379680633545, "learning_rate": 0.0007324534583503194, "loss": 3.4582, "step": 31515 }, { "epoch": 2.141595325451828, "grad_norm": 1.2271156311035156, "learning_rate": 0.0007324109933414866, "loss": 3.4673, "step": 31520 }, { "epoch": 2.1419350455224895, "grad_norm": 1.3673830032348633, "learning_rate": 0.0007323685283326538, "loss": 3.2563, "step": 31525 }, { "epoch": 2.142274765593151, "grad_norm": 1.433120608329773, "learning_rate": 0.0007323260633238213, "loss": 3.6332, "step": 31530 }, { "epoch": 2.142614485663813, "grad_norm": 1.2500410079956055, "learning_rate": 0.0007322835983149885, "loss": 3.4908, "step": 31535 }, { "epoch": 2.142954205734475, "grad_norm": 1.1803709268569946, "learning_rate": 0.0007322411333061557, "loss": 3.5937, "step": 31540 }, { "epoch": 2.1432939258051364, "grad_norm": 1.421764850616455, "learning_rate": 0.0007321986682973231, "loss": 3.5164, "step": 31545 }, { "epoch": 2.1436336458757985, "grad_norm": 1.1259887218475342, "learning_rate": 0.0007321562032884903, "loss": 3.7873, "step": 31550 }, { "epoch": 2.14397336594646, "grad_norm": 1.724563717842102, "learning_rate": 0.0007321137382796575, "loss": 3.6016, "step": 31555 }, { "epoch": 2.144313086017122, "grad_norm": 1.3762400150299072, "learning_rate": 0.0007320712732708249, "loss": 3.8053, "step": 31560 }, { "epoch": 2.144652806087784, "grad_norm": 1.9343039989471436, "learning_rate": 0.0007320288082619922, "loss": 3.5051, "step": 31565 }, { 
"epoch": 2.1449925261584455, "grad_norm": 1.2860138416290283, "learning_rate": 0.0007319863432531594, "loss": 3.4526, "step": 31570 }, { "epoch": 2.145332246229107, "grad_norm": 1.3533344268798828, "learning_rate": 0.0007319438782443267, "loss": 3.6024, "step": 31575 }, { "epoch": 2.145671966299769, "grad_norm": 1.4785888195037842, "learning_rate": 0.000731901413235494, "loss": 3.4746, "step": 31580 }, { "epoch": 2.146011686370431, "grad_norm": 1.5176622867584229, "learning_rate": 0.0007318589482266612, "loss": 3.5031, "step": 31585 }, { "epoch": 2.1463514064410925, "grad_norm": 1.2576477527618408, "learning_rate": 0.0007318164832178285, "loss": 3.5444, "step": 31590 }, { "epoch": 2.1466911265117545, "grad_norm": 1.6162080764770508, "learning_rate": 0.0007317740182089958, "loss": 3.3645, "step": 31595 }, { "epoch": 2.147030846582416, "grad_norm": 1.2463794946670532, "learning_rate": 0.0007317315532001631, "loss": 3.4721, "step": 31600 }, { "epoch": 2.147370566653078, "grad_norm": 1.9808671474456787, "learning_rate": 0.0007316890881913304, "loss": 3.2747, "step": 31605 }, { "epoch": 2.14771028672374, "grad_norm": 1.378146767616272, "learning_rate": 0.0007316466231824977, "loss": 3.3805, "step": 31610 }, { "epoch": 2.1480500067944015, "grad_norm": 1.1525826454162598, "learning_rate": 0.0007316041581736649, "loss": 3.6286, "step": 31615 }, { "epoch": 2.148389726865063, "grad_norm": 1.2933603525161743, "learning_rate": 0.0007315616931648322, "loss": 3.5154, "step": 31620 }, { "epoch": 2.1487294469357248, "grad_norm": 1.5818557739257812, "learning_rate": 0.0007315192281559994, "loss": 3.6533, "step": 31625 }, { "epoch": 2.149069167006387, "grad_norm": 1.4684216976165771, "learning_rate": 0.0007314767631471667, "loss": 3.5322, "step": 31630 }, { "epoch": 2.1494088870770485, "grad_norm": 1.6477502584457397, "learning_rate": 0.0007314342981383341, "loss": 3.3937, "step": 31635 }, { "epoch": 2.14974860714771, "grad_norm": 1.4895943403244019, "learning_rate": 
0.0007313918331295013, "loss": 3.6165, "step": 31640 }, { "epoch": 2.150088327218372, "grad_norm": 1.4292031526565552, "learning_rate": 0.0007313493681206686, "loss": 3.4496, "step": 31645 }, { "epoch": 2.150428047289034, "grad_norm": 1.2087215185165405, "learning_rate": 0.0007313069031118359, "loss": 3.751, "step": 31650 }, { "epoch": 2.1507677673596954, "grad_norm": 1.0622904300689697, "learning_rate": 0.0007312644381030031, "loss": 3.6651, "step": 31655 }, { "epoch": 2.1511074874303575, "grad_norm": 1.0587648153305054, "learning_rate": 0.0007312219730941703, "loss": 3.6675, "step": 31660 }, { "epoch": 2.151447207501019, "grad_norm": 1.1629116535186768, "learning_rate": 0.0007311795080853377, "loss": 3.6413, "step": 31665 }, { "epoch": 2.1517869275716808, "grad_norm": 1.4834555387496948, "learning_rate": 0.000731137043076505, "loss": 3.6548, "step": 31670 }, { "epoch": 2.152126647642343, "grad_norm": 1.2269501686096191, "learning_rate": 0.0007310945780676722, "loss": 3.6619, "step": 31675 }, { "epoch": 2.1524663677130045, "grad_norm": 1.5379955768585205, "learning_rate": 0.0007310521130588396, "loss": 3.4301, "step": 31680 }, { "epoch": 2.152806087783666, "grad_norm": 1.4849393367767334, "learning_rate": 0.0007310096480500068, "loss": 3.4992, "step": 31685 }, { "epoch": 2.153145807854328, "grad_norm": 1.5808340311050415, "learning_rate": 0.000730967183041174, "loss": 3.6932, "step": 31690 }, { "epoch": 2.15348552792499, "grad_norm": 1.3623825311660767, "learning_rate": 0.0007309247180323414, "loss": 3.1551, "step": 31695 }, { "epoch": 2.1538252479956514, "grad_norm": 1.318457841873169, "learning_rate": 0.0007308822530235086, "loss": 3.6028, "step": 31700 }, { "epoch": 2.1541649680663135, "grad_norm": 1.5946632623672485, "learning_rate": 0.0007308397880146759, "loss": 3.5843, "step": 31705 }, { "epoch": 2.154504688136975, "grad_norm": 1.2641748189926147, "learning_rate": 0.0007307973230058433, "loss": 3.516, "step": 31710 }, { "epoch": 2.154844408207637, 
"grad_norm": 1.0264804363250732, "learning_rate": 0.0007307548579970105, "loss": 3.3853, "step": 31715 }, { "epoch": 2.155184128278299, "grad_norm": 1.8280638456344604, "learning_rate": 0.0007307123929881777, "loss": 3.5018, "step": 31720 }, { "epoch": 2.1555238483489605, "grad_norm": 1.1690385341644287, "learning_rate": 0.000730669927979345, "loss": 3.448, "step": 31725 }, { "epoch": 2.155863568419622, "grad_norm": 1.1667194366455078, "learning_rate": 0.0007306274629705123, "loss": 3.7119, "step": 31730 }, { "epoch": 2.156203288490284, "grad_norm": 1.547912359237671, "learning_rate": 0.0007305849979616795, "loss": 3.1975, "step": 31735 }, { "epoch": 2.156543008560946, "grad_norm": 1.4756261110305786, "learning_rate": 0.0007305425329528469, "loss": 3.7164, "step": 31740 }, { "epoch": 2.1568827286316075, "grad_norm": 1.3572628498077393, "learning_rate": 0.0007305000679440142, "loss": 3.4645, "step": 31745 }, { "epoch": 2.1572224487022695, "grad_norm": 1.277490258216858, "learning_rate": 0.0007304576029351814, "loss": 3.2217, "step": 31750 }, { "epoch": 2.157562168772931, "grad_norm": 1.3801332712173462, "learning_rate": 0.0007304151379263487, "loss": 3.44, "step": 31755 }, { "epoch": 2.157901888843593, "grad_norm": 1.2338483333587646, "learning_rate": 0.0007303726729175159, "loss": 3.5293, "step": 31760 }, { "epoch": 2.1582416089142544, "grad_norm": 1.563442349433899, "learning_rate": 0.0007303302079086832, "loss": 3.5203, "step": 31765 }, { "epoch": 2.1585813289849165, "grad_norm": 1.1834250688552856, "learning_rate": 0.0007302877428998506, "loss": 3.7006, "step": 31770 }, { "epoch": 2.158921049055578, "grad_norm": 1.649490475654602, "learning_rate": 0.0007302452778910178, "loss": 3.2861, "step": 31775 }, { "epoch": 2.1592607691262398, "grad_norm": 1.3241772651672363, "learning_rate": 0.0007302028128821851, "loss": 3.4284, "step": 31780 }, { "epoch": 2.159600489196902, "grad_norm": 1.2802170515060425, "learning_rate": 0.0007301603478733524, "loss": 3.4673, "step": 
31785 }, { "epoch": 2.1599402092675635, "grad_norm": 1.6758700609207153, "learning_rate": 0.0007301178828645196, "loss": 3.8205, "step": 31790 }, { "epoch": 2.160279929338225, "grad_norm": 1.2870088815689087, "learning_rate": 0.0007300754178556869, "loss": 3.6038, "step": 31795 }, { "epoch": 2.160619649408887, "grad_norm": 1.1604894399642944, "learning_rate": 0.0007300329528468542, "loss": 3.3404, "step": 31800 }, { "epoch": 2.160959369479549, "grad_norm": 1.6078025102615356, "learning_rate": 0.0007299904878380215, "loss": 3.6585, "step": 31805 }, { "epoch": 2.1612990895502104, "grad_norm": 1.2892205715179443, "learning_rate": 0.0007299480228291889, "loss": 3.7933, "step": 31810 }, { "epoch": 2.1616388096208725, "grad_norm": 1.1521046161651611, "learning_rate": 0.0007299055578203561, "loss": 3.6611, "step": 31815 }, { "epoch": 2.161978529691534, "grad_norm": 1.1849477291107178, "learning_rate": 0.0007298630928115233, "loss": 3.5465, "step": 31820 }, { "epoch": 2.1623182497621958, "grad_norm": 1.2130799293518066, "learning_rate": 0.0007298206278026906, "loss": 3.6645, "step": 31825 }, { "epoch": 2.162657969832858, "grad_norm": 1.41741144657135, "learning_rate": 0.0007297781627938579, "loss": 3.5016, "step": 31830 }, { "epoch": 2.1629976899035195, "grad_norm": 1.4777449369430542, "learning_rate": 0.0007297356977850251, "loss": 3.413, "step": 31835 }, { "epoch": 2.163337409974181, "grad_norm": 1.4391080141067505, "learning_rate": 0.0007296932327761925, "loss": 3.7998, "step": 31840 }, { "epoch": 2.163677130044843, "grad_norm": 1.2979681491851807, "learning_rate": 0.0007296507677673598, "loss": 3.7298, "step": 31845 }, { "epoch": 2.164016850115505, "grad_norm": 1.504098892211914, "learning_rate": 0.000729608302758527, "loss": 3.5123, "step": 31850 }, { "epoch": 2.1643565701861665, "grad_norm": 1.1056004762649536, "learning_rate": 0.0007295658377496943, "loss": 3.6351, "step": 31855 }, { "epoch": 2.1646962902568285, "grad_norm": 1.352620005607605, "learning_rate": 
0.0007295233727408615, "loss": 3.5276, "step": 31860 }, { "epoch": 2.16503601032749, "grad_norm": 1.08523428440094, "learning_rate": 0.0007294809077320288, "loss": 3.6784, "step": 31865 }, { "epoch": 2.165375730398152, "grad_norm": 1.279996395111084, "learning_rate": 0.0007294384427231961, "loss": 3.6476, "step": 31870 }, { "epoch": 2.165715450468814, "grad_norm": 1.28517746925354, "learning_rate": 0.0007293959777143634, "loss": 3.5615, "step": 31875 }, { "epoch": 2.1660551705394755, "grad_norm": 1.5190953016281128, "learning_rate": 0.0007293535127055307, "loss": 3.3135, "step": 31880 }, { "epoch": 2.166394890610137, "grad_norm": 2.500117540359497, "learning_rate": 0.000729311047696698, "loss": 3.6011, "step": 31885 }, { "epoch": 2.166734610680799, "grad_norm": 0.9556498527526855, "learning_rate": 0.0007292685826878652, "loss": 3.5714, "step": 31890 }, { "epoch": 2.167074330751461, "grad_norm": 1.3134775161743164, "learning_rate": 0.0007292261176790325, "loss": 3.4411, "step": 31895 }, { "epoch": 2.1674140508221225, "grad_norm": 1.4106976985931396, "learning_rate": 0.0007291836526701998, "loss": 3.4912, "step": 31900 }, { "epoch": 2.1677537708927845, "grad_norm": 1.1504517793655396, "learning_rate": 0.000729141187661367, "loss": 3.5727, "step": 31905 }, { "epoch": 2.168093490963446, "grad_norm": 1.5406469106674194, "learning_rate": 0.0007290987226525343, "loss": 3.673, "step": 31910 }, { "epoch": 2.168433211034108, "grad_norm": 1.9125068187713623, "learning_rate": 0.0007290562576437017, "loss": 3.6391, "step": 31915 }, { "epoch": 2.16877293110477, "grad_norm": 1.2834621667861938, "learning_rate": 0.0007290137926348689, "loss": 3.5829, "step": 31920 }, { "epoch": 2.1691126511754315, "grad_norm": 1.1015485525131226, "learning_rate": 0.0007289713276260361, "loss": 3.5223, "step": 31925 }, { "epoch": 2.169452371246093, "grad_norm": 1.120002031326294, "learning_rate": 0.0007289288626172035, "loss": 3.3183, "step": 31930 }, { "epoch": 2.169792091316755, "grad_norm": 
1.1795735359191895, "learning_rate": 0.0007288863976083707, "loss": 3.5468, "step": 31935 }, { "epoch": 2.170131811387417, "grad_norm": 1.1723285913467407, "learning_rate": 0.0007288439325995379, "loss": 3.6114, "step": 31940 }, { "epoch": 2.1704715314580785, "grad_norm": 1.3129159212112427, "learning_rate": 0.0007288014675907054, "loss": 3.593, "step": 31945 }, { "epoch": 2.1708112515287405, "grad_norm": 1.2729737758636475, "learning_rate": 0.0007287590025818726, "loss": 3.4058, "step": 31950 }, { "epoch": 2.171150971599402, "grad_norm": 1.6128054857254028, "learning_rate": 0.0007287165375730398, "loss": 3.6281, "step": 31955 }, { "epoch": 2.171490691670064, "grad_norm": 1.1816983222961426, "learning_rate": 0.0007286740725642071, "loss": 3.5805, "step": 31960 }, { "epoch": 2.1718304117407254, "grad_norm": 1.3037546873092651, "learning_rate": 0.0007286316075553744, "loss": 3.6212, "step": 31965 }, { "epoch": 2.1721701318113875, "grad_norm": 1.2585115432739258, "learning_rate": 0.0007285891425465416, "loss": 3.5888, "step": 31970 }, { "epoch": 2.172509851882049, "grad_norm": 1.625495195388794, "learning_rate": 0.0007285466775377089, "loss": 3.5256, "step": 31975 }, { "epoch": 2.172849571952711, "grad_norm": 1.7713794708251953, "learning_rate": 0.0007285042125288763, "loss": 3.4617, "step": 31980 }, { "epoch": 2.173189292023373, "grad_norm": 1.4907675981521606, "learning_rate": 0.0007284617475200435, "loss": 3.5707, "step": 31985 }, { "epoch": 2.1735290120940345, "grad_norm": 1.3862016201019287, "learning_rate": 0.0007284192825112108, "loss": 3.4107, "step": 31990 }, { "epoch": 2.173868732164696, "grad_norm": 1.2186583280563354, "learning_rate": 0.0007283768175023781, "loss": 3.422, "step": 31995 }, { "epoch": 2.174208452235358, "grad_norm": 1.2887455224990845, "learning_rate": 0.0007283343524935453, "loss": 3.4458, "step": 32000 }, { "epoch": 2.17454817230602, "grad_norm": 5.620162487030029, "learning_rate": 0.0007282918874847126, "loss": 3.9431, "step": 32005 }, { 
"epoch": 2.1748878923766815, "grad_norm": 1.5146279335021973, "learning_rate": 0.0007282494224758798, "loss": 3.4378, "step": 32010 }, { "epoch": 2.1752276124473435, "grad_norm": 1.085885763168335, "learning_rate": 0.0007282069574670472, "loss": 3.5917, "step": 32015 }, { "epoch": 2.175567332518005, "grad_norm": 1.5327341556549072, "learning_rate": 0.0007281644924582145, "loss": 3.3918, "step": 32020 }, { "epoch": 2.175907052588667, "grad_norm": 1.4463834762573242, "learning_rate": 0.0007281220274493817, "loss": 3.5916, "step": 32025 }, { "epoch": 2.176246772659329, "grad_norm": 1.2981557846069336, "learning_rate": 0.000728079562440549, "loss": 3.6077, "step": 32030 }, { "epoch": 2.1765864927299905, "grad_norm": 1.4469237327575684, "learning_rate": 0.0007280370974317163, "loss": 3.675, "step": 32035 }, { "epoch": 2.176926212800652, "grad_norm": 1.0635931491851807, "learning_rate": 0.0007279946324228835, "loss": 3.2907, "step": 32040 }, { "epoch": 2.177265932871314, "grad_norm": 1.0372934341430664, "learning_rate": 0.0007279521674140507, "loss": 3.6487, "step": 32045 }, { "epoch": 2.177605652941976, "grad_norm": 1.4557172060012817, "learning_rate": 0.0007279097024052182, "loss": 3.4088, "step": 32050 }, { "epoch": 2.1779453730126375, "grad_norm": 1.6378780603408813, "learning_rate": 0.0007278672373963854, "loss": 3.484, "step": 32055 }, { "epoch": 2.1782850930832995, "grad_norm": 1.1221394538879395, "learning_rate": 0.0007278247723875526, "loss": 3.6419, "step": 32060 }, { "epoch": 2.178624813153961, "grad_norm": 1.3198163509368896, "learning_rate": 0.00072778230737872, "loss": 3.6118, "step": 32065 }, { "epoch": 2.178964533224623, "grad_norm": 1.1812790632247925, "learning_rate": 0.0007277398423698872, "loss": 3.6553, "step": 32070 }, { "epoch": 2.179304253295285, "grad_norm": 1.334834098815918, "learning_rate": 0.0007276973773610544, "loss": 3.4862, "step": 32075 }, { "epoch": 2.1796439733659465, "grad_norm": 1.2805320024490356, "learning_rate": 
0.0007276549123522218, "loss": 3.6414, "step": 32080 }, { "epoch": 2.179983693436608, "grad_norm": 1.1985077857971191, "learning_rate": 0.0007276124473433891, "loss": 3.856, "step": 32085 }, { "epoch": 2.18032341350727, "grad_norm": 1.186833143234253, "learning_rate": 0.0007275699823345563, "loss": 3.2252, "step": 32090 }, { "epoch": 2.180663133577932, "grad_norm": 1.4286279678344727, "learning_rate": 0.0007275275173257237, "loss": 3.5616, "step": 32095 }, { "epoch": 2.1810028536485935, "grad_norm": 1.0401053428649902, "learning_rate": 0.0007274850523168909, "loss": 3.6645, "step": 32100 }, { "epoch": 2.181342573719255, "grad_norm": 1.4427510499954224, "learning_rate": 0.0007274425873080581, "loss": 3.4834, "step": 32105 }, { "epoch": 2.181682293789917, "grad_norm": 1.3334896564483643, "learning_rate": 0.0007274001222992254, "loss": 3.5308, "step": 32110 }, { "epoch": 2.182022013860579, "grad_norm": 2.073711395263672, "learning_rate": 0.0007273576572903927, "loss": 3.4959, "step": 32115 }, { "epoch": 2.1823617339312404, "grad_norm": 1.3764234781265259, "learning_rate": 0.00072731519228156, "loss": 3.5611, "step": 32120 }, { "epoch": 2.1827014540019025, "grad_norm": 1.1781060695648193, "learning_rate": 0.0007272727272727273, "loss": 3.7579, "step": 32125 }, { "epoch": 2.183041174072564, "grad_norm": 1.0483592748641968, "learning_rate": 0.0007272302622638946, "loss": 3.4358, "step": 32130 }, { "epoch": 2.183380894143226, "grad_norm": 1.7228375673294067, "learning_rate": 0.0007271877972550618, "loss": 3.5885, "step": 32135 }, { "epoch": 2.183720614213888, "grad_norm": 1.1853581666946411, "learning_rate": 0.0007271453322462291, "loss": 3.5516, "step": 32140 }, { "epoch": 2.1840603342845495, "grad_norm": 1.3125890493392944, "learning_rate": 0.0007271028672373963, "loss": 3.39, "step": 32145 }, { "epoch": 2.184400054355211, "grad_norm": 1.3231093883514404, "learning_rate": 0.0007270604022285637, "loss": 3.5014, "step": 32150 }, { "epoch": 2.184739774425873, "grad_norm": 
1.6907426118850708, "learning_rate": 0.000727017937219731, "loss": 3.2431, "step": 32155 }, { "epoch": 2.185079494496535, "grad_norm": 1.1522982120513916, "learning_rate": 0.0007269754722108982, "loss": 3.5535, "step": 32160 }, { "epoch": 2.1854192145671965, "grad_norm": 1.3381720781326294, "learning_rate": 0.0007269330072020656, "loss": 3.5433, "step": 32165 }, { "epoch": 2.1857589346378585, "grad_norm": 1.4457721710205078, "learning_rate": 0.0007268905421932328, "loss": 3.6188, "step": 32170 }, { "epoch": 2.18609865470852, "grad_norm": 1.6695446968078613, "learning_rate": 0.0007268480771844, "loss": 3.3552, "step": 32175 }, { "epoch": 2.186438374779182, "grad_norm": 1.3928050994873047, "learning_rate": 0.0007268056121755674, "loss": 3.6266, "step": 32180 }, { "epoch": 2.186778094849844, "grad_norm": 1.244268774986267, "learning_rate": 0.0007267631471667346, "loss": 3.7044, "step": 32185 }, { "epoch": 2.1871178149205055, "grad_norm": 1.1544065475463867, "learning_rate": 0.0007267206821579019, "loss": 3.6804, "step": 32190 }, { "epoch": 2.187457534991167, "grad_norm": 1.1441682577133179, "learning_rate": 0.0007266782171490693, "loss": 3.6926, "step": 32195 }, { "epoch": 2.187797255061829, "grad_norm": 1.697754979133606, "learning_rate": 0.0007266357521402365, "loss": 3.4024, "step": 32200 }, { "epoch": 2.188136975132491, "grad_norm": 1.1884316205978394, "learning_rate": 0.0007265932871314037, "loss": 3.6159, "step": 32205 }, { "epoch": 2.1884766952031525, "grad_norm": 1.2914835214614868, "learning_rate": 0.000726550822122571, "loss": 3.6729, "step": 32210 }, { "epoch": 2.1888164152738145, "grad_norm": 1.5351619720458984, "learning_rate": 0.0007265083571137383, "loss": 3.5178, "step": 32215 }, { "epoch": 2.189156135344476, "grad_norm": 1.5580947399139404, "learning_rate": 0.0007264658921049055, "loss": 3.5721, "step": 32220 }, { "epoch": 2.189495855415138, "grad_norm": 1.4440460205078125, "learning_rate": 0.0007264234270960729, "loss": 3.2998, "step": 32225 }, { 
"epoch": 2.1898355754858, "grad_norm": 1.913870930671692, "learning_rate": 0.0007263809620872402, "loss": 3.6138, "step": 32230 }, { "epoch": 2.1901752955564615, "grad_norm": 1.354516625404358, "learning_rate": 0.0007263384970784074, "loss": 3.7216, "step": 32235 }, { "epoch": 2.190515015627123, "grad_norm": 1.3162956237792969, "learning_rate": 0.0007262960320695747, "loss": 3.6467, "step": 32240 }, { "epoch": 2.190854735697785, "grad_norm": 1.6770585775375366, "learning_rate": 0.000726253567060742, "loss": 3.2305, "step": 32245 }, { "epoch": 2.191194455768447, "grad_norm": 1.130767822265625, "learning_rate": 0.0007262111020519092, "loss": 3.5393, "step": 32250 }, { "epoch": 2.1915341758391085, "grad_norm": 1.887071967124939, "learning_rate": 0.0007261686370430765, "loss": 3.5458, "step": 32255 }, { "epoch": 2.1918738959097706, "grad_norm": 1.3945832252502441, "learning_rate": 0.0007261261720342438, "loss": 3.3237, "step": 32260 }, { "epoch": 2.192213615980432, "grad_norm": 1.4745761156082153, "learning_rate": 0.0007260837070254111, "loss": 3.7261, "step": 32265 }, { "epoch": 2.192553336051094, "grad_norm": 1.216693639755249, "learning_rate": 0.0007260412420165784, "loss": 3.4807, "step": 32270 }, { "epoch": 2.192893056121756, "grad_norm": 1.2232826948165894, "learning_rate": 0.0007259987770077456, "loss": 3.5806, "step": 32275 }, { "epoch": 2.1932327761924175, "grad_norm": 1.6668601036071777, "learning_rate": 0.0007259563119989129, "loss": 3.5646, "step": 32280 }, { "epoch": 2.193572496263079, "grad_norm": 2.12695574760437, "learning_rate": 0.0007259138469900802, "loss": 3.1153, "step": 32285 }, { "epoch": 2.1939122163337412, "grad_norm": 1.4526033401489258, "learning_rate": 0.0007258713819812474, "loss": 3.5624, "step": 32290 }, { "epoch": 2.194251936404403, "grad_norm": 3.07171630859375, "learning_rate": 0.0007258289169724148, "loss": 3.6075, "step": 32295 }, { "epoch": 2.1945916564750645, "grad_norm": 1.3528242111206055, "learning_rate": 0.0007257864519635821, 
"loss": 3.4789, "step": 32300 }, { "epoch": 2.194931376545726, "grad_norm": 1.5449144840240479, "learning_rate": 0.0007257439869547493, "loss": 3.5633, "step": 32305 }, { "epoch": 2.195271096616388, "grad_norm": 1.7398293018341064, "learning_rate": 0.0007257015219459165, "loss": 3.5433, "step": 32310 }, { "epoch": 2.19561081668705, "grad_norm": 1.1002076864242554, "learning_rate": 0.0007256590569370839, "loss": 3.3491, "step": 32315 }, { "epoch": 2.1959505367577115, "grad_norm": 1.5448869466781616, "learning_rate": 0.0007256165919282511, "loss": 3.201, "step": 32320 }, { "epoch": 2.1962902568283735, "grad_norm": 1.276330828666687, "learning_rate": 0.0007255741269194183, "loss": 3.6436, "step": 32325 }, { "epoch": 2.196629976899035, "grad_norm": 1.118735671043396, "learning_rate": 0.0007255316619105858, "loss": 3.4369, "step": 32330 }, { "epoch": 2.196969696969697, "grad_norm": 1.4662193059921265, "learning_rate": 0.000725489196901753, "loss": 3.6136, "step": 32335 }, { "epoch": 2.197309417040359, "grad_norm": 1.2737736701965332, "learning_rate": 0.0007254467318929202, "loss": 3.8111, "step": 32340 }, { "epoch": 2.1976491371110205, "grad_norm": 1.277269721031189, "learning_rate": 0.0007254042668840876, "loss": 3.6699, "step": 32345 }, { "epoch": 2.197988857181682, "grad_norm": 1.2075681686401367, "learning_rate": 0.0007253618018752548, "loss": 3.6419, "step": 32350 }, { "epoch": 2.198328577252344, "grad_norm": 1.6999928951263428, "learning_rate": 0.000725319336866422, "loss": 3.5927, "step": 32355 }, { "epoch": 2.198668297323006, "grad_norm": 1.2223929166793823, "learning_rate": 0.0007252768718575894, "loss": 3.5193, "step": 32360 }, { "epoch": 2.1990080173936675, "grad_norm": 1.1507055759429932, "learning_rate": 0.0007252344068487567, "loss": 3.2776, "step": 32365 }, { "epoch": 2.1993477374643295, "grad_norm": 1.2931385040283203, "learning_rate": 0.0007251919418399239, "loss": 3.2526, "step": 32370 }, { "epoch": 2.199687457534991, "grad_norm": 1.1267566680908203, 
"learning_rate": 0.0007251494768310912, "loss": 3.3356, "step": 32375 }, { "epoch": 2.200027177605653, "grad_norm": 1.222893238067627, "learning_rate": 0.0007251070118222585, "loss": 3.5479, "step": 32380 }, { "epoch": 2.200366897676315, "grad_norm": 1.4136744737625122, "learning_rate": 0.0007250645468134257, "loss": 3.5684, "step": 32385 }, { "epoch": 2.2007066177469765, "grad_norm": 1.1433337926864624, "learning_rate": 0.000725022081804593, "loss": 3.7921, "step": 32390 }, { "epoch": 2.201046337817638, "grad_norm": 1.3095691204071045, "learning_rate": 0.0007249796167957604, "loss": 3.3804, "step": 32395 }, { "epoch": 2.2013860578883, "grad_norm": 1.1525980234146118, "learning_rate": 0.0007249371517869276, "loss": 3.6733, "step": 32400 }, { "epoch": 2.201725777958962, "grad_norm": 1.31692373752594, "learning_rate": 0.0007248946867780949, "loss": 3.5917, "step": 32405 }, { "epoch": 2.2020654980296235, "grad_norm": 1.346644639968872, "learning_rate": 0.0007248522217692621, "loss": 3.4683, "step": 32410 }, { "epoch": 2.2024052181002856, "grad_norm": 1.2185696363449097, "learning_rate": 0.0007248097567604294, "loss": 3.6688, "step": 32415 }, { "epoch": 2.202744938170947, "grad_norm": 1.4531577825546265, "learning_rate": 0.0007247672917515967, "loss": 3.6001, "step": 32420 }, { "epoch": 2.203084658241609, "grad_norm": 1.7210699319839478, "learning_rate": 0.0007247248267427639, "loss": 3.6534, "step": 32425 }, { "epoch": 2.203424378312271, "grad_norm": 1.5131125450134277, "learning_rate": 0.0007246823617339313, "loss": 3.2759, "step": 32430 }, { "epoch": 2.2037640983829325, "grad_norm": 1.2846604585647583, "learning_rate": 0.0007246398967250986, "loss": 3.558, "step": 32435 }, { "epoch": 2.204103818453594, "grad_norm": 1.1903517246246338, "learning_rate": 0.0007245974317162658, "loss": 3.765, "step": 32440 }, { "epoch": 2.204443538524256, "grad_norm": 1.1738569736480713, "learning_rate": 0.000724554966707433, "loss": 3.8207, "step": 32445 }, { "epoch": 
2.204783258594918, "grad_norm": 1.2994623184204102, "learning_rate": 0.0007245125016986004, "loss": 3.3625, "step": 32450 }, { "epoch": 2.2051229786655795, "grad_norm": 1.3007521629333496, "learning_rate": 0.0007244700366897676, "loss": 3.3249, "step": 32455 }, { "epoch": 2.205462698736241, "grad_norm": 1.2580735683441162, "learning_rate": 0.0007244275716809348, "loss": 3.5018, "step": 32460 }, { "epoch": 2.205802418806903, "grad_norm": 1.3791768550872803, "learning_rate": 0.0007243851066721023, "loss": 3.6517, "step": 32465 }, { "epoch": 2.206142138877565, "grad_norm": 1.293538212776184, "learning_rate": 0.0007243426416632695, "loss": 3.6003, "step": 32470 }, { "epoch": 2.2064818589482265, "grad_norm": 1.1654856204986572, "learning_rate": 0.0007243001766544367, "loss": 3.7679, "step": 32475 }, { "epoch": 2.2068215790188885, "grad_norm": 1.436458706855774, "learning_rate": 0.0007242577116456041, "loss": 3.1419, "step": 32480 }, { "epoch": 2.20716129908955, "grad_norm": 1.1984840631484985, "learning_rate": 0.0007242152466367713, "loss": 3.803, "step": 32485 }, { "epoch": 2.207501019160212, "grad_norm": 1.7019152641296387, "learning_rate": 0.0007241727816279386, "loss": 3.4704, "step": 32490 }, { "epoch": 2.207840739230874, "grad_norm": 1.1325246095657349, "learning_rate": 0.0007241303166191058, "loss": 3.7127, "step": 32495 }, { "epoch": 2.2081804593015355, "grad_norm": 1.3623496294021606, "learning_rate": 0.0007240878516102732, "loss": 3.3988, "step": 32500 }, { "epoch": 2.208520179372197, "grad_norm": 1.3641012907028198, "learning_rate": 0.0007240453866014405, "loss": 3.4592, "step": 32505 }, { "epoch": 2.208859899442859, "grad_norm": 1.7782063484191895, "learning_rate": 0.0007240029215926077, "loss": 3.6358, "step": 32510 }, { "epoch": 2.209199619513521, "grad_norm": 1.4119460582733154, "learning_rate": 0.000723960456583775, "loss": 3.711, "step": 32515 }, { "epoch": 2.2095393395841825, "grad_norm": 1.3145607709884644, "learning_rate": 0.0007239179915749423, 
"loss": 3.5412, "step": 32520 }, { "epoch": 2.2098790596548445, "grad_norm": 1.448532223701477, "learning_rate": 0.0007238755265661095, "loss": 3.668, "step": 32525 }, { "epoch": 2.210218779725506, "grad_norm": 1.404344081878662, "learning_rate": 0.0007238330615572768, "loss": 3.6332, "step": 32530 }, { "epoch": 2.210558499796168, "grad_norm": 3.0454604625701904, "learning_rate": 0.0007237905965484442, "loss": 3.5814, "step": 32535 }, { "epoch": 2.21089821986683, "grad_norm": 2.6590678691864014, "learning_rate": 0.0007237481315396114, "loss": 3.7542, "step": 32540 }, { "epoch": 2.2112379399374915, "grad_norm": 1.6150310039520264, "learning_rate": 0.0007237056665307786, "loss": 3.5996, "step": 32545 }, { "epoch": 2.211577660008153, "grad_norm": 1.0606215000152588, "learning_rate": 0.000723663201521946, "loss": 3.6765, "step": 32550 }, { "epoch": 2.211917380078815, "grad_norm": 1.4550188779830933, "learning_rate": 0.0007236207365131132, "loss": 3.5712, "step": 32555 }, { "epoch": 2.212257100149477, "grad_norm": 1.304207444190979, "learning_rate": 0.0007235782715042804, "loss": 3.4479, "step": 32560 }, { "epoch": 2.2125968202201385, "grad_norm": 1.132792592048645, "learning_rate": 0.0007235358064954478, "loss": 3.5562, "step": 32565 }, { "epoch": 2.2129365402908006, "grad_norm": 1.2677881717681885, "learning_rate": 0.0007234933414866151, "loss": 3.3961, "step": 32570 }, { "epoch": 2.213276260361462, "grad_norm": 1.396966814994812, "learning_rate": 0.0007234508764777823, "loss": 3.3964, "step": 32575 }, { "epoch": 2.213615980432124, "grad_norm": 1.3556327819824219, "learning_rate": 0.0007234084114689497, "loss": 3.5863, "step": 32580 }, { "epoch": 2.213955700502786, "grad_norm": 1.3618030548095703, "learning_rate": 0.0007233659464601169, "loss": 3.6997, "step": 32585 }, { "epoch": 2.2142954205734475, "grad_norm": 1.225949764251709, "learning_rate": 0.0007233234814512841, "loss": 3.458, "step": 32590 }, { "epoch": 2.214635140644109, "grad_norm": 1.7820392847061157, 
"learning_rate": 0.0007232810164424514, "loss": 3.3566, "step": 32595 }, { "epoch": 2.2149748607147712, "grad_norm": 1.394184947013855, "learning_rate": 0.0007232385514336187, "loss": 3.5741, "step": 32600 }, { "epoch": 2.215314580785433, "grad_norm": 1.3041805028915405, "learning_rate": 0.000723196086424786, "loss": 3.4285, "step": 32605 }, { "epoch": 2.2156543008560945, "grad_norm": 1.6653411388397217, "learning_rate": 0.0007231536214159533, "loss": 3.3312, "step": 32610 }, { "epoch": 2.2159940209267566, "grad_norm": 1.4086729288101196, "learning_rate": 0.0007231111564071206, "loss": 3.7167, "step": 32615 }, { "epoch": 2.216333740997418, "grad_norm": 1.4935173988342285, "learning_rate": 0.0007230686913982878, "loss": 3.6015, "step": 32620 }, { "epoch": 2.21667346106808, "grad_norm": 1.7636864185333252, "learning_rate": 0.0007230262263894551, "loss": 3.5558, "step": 32625 }, { "epoch": 2.217013181138742, "grad_norm": 1.3412646055221558, "learning_rate": 0.0007229837613806224, "loss": 3.5195, "step": 32630 }, { "epoch": 2.2173529012094035, "grad_norm": 1.5200954675674438, "learning_rate": 0.0007229412963717896, "loss": 3.5173, "step": 32635 }, { "epoch": 2.217692621280065, "grad_norm": 1.3182215690612793, "learning_rate": 0.000722898831362957, "loss": 3.4939, "step": 32640 }, { "epoch": 2.218032341350727, "grad_norm": 1.4469724893569946, "learning_rate": 0.0007228563663541242, "loss": 3.5384, "step": 32645 }, { "epoch": 2.218372061421389, "grad_norm": 1.1539111137390137, "learning_rate": 0.0007228139013452915, "loss": 3.366, "step": 32650 }, { "epoch": 2.2187117814920505, "grad_norm": 1.781531572341919, "learning_rate": 0.0007227714363364588, "loss": 3.5024, "step": 32655 }, { "epoch": 2.219051501562712, "grad_norm": 1.5200533866882324, "learning_rate": 0.000722728971327626, "loss": 3.5016, "step": 32660 }, { "epoch": 2.219391221633374, "grad_norm": 1.2283003330230713, "learning_rate": 0.0007226865063187933, "loss": 3.2817, "step": 32665 }, { "epoch": 
2.219730941704036, "grad_norm": 8.440790176391602, "learning_rate": 0.0007226440413099606, "loss": 3.7402, "step": 32670 }, { "epoch": 2.2200706617746975, "grad_norm": 1.4729208946228027, "learning_rate": 0.0007226015763011279, "loss": 3.5045, "step": 32675 }, { "epoch": 2.2204103818453595, "grad_norm": 1.3694889545440674, "learning_rate": 0.0007225591112922952, "loss": 3.3805, "step": 32680 }, { "epoch": 2.220750101916021, "grad_norm": 1.3046451807022095, "learning_rate": 0.0007225166462834625, "loss": 3.5424, "step": 32685 }, { "epoch": 2.221089821986683, "grad_norm": 1.7019277811050415, "learning_rate": 0.0007224741812746297, "loss": 3.2516, "step": 32690 }, { "epoch": 2.221429542057345, "grad_norm": 1.6092323064804077, "learning_rate": 0.0007224317162657969, "loss": 3.3894, "step": 32695 }, { "epoch": 2.2217692621280065, "grad_norm": 1.0712114572525024, "learning_rate": 0.0007223892512569643, "loss": 3.5232, "step": 32700 }, { "epoch": 2.222108982198668, "grad_norm": 1.1142834424972534, "learning_rate": 0.0007223467862481315, "loss": 3.4104, "step": 32705 }, { "epoch": 2.2224487022693302, "grad_norm": 1.0469918251037598, "learning_rate": 0.0007223043212392988, "loss": 3.6957, "step": 32710 }, { "epoch": 2.222788422339992, "grad_norm": 1.404909372329712, "learning_rate": 0.0007222618562304662, "loss": 3.6235, "step": 32715 }, { "epoch": 2.2231281424106535, "grad_norm": 1.5410913228988647, "learning_rate": 0.0007222193912216334, "loss": 3.4907, "step": 32720 }, { "epoch": 2.2234678624813156, "grad_norm": 1.1630200147628784, "learning_rate": 0.0007221769262128006, "loss": 3.4733, "step": 32725 }, { "epoch": 2.223807582551977, "grad_norm": 1.5329357385635376, "learning_rate": 0.000722134461203968, "loss": 3.5576, "step": 32730 }, { "epoch": 2.224147302622639, "grad_norm": 1.7275360822677612, "learning_rate": 0.0007220919961951352, "loss": 3.5975, "step": 32735 }, { "epoch": 2.224487022693301, "grad_norm": 1.198326826095581, "learning_rate": 0.0007220495311863024, 
"loss": 3.633, "step": 32740 }, { "epoch": 2.2248267427639625, "grad_norm": 1.4184614419937134, "learning_rate": 0.0007220070661774698, "loss": 3.6771, "step": 32745 }, { "epoch": 2.225166462834624, "grad_norm": 1.6328293085098267, "learning_rate": 0.0007219646011686371, "loss": 3.1938, "step": 32750 }, { "epoch": 2.2255061829052862, "grad_norm": 1.9105521440505981, "learning_rate": 0.0007219221361598043, "loss": 3.4654, "step": 32755 }, { "epoch": 2.225845902975948, "grad_norm": 1.14641273021698, "learning_rate": 0.0007218796711509716, "loss": 3.6838, "step": 32760 }, { "epoch": 2.2261856230466095, "grad_norm": 1.4645860195159912, "learning_rate": 0.0007218372061421389, "loss": 3.4651, "step": 32765 }, { "epoch": 2.2265253431172716, "grad_norm": 1.5458794832229614, "learning_rate": 0.0007217947411333061, "loss": 3.6738, "step": 32770 }, { "epoch": 2.226865063187933, "grad_norm": 1.4407198429107666, "learning_rate": 0.0007217522761244734, "loss": 3.584, "step": 32775 }, { "epoch": 2.227204783258595, "grad_norm": 1.3627541065216064, "learning_rate": 0.0007217098111156408, "loss": 3.5642, "step": 32780 }, { "epoch": 2.2275445033292565, "grad_norm": 1.440516471862793, "learning_rate": 0.000721667346106808, "loss": 3.451, "step": 32785 }, { "epoch": 2.2278842233999185, "grad_norm": 1.1661708354949951, "learning_rate": 0.0007216248810979753, "loss": 3.4831, "step": 32790 }, { "epoch": 2.22822394347058, "grad_norm": 1.2438905239105225, "learning_rate": 0.0007215824160891425, "loss": 3.6901, "step": 32795 }, { "epoch": 2.228563663541242, "grad_norm": 1.6153427362442017, "learning_rate": 0.0007215399510803098, "loss": 3.7382, "step": 32800 }, { "epoch": 2.228903383611904, "grad_norm": 2.6791696548461914, "learning_rate": 0.0007214974860714771, "loss": 3.4424, "step": 32805 }, { "epoch": 2.2292431036825655, "grad_norm": 1.2693957090377808, "learning_rate": 0.0007214550210626443, "loss": 3.5477, "step": 32810 }, { "epoch": 2.229582823753227, "grad_norm": 1.096671223640442, 
"learning_rate": 0.0007214125560538117, "loss": 3.5916, "step": 32815 }, { "epoch": 2.229922543823889, "grad_norm": 1.6562937498092651, "learning_rate": 0.000721370091044979, "loss": 3.63, "step": 32820 }, { "epoch": 2.230262263894551, "grad_norm": 1.140293002128601, "learning_rate": 0.0007213276260361462, "loss": 3.6943, "step": 32825 }, { "epoch": 2.2306019839652125, "grad_norm": 1.829532265663147, "learning_rate": 0.0007212851610273136, "loss": 3.8384, "step": 32830 }, { "epoch": 2.2309417040358746, "grad_norm": 1.5160261392593384, "learning_rate": 0.0007212426960184808, "loss": 3.5997, "step": 32835 }, { "epoch": 2.231281424106536, "grad_norm": 1.4479150772094727, "learning_rate": 0.000721200231009648, "loss": 3.4637, "step": 32840 }, { "epoch": 2.231621144177198, "grad_norm": 1.6506489515304565, "learning_rate": 0.0007211577660008153, "loss": 3.4739, "step": 32845 }, { "epoch": 2.23196086424786, "grad_norm": 1.3008475303649902, "learning_rate": 0.0007211153009919827, "loss": 3.5729, "step": 32850 }, { "epoch": 2.2323005843185215, "grad_norm": 1.1530274152755737, "learning_rate": 0.0007210728359831499, "loss": 3.4772, "step": 32855 }, { "epoch": 2.232640304389183, "grad_norm": 1.4369944334030151, "learning_rate": 0.0007210303709743172, "loss": 3.3514, "step": 32860 }, { "epoch": 2.2329800244598452, "grad_norm": 2.680433511734009, "learning_rate": 0.0007209879059654845, "loss": 3.3328, "step": 32865 }, { "epoch": 2.233319744530507, "grad_norm": 1.2287845611572266, "learning_rate": 0.0007209454409566517, "loss": 3.6944, "step": 32870 }, { "epoch": 2.2336594646011685, "grad_norm": 1.4926691055297852, "learning_rate": 0.000720902975947819, "loss": 3.7224, "step": 32875 }, { "epoch": 2.2339991846718306, "grad_norm": 2.3824658393859863, "learning_rate": 0.0007208605109389862, "loss": 3.5269, "step": 32880 }, { "epoch": 2.234338904742492, "grad_norm": 1.252953052520752, "learning_rate": 0.0007208180459301536, "loss": 3.4882, "step": 32885 }, { "epoch": 
2.234678624813154, "grad_norm": 1.2947479486465454, "learning_rate": 0.0007207755809213209, "loss": 3.5747, "step": 32890 }, { "epoch": 2.235018344883816, "grad_norm": 1.5935649871826172, "learning_rate": 0.0007207331159124881, "loss": 3.513, "step": 32895 }, { "epoch": 2.2353580649544775, "grad_norm": 1.384166955947876, "learning_rate": 0.0007206906509036554, "loss": 3.5794, "step": 32900 }, { "epoch": 2.235697785025139, "grad_norm": 1.4263434410095215, "learning_rate": 0.0007206481858948227, "loss": 3.5338, "step": 32905 }, { "epoch": 2.2360375050958012, "grad_norm": 1.3111294507980347, "learning_rate": 0.0007206057208859899, "loss": 3.8321, "step": 32910 }, { "epoch": 2.236377225166463, "grad_norm": 1.800020694732666, "learning_rate": 0.0007205632558771572, "loss": 3.8348, "step": 32915 }, { "epoch": 2.2367169452371245, "grad_norm": 1.2291573286056519, "learning_rate": 0.0007205207908683246, "loss": 3.4757, "step": 32920 }, { "epoch": 2.2370566653077866, "grad_norm": 1.413156509399414, "learning_rate": 0.0007204783258594918, "loss": 3.6128, "step": 32925 }, { "epoch": 2.237396385378448, "grad_norm": 3.000133514404297, "learning_rate": 0.000720435860850659, "loss": 3.7578, "step": 32930 }, { "epoch": 2.23773610544911, "grad_norm": 1.245147943496704, "learning_rate": 0.0007203933958418264, "loss": 3.2664, "step": 32935 }, { "epoch": 2.238075825519772, "grad_norm": 1.3543602228164673, "learning_rate": 0.0007203509308329936, "loss": 3.272, "step": 32940 }, { "epoch": 2.2384155455904335, "grad_norm": 1.6610727310180664, "learning_rate": 0.0007203084658241608, "loss": 3.5407, "step": 32945 }, { "epoch": 2.238755265661095, "grad_norm": 1.0930430889129639, "learning_rate": 0.0007202660008153283, "loss": 3.6805, "step": 32950 }, { "epoch": 2.2390949857317572, "grad_norm": 1.1956779956817627, "learning_rate": 0.0007202235358064955, "loss": 3.5953, "step": 32955 }, { "epoch": 2.239434705802419, "grad_norm": 1.0903252363204956, "learning_rate": 0.0007201810707976627, 
"loss": 3.5234, "step": 32960 }, { "epoch": 2.2397744258730805, "grad_norm": 1.5850752592086792, "learning_rate": 0.0007201386057888301, "loss": 3.5431, "step": 32965 }, { "epoch": 2.2401141459437426, "grad_norm": 1.3301349878311157, "learning_rate": 0.0007200961407799973, "loss": 3.5688, "step": 32970 }, { "epoch": 2.240453866014404, "grad_norm": 1.8727625608444214, "learning_rate": 0.0007200536757711645, "loss": 3.5336, "step": 32975 }, { "epoch": 2.240793586085066, "grad_norm": 1.3361690044403076, "learning_rate": 0.0007200112107623319, "loss": 3.5894, "step": 32980 }, { "epoch": 2.2411333061557275, "grad_norm": 1.6220734119415283, "learning_rate": 0.0007199687457534992, "loss": 3.5291, "step": 32985 }, { "epoch": 2.2414730262263896, "grad_norm": 1.5874048471450806, "learning_rate": 0.0007199262807446664, "loss": 3.4094, "step": 32990 }, { "epoch": 2.241812746297051, "grad_norm": 1.43446946144104, "learning_rate": 0.0007198838157358337, "loss": 3.4314, "step": 32995 }, { "epoch": 2.242152466367713, "grad_norm": 1.3470032215118408, "learning_rate": 0.000719841350727001, "loss": 3.4293, "step": 33000 }, { "epoch": 2.242492186438375, "grad_norm": 1.2657098770141602, "learning_rate": 0.0007197988857181682, "loss": 3.6071, "step": 33005 }, { "epoch": 2.2428319065090365, "grad_norm": 1.5036349296569824, "learning_rate": 0.0007197564207093355, "loss": 3.2949, "step": 33010 }, { "epoch": 2.243171626579698, "grad_norm": 1.5538729429244995, "learning_rate": 0.0007197139557005028, "loss": 3.684, "step": 33015 }, { "epoch": 2.2435113466503602, "grad_norm": 1.3402645587921143, "learning_rate": 0.0007196714906916701, "loss": 3.5827, "step": 33020 }, { "epoch": 2.243851066721022, "grad_norm": 1.1265509128570557, "learning_rate": 0.0007196290256828374, "loss": 3.4625, "step": 33025 }, { "epoch": 2.2441907867916835, "grad_norm": 1.283750057220459, "learning_rate": 0.0007195865606740047, "loss": 3.5636, "step": 33030 }, { "epoch": 2.2445305068623456, "grad_norm": 
1.2559723854064941, "learning_rate": 0.0007195440956651719, "loss": 3.4234, "step": 33035 }, { "epoch": 2.244870226933007, "grad_norm": 1.106141448020935, "learning_rate": 0.0007195016306563392, "loss": 3.4746, "step": 33040 }, { "epoch": 2.245209947003669, "grad_norm": 1.4915004968643188, "learning_rate": 0.0007194591656475064, "loss": 3.6242, "step": 33045 }, { "epoch": 2.245549667074331, "grad_norm": 1.327793002128601, "learning_rate": 0.0007194167006386737, "loss": 3.863, "step": 33050 }, { "epoch": 2.2458893871449925, "grad_norm": 1.3611582517623901, "learning_rate": 0.0007193742356298411, "loss": 3.8739, "step": 33055 }, { "epoch": 2.246229107215654, "grad_norm": 1.1685373783111572, "learning_rate": 0.0007193317706210083, "loss": 3.5182, "step": 33060 }, { "epoch": 2.2465688272863162, "grad_norm": 1.5898710489273071, "learning_rate": 0.0007192893056121756, "loss": 3.5773, "step": 33065 }, { "epoch": 2.246908547356978, "grad_norm": 1.5600392818450928, "learning_rate": 0.0007192468406033429, "loss": 3.6323, "step": 33070 }, { "epoch": 2.2472482674276395, "grad_norm": 1.2305926084518433, "learning_rate": 0.0007192043755945101, "loss": 3.6833, "step": 33075 }, { "epoch": 2.2475879874983016, "grad_norm": 1.3958816528320312, "learning_rate": 0.0007191619105856773, "loss": 3.5936, "step": 33080 }, { "epoch": 2.247927707568963, "grad_norm": 3.451164484024048, "learning_rate": 0.0007191194455768447, "loss": 3.5758, "step": 33085 }, { "epoch": 2.248267427639625, "grad_norm": 1.0381062030792236, "learning_rate": 0.000719076980568012, "loss": 3.3651, "step": 33090 }, { "epoch": 2.248607147710287, "grad_norm": 1.1912273168563843, "learning_rate": 0.0007190345155591792, "loss": 3.2289, "step": 33095 }, { "epoch": 2.2489468677809485, "grad_norm": 1.285571813583374, "learning_rate": 0.0007189920505503466, "loss": 3.4666, "step": 33100 }, { "epoch": 2.24928658785161, "grad_norm": 1.2148730754852295, "learning_rate": 0.0007189495855415138, "loss": 3.3665, "step": 33105 }, { 
"epoch": 2.2496263079222723, "grad_norm": 1.3171840906143188, "learning_rate": 0.000718907120532681, "loss": 3.4445, "step": 33110 }, { "epoch": 2.249966027992934, "grad_norm": 1.1867731809616089, "learning_rate": 0.0007188646555238484, "loss": 3.6946, "step": 33115 }, { "epoch": 2.2503057480635955, "grad_norm": 1.4241801500320435, "learning_rate": 0.0007188221905150156, "loss": 3.537, "step": 33120 }, { "epoch": 2.250645468134257, "grad_norm": 1.3147765398025513, "learning_rate": 0.0007187797255061829, "loss": 3.7264, "step": 33125 }, { "epoch": 2.250985188204919, "grad_norm": 1.209172010421753, "learning_rate": 0.0007187372604973503, "loss": 3.7251, "step": 33130 }, { "epoch": 2.251324908275581, "grad_norm": 1.283652663230896, "learning_rate": 0.0007186947954885175, "loss": 3.6414, "step": 33135 }, { "epoch": 2.2516646283462425, "grad_norm": 1.7357983589172363, "learning_rate": 0.0007186523304796847, "loss": 3.6092, "step": 33140 }, { "epoch": 2.2520043484169046, "grad_norm": 1.4497102499008179, "learning_rate": 0.000718609865470852, "loss": 3.5942, "step": 33145 }, { "epoch": 2.252344068487566, "grad_norm": 1.343370795249939, "learning_rate": 0.0007185674004620193, "loss": 3.3057, "step": 33150 }, { "epoch": 2.252683788558228, "grad_norm": 1.5233656167984009, "learning_rate": 0.0007185249354531865, "loss": 3.8093, "step": 33155 }, { "epoch": 2.25302350862889, "grad_norm": 1.2667694091796875, "learning_rate": 0.0007184824704443539, "loss": 3.5892, "step": 33160 }, { "epoch": 2.2533632286995515, "grad_norm": 1.2640128135681152, "learning_rate": 0.0007184400054355212, "loss": 3.7459, "step": 33165 }, { "epoch": 2.253702948770213, "grad_norm": 1.6700338125228882, "learning_rate": 0.0007183975404266885, "loss": 3.3571, "step": 33170 }, { "epoch": 2.2540426688408752, "grad_norm": 1.6009303331375122, "learning_rate": 0.0007183550754178557, "loss": 3.5086, "step": 33175 }, { "epoch": 2.254382388911537, "grad_norm": 1.234564185142517, "learning_rate": 
0.0007183126104090229, "loss": 3.3689, "step": 33180 }, { "epoch": 2.2547221089821985, "grad_norm": 1.076176404953003, "learning_rate": 0.0007182701454001903, "loss": 3.592, "step": 33185 }, { "epoch": 2.2550618290528606, "grad_norm": 1.2581712007522583, "learning_rate": 0.0007182276803913575, "loss": 3.5315, "step": 33190 }, { "epoch": 2.255401549123522, "grad_norm": 1.466629981994629, "learning_rate": 0.0007181852153825248, "loss": 3.671, "step": 33195 }, { "epoch": 2.255741269194184, "grad_norm": 1.147621750831604, "learning_rate": 0.0007181427503736922, "loss": 3.5973, "step": 33200 }, { "epoch": 2.256080989264846, "grad_norm": 1.2447246313095093, "learning_rate": 0.0007181002853648594, "loss": 3.605, "step": 33205 }, { "epoch": 2.2564207093355075, "grad_norm": 1.1286699771881104, "learning_rate": 0.0007180578203560266, "loss": 3.6144, "step": 33210 }, { "epoch": 2.256760429406169, "grad_norm": 1.2785998582839966, "learning_rate": 0.000718015355347194, "loss": 3.3394, "step": 33215 }, { "epoch": 2.2571001494768312, "grad_norm": 1.2502751350402832, "learning_rate": 0.0007179728903383612, "loss": 3.671, "step": 33220 }, { "epoch": 2.257439869547493, "grad_norm": 1.7685034275054932, "learning_rate": 0.0007179304253295284, "loss": 3.4206, "step": 33225 }, { "epoch": 2.2577795896181545, "grad_norm": 1.541629672050476, "learning_rate": 0.0007178879603206959, "loss": 3.5598, "step": 33230 }, { "epoch": 2.2581193096888166, "grad_norm": 1.0430649518966675, "learning_rate": 0.0007178454953118631, "loss": 3.5026, "step": 33235 }, { "epoch": 2.258459029759478, "grad_norm": 1.4367443323135376, "learning_rate": 0.0007178030303030303, "loss": 3.4817, "step": 33240 }, { "epoch": 2.25879874983014, "grad_norm": 1.48128342628479, "learning_rate": 0.0007177605652941976, "loss": 3.3006, "step": 33245 }, { "epoch": 2.259138469900802, "grad_norm": 1.434276819229126, "learning_rate": 0.0007177181002853649, "loss": 3.5712, "step": 33250 }, { "epoch": 2.2594781899714635, "grad_norm": 
1.1127127408981323, "learning_rate": 0.0007176756352765321, "loss": 3.2831, "step": 33255 }, { "epoch": 2.259817910042125, "grad_norm": 1.0274392366409302, "learning_rate": 0.0007176331702676994, "loss": 3.7085, "step": 33260 }, { "epoch": 2.2601576301127873, "grad_norm": 1.616745948791504, "learning_rate": 0.0007175907052588668, "loss": 3.7393, "step": 33265 }, { "epoch": 2.260497350183449, "grad_norm": 1.3680514097213745, "learning_rate": 0.000717548240250034, "loss": 3.4294, "step": 33270 }, { "epoch": 2.2608370702541105, "grad_norm": 1.3946887254714966, "learning_rate": 0.0007175057752412013, "loss": 3.537, "step": 33275 }, { "epoch": 2.2611767903247726, "grad_norm": 1.3541173934936523, "learning_rate": 0.0007174633102323685, "loss": 3.5565, "step": 33280 }, { "epoch": 2.261516510395434, "grad_norm": 1.5777666568756104, "learning_rate": 0.0007174208452235358, "loss": 3.6228, "step": 33285 }, { "epoch": 2.261856230466096, "grad_norm": 1.032005786895752, "learning_rate": 0.0007173783802147031, "loss": 3.4666, "step": 33290 }, { "epoch": 2.262195950536758, "grad_norm": 1.5115681886672974, "learning_rate": 0.0007173359152058703, "loss": 3.6114, "step": 33295 }, { "epoch": 2.2625356706074196, "grad_norm": 1.181076169013977, "learning_rate": 0.0007172934501970377, "loss": 3.4859, "step": 33300 }, { "epoch": 2.262875390678081, "grad_norm": 1.3570500612258911, "learning_rate": 0.000717250985188205, "loss": 3.4692, "step": 33305 }, { "epoch": 2.2632151107487433, "grad_norm": 1.4698492288589478, "learning_rate": 0.0007172085201793722, "loss": 3.5586, "step": 33310 }, { "epoch": 2.263554830819405, "grad_norm": 1.2392487525939941, "learning_rate": 0.0007171660551705395, "loss": 3.722, "step": 33315 }, { "epoch": 2.2638945508900665, "grad_norm": 1.4212714433670044, "learning_rate": 0.0007171235901617068, "loss": 3.6179, "step": 33320 }, { "epoch": 2.2642342709607286, "grad_norm": 1.162737250328064, "learning_rate": 0.000717081125152874, "loss": 3.4012, "step": 33325 }, { 
"epoch": 2.2645739910313902, "grad_norm": 1.5208595991134644, "learning_rate": 0.0007170386601440412, "loss": 3.5343, "step": 33330 }, { "epoch": 2.264913711102052, "grad_norm": 1.1947013139724731, "learning_rate": 0.0007169961951352087, "loss": 3.5678, "step": 33335 }, { "epoch": 2.2652534311727135, "grad_norm": 1.1253529787063599, "learning_rate": 0.0007169537301263759, "loss": 3.4385, "step": 33340 }, { "epoch": 2.2655931512433756, "grad_norm": 1.1298452615737915, "learning_rate": 0.0007169112651175431, "loss": 3.4448, "step": 33345 }, { "epoch": 2.265932871314037, "grad_norm": 1.2649072408676147, "learning_rate": 0.0007168688001087105, "loss": 3.7092, "step": 33350 }, { "epoch": 2.266272591384699, "grad_norm": 1.9789320230484009, "learning_rate": 0.0007168263350998777, "loss": 3.6445, "step": 33355 }, { "epoch": 2.266612311455361, "grad_norm": 1.030139446258545, "learning_rate": 0.0007167838700910449, "loss": 3.7694, "step": 33360 }, { "epoch": 2.2669520315260225, "grad_norm": 1.233336329460144, "learning_rate": 0.0007167414050822123, "loss": 3.6508, "step": 33365 }, { "epoch": 2.267291751596684, "grad_norm": 1.2129874229431152, "learning_rate": 0.0007166989400733796, "loss": 3.6486, "step": 33370 }, { "epoch": 2.2676314716673462, "grad_norm": 1.224916934967041, "learning_rate": 0.0007166564750645468, "loss": 3.2717, "step": 33375 }, { "epoch": 2.267971191738008, "grad_norm": 1.1953694820404053, "learning_rate": 0.0007166140100557141, "loss": 3.6934, "step": 33380 }, { "epoch": 2.2683109118086695, "grad_norm": 1.2435686588287354, "learning_rate": 0.0007165715450468814, "loss": 3.3595, "step": 33385 }, { "epoch": 2.2686506318793316, "grad_norm": 1.50998854637146, "learning_rate": 0.0007165290800380486, "loss": 3.4988, "step": 33390 }, { "epoch": 2.268990351949993, "grad_norm": 1.3768973350524902, "learning_rate": 0.0007164866150292159, "loss": 3.5713, "step": 33395 }, { "epoch": 2.269330072020655, "grad_norm": 1.3701375722885132, "learning_rate": 
0.0007164441500203832, "loss": 3.4027, "step": 33400 }, { "epoch": 2.269669792091317, "grad_norm": 1.3727940320968628, "learning_rate": 0.0007164016850115505, "loss": 3.3295, "step": 33405 }, { "epoch": 2.2700095121619785, "grad_norm": 1.3366252183914185, "learning_rate": 0.0007163592200027178, "loss": 3.455, "step": 33410 }, { "epoch": 2.27034923223264, "grad_norm": 1.1983777284622192, "learning_rate": 0.0007163167549938851, "loss": 3.5886, "step": 33415 }, { "epoch": 2.2706889523033023, "grad_norm": 1.259459376335144, "learning_rate": 0.0007162742899850523, "loss": 3.5487, "step": 33420 }, { "epoch": 2.271028672373964, "grad_norm": 1.3228375911712646, "learning_rate": 0.0007162318249762196, "loss": 3.2858, "step": 33425 }, { "epoch": 2.2713683924446255, "grad_norm": 1.1346125602722168, "learning_rate": 0.0007161893599673868, "loss": 3.4365, "step": 33430 }, { "epoch": 2.2717081125152876, "grad_norm": 0.9801726341247559, "learning_rate": 0.0007161468949585541, "loss": 3.3633, "step": 33435 }, { "epoch": 2.2720478325859492, "grad_norm": 1.21451997756958, "learning_rate": 0.0007161044299497215, "loss": 3.3642, "step": 33440 }, { "epoch": 2.272387552656611, "grad_norm": 1.117774486541748, "learning_rate": 0.0007160619649408887, "loss": 3.5263, "step": 33445 }, { "epoch": 2.2727272727272725, "grad_norm": 1.4487773180007935, "learning_rate": 0.000716019499932056, "loss": 3.3508, "step": 33450 }, { "epoch": 2.2730669927979346, "grad_norm": 1.4595751762390137, "learning_rate": 0.0007159770349232233, "loss": 3.6443, "step": 33455 }, { "epoch": 2.273406712868596, "grad_norm": 2.073150157928467, "learning_rate": 0.0007159345699143905, "loss": 3.5209, "step": 33460 }, { "epoch": 2.273746432939258, "grad_norm": 1.5756163597106934, "learning_rate": 0.0007158921049055577, "loss": 3.4506, "step": 33465 }, { "epoch": 2.27408615300992, "grad_norm": 1.7304434776306152, "learning_rate": 0.0007158496398967251, "loss": 3.4757, "step": 33470 }, { "epoch": 2.2744258730805815, 
"grad_norm": 0.9804384708404541, "learning_rate": 0.0007158071748878924, "loss": 3.5341, "step": 33475 }, { "epoch": 2.274765593151243, "grad_norm": 1.3685249090194702, "learning_rate": 0.0007157647098790596, "loss": 3.4765, "step": 33480 }, { "epoch": 2.2751053132219052, "grad_norm": 1.2763466835021973, "learning_rate": 0.000715722244870227, "loss": 3.5579, "step": 33485 }, { "epoch": 2.275445033292567, "grad_norm": 1.009291648864746, "learning_rate": 0.0007156797798613942, "loss": 3.6652, "step": 33490 }, { "epoch": 2.2757847533632285, "grad_norm": 1.3389085531234741, "learning_rate": 0.0007156373148525614, "loss": 3.6812, "step": 33495 }, { "epoch": 2.2761244734338906, "grad_norm": 1.0628933906555176, "learning_rate": 0.0007155948498437288, "loss": 3.7813, "step": 33500 }, { "epoch": 2.276464193504552, "grad_norm": 1.2405725717544556, "learning_rate": 0.000715552384834896, "loss": 3.7575, "step": 33505 }, { "epoch": 2.276803913575214, "grad_norm": 1.1728367805480957, "learning_rate": 0.0007155099198260634, "loss": 3.3713, "step": 33510 }, { "epoch": 2.277143633645876, "grad_norm": 1.415752649307251, "learning_rate": 0.0007154674548172307, "loss": 3.609, "step": 33515 }, { "epoch": 2.2774833537165375, "grad_norm": 1.6899791955947876, "learning_rate": 0.0007154249898083979, "loss": 3.5019, "step": 33520 }, { "epoch": 2.277823073787199, "grad_norm": 1.2744336128234863, "learning_rate": 0.0007153825247995652, "loss": 3.5074, "step": 33525 }, { "epoch": 2.2781627938578612, "grad_norm": 1.3641059398651123, "learning_rate": 0.0007153400597907324, "loss": 3.5458, "step": 33530 }, { "epoch": 2.278502513928523, "grad_norm": 1.301623821258545, "learning_rate": 0.0007152975947818997, "loss": 3.399, "step": 33535 }, { "epoch": 2.2788422339991845, "grad_norm": 1.63902747631073, "learning_rate": 0.0007152551297730671, "loss": 3.8619, "step": 33540 }, { "epoch": 2.2791819540698466, "grad_norm": 1.3594369888305664, "learning_rate": 0.0007152126647642343, "loss": 3.4016, "step": 
33545 }, { "epoch": 2.279521674140508, "grad_norm": 1.2930277585983276, "learning_rate": 0.0007151701997554016, "loss": 3.5257, "step": 33550 }, { "epoch": 2.27986139421117, "grad_norm": 1.678803563117981, "learning_rate": 0.0007151277347465689, "loss": 3.4554, "step": 33555 }, { "epoch": 2.280201114281832, "grad_norm": 1.484554409980774, "learning_rate": 0.0007150852697377361, "loss": 3.5014, "step": 33560 }, { "epoch": 2.2805408343524936, "grad_norm": 1.1270033121109009, "learning_rate": 0.0007150428047289033, "loss": 3.3954, "step": 33565 }, { "epoch": 2.280880554423155, "grad_norm": 0.9522916674613953, "learning_rate": 0.0007150003397200707, "loss": 3.7268, "step": 33570 }, { "epoch": 2.2812202744938173, "grad_norm": 1.4148060083389282, "learning_rate": 0.000714957874711238, "loss": 3.4684, "step": 33575 }, { "epoch": 2.281559994564479, "grad_norm": 2.0102827548980713, "learning_rate": 0.0007149154097024052, "loss": 3.3543, "step": 33580 }, { "epoch": 2.2818997146351405, "grad_norm": 1.1531792879104614, "learning_rate": 0.0007148729446935726, "loss": 3.6151, "step": 33585 }, { "epoch": 2.2822394347058026, "grad_norm": 1.3345447778701782, "learning_rate": 0.0007148304796847398, "loss": 3.4347, "step": 33590 }, { "epoch": 2.2825791547764642, "grad_norm": 1.547054409980774, "learning_rate": 0.000714788014675907, "loss": 3.4056, "step": 33595 }, { "epoch": 2.282918874847126, "grad_norm": 1.2514489889144897, "learning_rate": 0.0007147455496670744, "loss": 3.6527, "step": 33600 }, { "epoch": 2.283258594917788, "grad_norm": 1.410163164138794, "learning_rate": 0.0007147030846582416, "loss": 3.3685, "step": 33605 }, { "epoch": 2.2835983149884496, "grad_norm": 1.5807229280471802, "learning_rate": 0.0007146606196494089, "loss": 3.5608, "step": 33610 }, { "epoch": 2.283938035059111, "grad_norm": 1.3049622774124146, "learning_rate": 0.0007146181546405763, "loss": 3.537, "step": 33615 }, { "epoch": 2.2842777551297733, "grad_norm": 1.4108484983444214, "learning_rate": 
0.0007145756896317435, "loss": 3.5652, "step": 33620 }, { "epoch": 2.284617475200435, "grad_norm": 1.3343027830123901, "learning_rate": 0.0007145332246229107, "loss": 3.7805, "step": 33625 }, { "epoch": 2.2849571952710965, "grad_norm": 1.5689398050308228, "learning_rate": 0.000714490759614078, "loss": 3.4348, "step": 33630 }, { "epoch": 2.2852969153417586, "grad_norm": 1.5468060970306396, "learning_rate": 0.0007144482946052453, "loss": 3.5597, "step": 33635 }, { "epoch": 2.2856366354124202, "grad_norm": 1.198010802268982, "learning_rate": 0.0007144058295964125, "loss": 3.7633, "step": 33640 }, { "epoch": 2.285976355483082, "grad_norm": 2.2745351791381836, "learning_rate": 0.0007143633645875799, "loss": 3.6764, "step": 33645 }, { "epoch": 2.286316075553744, "grad_norm": 1.2847111225128174, "learning_rate": 0.0007143208995787472, "loss": 3.4833, "step": 33650 }, { "epoch": 2.2866557956244056, "grad_norm": 1.4774061441421509, "learning_rate": 0.0007142784345699144, "loss": 3.509, "step": 33655 }, { "epoch": 2.286995515695067, "grad_norm": 1.0734914541244507, "learning_rate": 0.0007142359695610817, "loss": 3.6662, "step": 33660 }, { "epoch": 2.2873352357657293, "grad_norm": 1.2931019067764282, "learning_rate": 0.000714193504552249, "loss": 3.433, "step": 33665 }, { "epoch": 2.287674955836391, "grad_norm": 1.2751637697219849, "learning_rate": 0.0007141510395434162, "loss": 3.5829, "step": 33670 }, { "epoch": 2.2880146759070525, "grad_norm": 1.4007445573806763, "learning_rate": 0.0007141085745345835, "loss": 3.2429, "step": 33675 }, { "epoch": 2.288354395977714, "grad_norm": 1.3631699085235596, "learning_rate": 0.0007140661095257508, "loss": 3.4399, "step": 33680 }, { "epoch": 2.2886941160483762, "grad_norm": 1.382764220237732, "learning_rate": 0.0007140236445169181, "loss": 3.5857, "step": 33685 }, { "epoch": 2.289033836119038, "grad_norm": 1.3557147979736328, "learning_rate": 0.0007139811795080854, "loss": 3.6122, "step": 33690 }, { "epoch": 2.2893735561896995, 
"grad_norm": 1.2280830144882202, "learning_rate": 0.0007139387144992526, "loss": 3.6262, "step": 33695 }, { "epoch": 2.2897132762603616, "grad_norm": 1.2848362922668457, "learning_rate": 0.0007138962494904199, "loss": 3.794, "step": 33700 }, { "epoch": 2.290052996331023, "grad_norm": 1.06666898727417, "learning_rate": 0.0007138537844815872, "loss": 3.5286, "step": 33705 }, { "epoch": 2.290392716401685, "grad_norm": 1.8230592012405396, "learning_rate": 0.0007138113194727544, "loss": 3.3387, "step": 33710 }, { "epoch": 2.290732436472347, "grad_norm": 1.472624659538269, "learning_rate": 0.0007137688544639218, "loss": 3.3951, "step": 33715 }, { "epoch": 2.2910721565430086, "grad_norm": 1.3118075132369995, "learning_rate": 0.0007137263894550891, "loss": 3.4435, "step": 33720 }, { "epoch": 2.29141187661367, "grad_norm": 1.0918959379196167, "learning_rate": 0.0007136839244462563, "loss": 3.5361, "step": 33725 }, { "epoch": 2.2917515966843323, "grad_norm": 1.3260865211486816, "learning_rate": 0.0007136414594374235, "loss": 3.8342, "step": 33730 }, { "epoch": 2.292091316754994, "grad_norm": 4.67048454284668, "learning_rate": 0.0007135989944285909, "loss": 3.4325, "step": 33735 }, { "epoch": 2.2924310368256555, "grad_norm": 1.3571141958236694, "learning_rate": 0.0007135565294197581, "loss": 3.5205, "step": 33740 }, { "epoch": 2.2927707568963176, "grad_norm": 1.6393500566482544, "learning_rate": 0.0007135140644109253, "loss": 3.4428, "step": 33745 }, { "epoch": 2.2931104769669792, "grad_norm": 1.188126802444458, "learning_rate": 0.0007134715994020928, "loss": 3.6306, "step": 33750 }, { "epoch": 2.293450197037641, "grad_norm": 1.2390764951705933, "learning_rate": 0.00071342913439326, "loss": 3.7489, "step": 33755 }, { "epoch": 2.293789917108303, "grad_norm": 1.2228353023529053, "learning_rate": 0.0007133866693844272, "loss": 3.3453, "step": 33760 }, { "epoch": 2.2941296371789646, "grad_norm": 1.1433910131454468, "learning_rate": 0.0007133442043755946, "loss": 3.2242, "step": 
33765 }, { "epoch": 2.294469357249626, "grad_norm": 1.2434896230697632, "learning_rate": 0.0007133017393667618, "loss": 3.63, "step": 33770 }, { "epoch": 2.2948090773202883, "grad_norm": 1.4489591121673584, "learning_rate": 0.000713259274357929, "loss": 3.5908, "step": 33775 }, { "epoch": 2.29514879739095, "grad_norm": 1.268809199333191, "learning_rate": 0.0007132168093490963, "loss": 3.5231, "step": 33780 }, { "epoch": 2.2954885174616115, "grad_norm": 1.3904938697814941, "learning_rate": 0.0007131743443402637, "loss": 3.5712, "step": 33785 }, { "epoch": 2.295828237532273, "grad_norm": 1.5404410362243652, "learning_rate": 0.0007131318793314309, "loss": 3.5061, "step": 33790 }, { "epoch": 2.2961679576029352, "grad_norm": 1.6533280611038208, "learning_rate": 0.0007130894143225982, "loss": 3.6048, "step": 33795 }, { "epoch": 2.296507677673597, "grad_norm": 1.1519615650177002, "learning_rate": 0.0007130469493137655, "loss": 3.6716, "step": 33800 }, { "epoch": 2.2968473977442585, "grad_norm": 1.6735962629318237, "learning_rate": 0.0007130044843049327, "loss": 3.5857, "step": 33805 }, { "epoch": 2.2971871178149206, "grad_norm": 1.339843511581421, "learning_rate": 0.0007129620192961, "loss": 3.4515, "step": 33810 }, { "epoch": 2.297526837885582, "grad_norm": 1.092934012413025, "learning_rate": 0.0007129195542872672, "loss": 3.6131, "step": 33815 }, { "epoch": 2.297866557956244, "grad_norm": 1.2138490676879883, "learning_rate": 0.0007128770892784346, "loss": 3.5151, "step": 33820 }, { "epoch": 2.298206278026906, "grad_norm": 1.236484408378601, "learning_rate": 0.0007128346242696019, "loss": 3.3724, "step": 33825 }, { "epoch": 2.2985459980975675, "grad_norm": 1.4058336019515991, "learning_rate": 0.0007127921592607691, "loss": 3.5798, "step": 33830 }, { "epoch": 2.298885718168229, "grad_norm": 1.1994633674621582, "learning_rate": 0.0007127496942519364, "loss": 3.384, "step": 33835 }, { "epoch": 2.2992254382388913, "grad_norm": 1.453461766242981, "learning_rate": 
0.0007127072292431037, "loss": 3.4463, "step": 33840 }, { "epoch": 2.299565158309553, "grad_norm": 1.2654715776443481, "learning_rate": 0.0007126647642342709, "loss": 3.8204, "step": 33845 }, { "epoch": 2.2999048783802145, "grad_norm": 1.0929481983184814, "learning_rate": 0.0007126222992254383, "loss": 3.6192, "step": 33850 }, { "epoch": 2.3002445984508766, "grad_norm": 1.139617681503296, "learning_rate": 0.0007125798342166056, "loss": 3.5479, "step": 33855 }, { "epoch": 2.300584318521538, "grad_norm": 1.0493309497833252, "learning_rate": 0.0007125373692077728, "loss": 3.2194, "step": 33860 }, { "epoch": 2.3009240385922, "grad_norm": 1.240277886390686, "learning_rate": 0.0007124949041989402, "loss": 3.7368, "step": 33865 }, { "epoch": 2.301263758662862, "grad_norm": 1.2813690900802612, "learning_rate": 0.0007124524391901074, "loss": 3.2314, "step": 33870 }, { "epoch": 2.3016034787335236, "grad_norm": 1.0599918365478516, "learning_rate": 0.0007124099741812746, "loss": 3.3215, "step": 33875 }, { "epoch": 2.301943198804185, "grad_norm": 1.2981805801391602, "learning_rate": 0.0007123675091724419, "loss": 3.5929, "step": 33880 }, { "epoch": 2.3022829188748473, "grad_norm": 1.1847648620605469, "learning_rate": 0.0007123250441636092, "loss": 3.3973, "step": 33885 }, { "epoch": 2.302622638945509, "grad_norm": 1.2966468334197998, "learning_rate": 0.0007122825791547765, "loss": 3.5466, "step": 33890 }, { "epoch": 2.3029623590161705, "grad_norm": 1.1771886348724365, "learning_rate": 0.0007122401141459438, "loss": 3.5594, "step": 33895 }, { "epoch": 2.3033020790868326, "grad_norm": 1.2798349857330322, "learning_rate": 0.0007121976491371111, "loss": 3.3565, "step": 33900 }, { "epoch": 2.3036417991574942, "grad_norm": 1.5096454620361328, "learning_rate": 0.0007121551841282783, "loss": 3.6478, "step": 33905 }, { "epoch": 2.303981519228156, "grad_norm": 1.2581496238708496, "learning_rate": 0.0007121127191194456, "loss": 3.5338, "step": 33910 }, { "epoch": 2.304321239298818, 
"grad_norm": 1.3487437963485718, "learning_rate": 0.0007120702541106128, "loss": 3.7793, "step": 33915 }, { "epoch": 2.3046609593694796, "grad_norm": 1.42862069606781, "learning_rate": 0.0007120277891017801, "loss": 3.7094, "step": 33920 }, { "epoch": 2.305000679440141, "grad_norm": 1.4115608930587769, "learning_rate": 0.0007119853240929475, "loss": 3.7176, "step": 33925 }, { "epoch": 2.3053403995108033, "grad_norm": 1.1296679973602295, "learning_rate": 0.0007119428590841147, "loss": 3.3559, "step": 33930 }, { "epoch": 2.305680119581465, "grad_norm": 1.4487255811691284, "learning_rate": 0.000711900394075282, "loss": 3.651, "step": 33935 }, { "epoch": 2.3060198396521265, "grad_norm": 1.2356623411178589, "learning_rate": 0.0007118579290664493, "loss": 3.5746, "step": 33940 }, { "epoch": 2.3063595597227886, "grad_norm": 1.4583053588867188, "learning_rate": 0.0007118154640576165, "loss": 3.6697, "step": 33945 }, { "epoch": 2.3066992797934502, "grad_norm": 1.3363720178604126, "learning_rate": 0.0007117729990487838, "loss": 3.4764, "step": 33950 }, { "epoch": 2.307038999864112, "grad_norm": 1.2919975519180298, "learning_rate": 0.0007117305340399511, "loss": 3.3957, "step": 33955 }, { "epoch": 2.307378719934774, "grad_norm": 1.280137062072754, "learning_rate": 0.0007116880690311184, "loss": 3.6414, "step": 33960 }, { "epoch": 2.3077184400054356, "grad_norm": 1.2989274263381958, "learning_rate": 0.0007116456040222856, "loss": 3.4365, "step": 33965 }, { "epoch": 2.308058160076097, "grad_norm": 1.2626713514328003, "learning_rate": 0.000711603139013453, "loss": 3.2117, "step": 33970 }, { "epoch": 2.3083978801467593, "grad_norm": 1.1683300733566284, "learning_rate": 0.0007115606740046202, "loss": 3.6615, "step": 33975 }, { "epoch": 2.308737600217421, "grad_norm": 1.694398283958435, "learning_rate": 0.0007115182089957874, "loss": 3.2379, "step": 33980 }, { "epoch": 2.3090773202880825, "grad_norm": 1.844620943069458, "learning_rate": 0.0007114757439869548, "loss": 3.6216, 
"step": 33985 }, { "epoch": 2.3094170403587446, "grad_norm": 1.5847959518432617, "learning_rate": 0.000711433278978122, "loss": 3.4935, "step": 33990 }, { "epoch": 2.3097567604294063, "grad_norm": 1.540331482887268, "learning_rate": 0.0007113908139692893, "loss": 3.7389, "step": 33995 }, { "epoch": 2.310096480500068, "grad_norm": 1.6078861951828003, "learning_rate": 0.0007113483489604567, "loss": 3.5074, "step": 34000 }, { "epoch": 2.31043620057073, "grad_norm": 1.2735233306884766, "learning_rate": 0.0007113058839516239, "loss": 3.6324, "step": 34005 }, { "epoch": 2.3107759206413916, "grad_norm": 1.5353095531463623, "learning_rate": 0.0007112634189427911, "loss": 3.6926, "step": 34010 }, { "epoch": 2.311115640712053, "grad_norm": 1.245751976966858, "learning_rate": 0.0007112209539339584, "loss": 3.6449, "step": 34015 }, { "epoch": 2.311455360782715, "grad_norm": 1.3108457326889038, "learning_rate": 0.0007111784889251257, "loss": 3.559, "step": 34020 }, { "epoch": 2.311795080853377, "grad_norm": 1.2892709970474243, "learning_rate": 0.0007111360239162929, "loss": 3.5343, "step": 34025 }, { "epoch": 2.3121348009240386, "grad_norm": 1.2568087577819824, "learning_rate": 0.0007110935589074603, "loss": 3.3018, "step": 34030 }, { "epoch": 2.3124745209947, "grad_norm": 1.432296633720398, "learning_rate": 0.0007110510938986276, "loss": 3.6113, "step": 34035 }, { "epoch": 2.3128142410653623, "grad_norm": 1.2085051536560059, "learning_rate": 0.0007110086288897948, "loss": 3.7488, "step": 34040 }, { "epoch": 2.313153961136024, "grad_norm": 1.3541918992996216, "learning_rate": 0.0007109661638809621, "loss": 3.6703, "step": 34045 }, { "epoch": 2.3134936812066855, "grad_norm": 1.4495383501052856, "learning_rate": 0.0007109236988721294, "loss": 3.4224, "step": 34050 }, { "epoch": 2.3138334012773476, "grad_norm": 1.8699785470962524, "learning_rate": 0.0007108812338632966, "loss": 3.3174, "step": 34055 }, { "epoch": 2.3141731213480092, "grad_norm": 1.573621153831482, "learning_rate": 
0.000710838768854464, "loss": 3.3831, "step": 34060 }, { "epoch": 2.314512841418671, "grad_norm": 1.2635750770568848, "learning_rate": 0.0007107963038456312, "loss": 3.6743, "step": 34065 }, { "epoch": 2.314852561489333, "grad_norm": 1.5078963041305542, "learning_rate": 0.0007107538388367985, "loss": 3.4631, "step": 34070 }, { "epoch": 2.3151922815599946, "grad_norm": 1.2902421951293945, "learning_rate": 0.0007107113738279658, "loss": 3.3436, "step": 34075 }, { "epoch": 2.315532001630656, "grad_norm": 1.426299810409546, "learning_rate": 0.000710668908819133, "loss": 3.7451, "step": 34080 }, { "epoch": 2.3158717217013183, "grad_norm": null, "learning_rate": 0.000710634936812067, "loss": 3.75, "step": 34085 }, { "epoch": 2.31621144177198, "grad_norm": 1.1067218780517578, "learning_rate": 0.0007105924718032342, "loss": 3.6952, "step": 34090 }, { "epoch": 2.3165511618426415, "grad_norm": 1.1188215017318726, "learning_rate": 0.0007105500067944014, "loss": 3.7715, "step": 34095 }, { "epoch": 2.3168908819133036, "grad_norm": 1.1070109605789185, "learning_rate": 0.0007105075417855688, "loss": 3.6373, "step": 34100 }, { "epoch": 2.3172306019839652, "grad_norm": 1.3303273916244507, "learning_rate": 0.000710465076776736, "loss": 3.3043, "step": 34105 }, { "epoch": 2.317570322054627, "grad_norm": 1.1623960733413696, "learning_rate": 0.0007104226117679032, "loss": 3.6357, "step": 34110 }, { "epoch": 2.317910042125289, "grad_norm": 1.5674742460250854, "learning_rate": 0.0007103801467590705, "loss": 3.7419, "step": 34115 }, { "epoch": 2.3182497621959506, "grad_norm": 1.9323136806488037, "learning_rate": 0.0007103376817502379, "loss": 3.6849, "step": 34120 }, { "epoch": 2.318589482266612, "grad_norm": 1.4507629871368408, "learning_rate": 0.0007102952167414051, "loss": 3.5919, "step": 34125 }, { "epoch": 2.318929202337274, "grad_norm": 1.2796618938446045, "learning_rate": 0.0007102527517325724, "loss": 3.5789, "step": 34130 }, { "epoch": 2.319268922407936, "grad_norm": 
1.0896058082580566, "learning_rate": 0.0007102102867237397, "loss": 3.7008, "step": 34135 }, { "epoch": 2.3196086424785975, "grad_norm": 1.0360177755355835, "learning_rate": 0.0007101678217149069, "loss": 3.6763, "step": 34140 }, { "epoch": 2.319948362549259, "grad_norm": 1.5839879512786865, "learning_rate": 0.0007101253567060742, "loss": 3.3868, "step": 34145 }, { "epoch": 2.3202880826199213, "grad_norm": 1.3917754888534546, "learning_rate": 0.0007100828916972414, "loss": 3.6213, "step": 34150 }, { "epoch": 2.320627802690583, "grad_norm": 1.3019505739212036, "learning_rate": 0.0007100404266884088, "loss": 3.4429, "step": 34155 }, { "epoch": 2.3209675227612445, "grad_norm": 1.6481595039367676, "learning_rate": 0.0007099979616795761, "loss": 3.409, "step": 34160 }, { "epoch": 2.3213072428319066, "grad_norm": 1.1635388135910034, "learning_rate": 0.0007099554966707433, "loss": 3.4329, "step": 34165 }, { "epoch": 2.3216469629025682, "grad_norm": 1.0519384145736694, "learning_rate": 0.0007099130316619106, "loss": 3.5084, "step": 34170 }, { "epoch": 2.32198668297323, "grad_norm": 1.264143466949463, "learning_rate": 0.0007098705666530779, "loss": 3.3511, "step": 34175 }, { "epoch": 2.322326403043892, "grad_norm": 1.2599331140518188, "learning_rate": 0.0007098281016442451, "loss": 3.3047, "step": 34180 }, { "epoch": 2.3226661231145536, "grad_norm": 1.569013237953186, "learning_rate": 0.0007097856366354124, "loss": 3.4145, "step": 34185 }, { "epoch": 2.323005843185215, "grad_norm": 1.1731699705123901, "learning_rate": 0.0007097431716265798, "loss": 3.339, "step": 34190 }, { "epoch": 2.3233455632558773, "grad_norm": 1.505217432975769, "learning_rate": 0.000709700706617747, "loss": 3.6382, "step": 34195 }, { "epoch": 2.323685283326539, "grad_norm": 1.4487855434417725, "learning_rate": 0.0007096582416089142, "loss": 3.4864, "step": 34200 }, { "epoch": 2.3240250033972005, "grad_norm": 1.4222323894500732, "learning_rate": 0.0007096157766000816, "loss": 3.4618, "step": 34205 }, { 
"epoch": 2.3243647234678626, "grad_norm": 1.1890966892242432, "learning_rate": 0.0007095733115912488, "loss": 3.9573, "step": 34210 }, { "epoch": 2.3247044435385242, "grad_norm": 1.004165530204773, "learning_rate": 0.000709530846582416, "loss": 3.4993, "step": 34215 }, { "epoch": 2.325044163609186, "grad_norm": 1.3730034828186035, "learning_rate": 0.0007094883815735834, "loss": 3.8079, "step": 34220 }, { "epoch": 2.325383883679848, "grad_norm": 1.5800085067749023, "learning_rate": 0.0007094459165647507, "loss": 3.4232, "step": 34225 }, { "epoch": 2.3257236037505096, "grad_norm": 1.2859643697738647, "learning_rate": 0.0007094034515559179, "loss": 3.6734, "step": 34230 }, { "epoch": 2.326063323821171, "grad_norm": 1.113191843032837, "learning_rate": 0.0007093609865470853, "loss": 3.5195, "step": 34235 }, { "epoch": 2.3264030438918333, "grad_norm": 1.1292625665664673, "learning_rate": 0.0007093185215382525, "loss": 3.6059, "step": 34240 }, { "epoch": 2.326742763962495, "grad_norm": 1.1898980140686035, "learning_rate": 0.0007092760565294197, "loss": 3.5363, "step": 34245 }, { "epoch": 2.3270824840331565, "grad_norm": 1.4007021188735962, "learning_rate": 0.000709233591520587, "loss": 3.5793, "step": 34250 }, { "epoch": 2.3274222041038186, "grad_norm": 1.2219185829162598, "learning_rate": 0.0007091911265117543, "loss": 3.5352, "step": 34255 }, { "epoch": 2.3277619241744802, "grad_norm": 1.3545726537704468, "learning_rate": 0.0007091486615029216, "loss": 3.5534, "step": 34260 }, { "epoch": 2.328101644245142, "grad_norm": 1.3401528596878052, "learning_rate": 0.0007091061964940889, "loss": 3.3322, "step": 34265 }, { "epoch": 2.328441364315804, "grad_norm": 1.4615627527236938, "learning_rate": 0.0007090637314852562, "loss": 3.6768, "step": 34270 }, { "epoch": 2.3287810843864656, "grad_norm": 1.5633808374404907, "learning_rate": 0.0007090212664764234, "loss": 3.389, "step": 34275 }, { "epoch": 2.329120804457127, "grad_norm": 1.2763053178787231, "learning_rate": 
0.0007089788014675907, "loss": 3.9283, "step": 34280 }, { "epoch": 2.3294605245277893, "grad_norm": 1.1711959838867188, "learning_rate": 0.000708936336458758, "loss": 3.4555, "step": 34285 }, { "epoch": 2.329800244598451, "grad_norm": 1.50472891330719, "learning_rate": 0.0007088938714499252, "loss": 3.3326, "step": 34290 }, { "epoch": 2.3301399646691126, "grad_norm": 1.1203980445861816, "learning_rate": 0.0007088514064410926, "loss": 3.7068, "step": 34295 }, { "epoch": 2.3304796847397746, "grad_norm": 1.279531478881836, "learning_rate": 0.0007088089414322598, "loss": 3.403, "step": 34300 }, { "epoch": 2.3308194048104363, "grad_norm": 1.2889761924743652, "learning_rate": 0.0007087664764234271, "loss": 3.7229, "step": 34305 }, { "epoch": 2.331159124881098, "grad_norm": 1.2034543752670288, "learning_rate": 0.0007087240114145944, "loss": 3.6605, "step": 34310 }, { "epoch": 2.33149884495176, "grad_norm": 1.439772605895996, "learning_rate": 0.0007086815464057616, "loss": 3.5782, "step": 34315 }, { "epoch": 2.3318385650224216, "grad_norm": 1.4191739559173584, "learning_rate": 0.0007086390813969289, "loss": 3.2503, "step": 34320 }, { "epoch": 2.3321782850930832, "grad_norm": 1.4296424388885498, "learning_rate": 0.0007085966163880963, "loss": 3.3679, "step": 34325 }, { "epoch": 2.3325180051637453, "grad_norm": 1.3269087076187134, "learning_rate": 0.0007085541513792635, "loss": 3.5685, "step": 34330 }, { "epoch": 2.332857725234407, "grad_norm": 1.259093999862671, "learning_rate": 0.0007085116863704308, "loss": 3.3456, "step": 34335 }, { "epoch": 2.3331974453050686, "grad_norm": 1.8407952785491943, "learning_rate": 0.0007084692213615981, "loss": 3.528, "step": 34340 }, { "epoch": 2.3335371653757306, "grad_norm": 1.3864861726760864, "learning_rate": 0.0007084267563527653, "loss": 3.4699, "step": 34345 }, { "epoch": 2.3338768854463923, "grad_norm": 1.3094781637191772, "learning_rate": 0.0007083842913439325, "loss": 3.6488, "step": 34350 }, { "epoch": 2.334216605517054, 
"grad_norm": 1.6423310041427612, "learning_rate": 0.0007083418263350999, "loss": 3.5795, "step": 34355 }, { "epoch": 2.3345563255877155, "grad_norm": 1.0921677350997925, "learning_rate": 0.0007082993613262672, "loss": 3.6733, "step": 34360 }, { "epoch": 2.3348960456583776, "grad_norm": 1.2223179340362549, "learning_rate": 0.0007082568963174344, "loss": 3.4535, "step": 34365 }, { "epoch": 2.3352357657290392, "grad_norm": 1.2466927766799927, "learning_rate": 0.0007082144313086018, "loss": 3.674, "step": 34370 }, { "epoch": 2.335575485799701, "grad_norm": 1.233607292175293, "learning_rate": 0.000708171966299769, "loss": 3.6227, "step": 34375 }, { "epoch": 2.335915205870363, "grad_norm": 1.2816568613052368, "learning_rate": 0.0007081295012909362, "loss": 3.4317, "step": 34380 }, { "epoch": 2.3362549259410246, "grad_norm": 1.4604038000106812, "learning_rate": 0.0007080870362821036, "loss": 3.5886, "step": 34385 }, { "epoch": 2.336594646011686, "grad_norm": 2.96753191947937, "learning_rate": 0.0007080445712732708, "loss": 3.468, "step": 34390 }, { "epoch": 2.3369343660823483, "grad_norm": 1.88933527469635, "learning_rate": 0.0007080021062644382, "loss": 3.6489, "step": 34395 }, { "epoch": 2.33727408615301, "grad_norm": 1.5834228992462158, "learning_rate": 0.0007079596412556054, "loss": 3.6932, "step": 34400 }, { "epoch": 2.3376138062236715, "grad_norm": 1.3807767629623413, "learning_rate": 0.0007079171762467727, "loss": 3.6289, "step": 34405 }, { "epoch": 2.3379535262943336, "grad_norm": 1.4093319177627563, "learning_rate": 0.00070787471123794, "loss": 3.6271, "step": 34410 }, { "epoch": 2.3382932463649952, "grad_norm": 1.647131323814392, "learning_rate": 0.0007078322462291072, "loss": 3.6089, "step": 34415 }, { "epoch": 2.338632966435657, "grad_norm": 1.5909268856048584, "learning_rate": 0.0007077897812202745, "loss": 3.382, "step": 34420 }, { "epoch": 2.338972686506319, "grad_norm": 1.273716926574707, "learning_rate": 0.0007077473162114418, "loss": 3.2276, "step": 
34425 }, { "epoch": 2.3393124065769806, "grad_norm": 1.3522368669509888, "learning_rate": 0.0007077048512026091, "loss": 3.6233, "step": 34430 }, { "epoch": 2.339652126647642, "grad_norm": 1.0953441858291626, "learning_rate": 0.0007076623861937764, "loss": 3.7332, "step": 34435 }, { "epoch": 2.3399918467183043, "grad_norm": 1.4364516735076904, "learning_rate": 0.0007076199211849437, "loss": 3.3598, "step": 34440 }, { "epoch": 2.340331566788966, "grad_norm": 1.2953033447265625, "learning_rate": 0.0007075774561761109, "loss": 3.6102, "step": 34445 }, { "epoch": 2.3406712868596276, "grad_norm": 1.3778847455978394, "learning_rate": 0.0007075349911672781, "loss": 3.5949, "step": 34450 }, { "epoch": 2.3410110069302896, "grad_norm": 1.348983883857727, "learning_rate": 0.0007074925261584455, "loss": 3.7543, "step": 34455 }, { "epoch": 2.3413507270009513, "grad_norm": 2.31988525390625, "learning_rate": 0.0007074500611496127, "loss": 3.6656, "step": 34460 }, { "epoch": 2.341690447071613, "grad_norm": 1.3290640115737915, "learning_rate": 0.00070740759614078, "loss": 3.5972, "step": 34465 }, { "epoch": 2.3420301671422745, "grad_norm": 1.0805820226669312, "learning_rate": 0.0007073651311319474, "loss": 3.494, "step": 34470 }, { "epoch": 2.3423698872129366, "grad_norm": 1.5901193618774414, "learning_rate": 0.0007073226661231146, "loss": 3.3633, "step": 34475 }, { "epoch": 2.3427096072835982, "grad_norm": 1.391592025756836, "learning_rate": 0.0007072802011142818, "loss": 3.6202, "step": 34480 }, { "epoch": 2.34304932735426, "grad_norm": 1.6405380964279175, "learning_rate": 0.0007072377361054492, "loss": 3.5304, "step": 34485 }, { "epoch": 2.343389047424922, "grad_norm": 1.3722666501998901, "learning_rate": 0.0007071952710966164, "loss": 3.6435, "step": 34490 }, { "epoch": 2.3437287674955836, "grad_norm": 1.3067944049835205, "learning_rate": 0.0007071528060877836, "loss": 3.6751, "step": 34495 }, { "epoch": 2.344068487566245, "grad_norm": 1.3242738246917725, "learning_rate": 
0.000707110341078951, "loss": 3.3379, "step": 34500 }, { "epoch": 2.3444082076369073, "grad_norm": 1.560415506362915, "learning_rate": 0.0007070678760701183, "loss": 3.5129, "step": 34505 }, { "epoch": 2.344747927707569, "grad_norm": 1.4990801811218262, "learning_rate": 0.0007070254110612855, "loss": 3.5264, "step": 34510 }, { "epoch": 2.3450876477782305, "grad_norm": 1.398866891860962, "learning_rate": 0.0007069829460524528, "loss": 3.446, "step": 34515 }, { "epoch": 2.3454273678488926, "grad_norm": 1.1827389001846313, "learning_rate": 0.0007069404810436201, "loss": 3.3535, "step": 34520 }, { "epoch": 2.3457670879195542, "grad_norm": 1.3107531070709229, "learning_rate": 0.0007068980160347873, "loss": 3.6558, "step": 34525 }, { "epoch": 2.346106807990216, "grad_norm": 1.1854281425476074, "learning_rate": 0.0007068555510259546, "loss": 3.6774, "step": 34530 }, { "epoch": 2.346446528060878, "grad_norm": 1.7380239963531494, "learning_rate": 0.000706813086017122, "loss": 3.6825, "step": 34535 }, { "epoch": 2.3467862481315396, "grad_norm": 1.377498984336853, "learning_rate": 0.0007067706210082892, "loss": 3.3073, "step": 34540 }, { "epoch": 2.347125968202201, "grad_norm": 1.4294886589050293, "learning_rate": 0.0007067281559994565, "loss": 3.5666, "step": 34545 }, { "epoch": 2.3474656882728633, "grad_norm": 1.466270089149475, "learning_rate": 0.0007066856909906237, "loss": 3.6723, "step": 34550 }, { "epoch": 2.347805408343525, "grad_norm": 1.1739718914031982, "learning_rate": 0.000706643225981791, "loss": 3.659, "step": 34555 }, { "epoch": 2.3481451284141865, "grad_norm": 1.3190675973892212, "learning_rate": 0.0007066007609729583, "loss": 3.7009, "step": 34560 }, { "epoch": 2.3484848484848486, "grad_norm": 1.312744379043579, "learning_rate": 0.0007065582959641255, "loss": 3.6989, "step": 34565 }, { "epoch": 2.3488245685555103, "grad_norm": 1.183693528175354, "learning_rate": 0.0007065158309552929, "loss": 3.5574, "step": 34570 }, { "epoch": 2.349164288626172, 
"grad_norm": 1.4633190631866455, "learning_rate": 0.0007064733659464602, "loss": 3.4755, "step": 34575 }, { "epoch": 2.349504008696834, "grad_norm": 1.284348726272583, "learning_rate": 0.0007064309009376274, "loss": 3.6306, "step": 34580 }, { "epoch": 2.3498437287674956, "grad_norm": 1.4237675666809082, "learning_rate": 0.0007063884359287946, "loss": 3.4931, "step": 34585 }, { "epoch": 2.350183448838157, "grad_norm": 1.3007091283798218, "learning_rate": 0.000706345970919962, "loss": 3.7057, "step": 34590 }, { "epoch": 2.3505231689088193, "grad_norm": 1.350005030632019, "learning_rate": 0.0007063035059111292, "loss": 3.4135, "step": 34595 }, { "epoch": 2.350862888979481, "grad_norm": 1.1138718128204346, "learning_rate": 0.0007062610409022964, "loss": 3.6063, "step": 34600 }, { "epoch": 2.3512026090501426, "grad_norm": 1.5399374961853027, "learning_rate": 0.0007062185758934639, "loss": 3.5814, "step": 34605 }, { "epoch": 2.3515423291208046, "grad_norm": 1.2090463638305664, "learning_rate": 0.0007061761108846311, "loss": 3.6519, "step": 34610 }, { "epoch": 2.3518820491914663, "grad_norm": 1.2436639070510864, "learning_rate": 0.0007061336458757983, "loss": 3.5117, "step": 34615 }, { "epoch": 2.352221769262128, "grad_norm": 1.072998285293579, "learning_rate": 0.0007060911808669657, "loss": 3.4728, "step": 34620 }, { "epoch": 2.35256148933279, "grad_norm": 1.025968074798584, "learning_rate": 0.0007060487158581329, "loss": 3.5346, "step": 34625 }, { "epoch": 2.3529012094034516, "grad_norm": 1.2724545001983643, "learning_rate": 0.0007060062508493001, "loss": 3.6656, "step": 34630 }, { "epoch": 2.3532409294741132, "grad_norm": 1.695297360420227, "learning_rate": 0.0007059637858404674, "loss": 3.7839, "step": 34635 }, { "epoch": 2.3535806495447753, "grad_norm": 1.251686692237854, "learning_rate": 0.0007059213208316348, "loss": 3.4006, "step": 34640 }, { "epoch": 2.353920369615437, "grad_norm": 0.9923813343048096, "learning_rate": 0.000705878855822802, "loss": 3.5451, "step": 
34645 }, { "epoch": 2.3542600896860986, "grad_norm": 1.2625112533569336, "learning_rate": 0.0007058363908139693, "loss": 3.5059, "step": 34650 }, { "epoch": 2.3545998097567606, "grad_norm": 1.5852892398834229, "learning_rate": 0.0007057939258051366, "loss": 3.6181, "step": 34655 }, { "epoch": 2.3549395298274223, "grad_norm": 1.2492409944534302, "learning_rate": 0.0007057514607963038, "loss": 3.5926, "step": 34660 }, { "epoch": 2.355279249898084, "grad_norm": 1.3470721244812012, "learning_rate": 0.0007057089957874711, "loss": 3.6083, "step": 34665 }, { "epoch": 2.355618969968746, "grad_norm": 1.1401872634887695, "learning_rate": 0.0007056665307786384, "loss": 3.5646, "step": 34670 }, { "epoch": 2.3559586900394076, "grad_norm": 1.0860415697097778, "learning_rate": 0.0007056240657698057, "loss": 3.6095, "step": 34675 }, { "epoch": 2.3562984101100692, "grad_norm": 1.5901094675064087, "learning_rate": 0.000705581600760973, "loss": 3.4762, "step": 34680 }, { "epoch": 2.3566381301807313, "grad_norm": 1.1754382848739624, "learning_rate": 0.0007055391357521403, "loss": 3.5275, "step": 34685 }, { "epoch": 2.356977850251393, "grad_norm": 1.1694772243499756, "learning_rate": 0.0007054966707433075, "loss": 3.505, "step": 34690 }, { "epoch": 2.3573175703220546, "grad_norm": 1.2715271711349487, "learning_rate": 0.0007054542057344748, "loss": 3.4177, "step": 34695 }, { "epoch": 2.357657290392716, "grad_norm": 1.2660025358200073, "learning_rate": 0.000705411740725642, "loss": 3.8513, "step": 34700 }, { "epoch": 2.3579970104633783, "grad_norm": 1.4185303449630737, "learning_rate": 0.0007053692757168093, "loss": 3.4824, "step": 34705 }, { "epoch": 2.35833673053404, "grad_norm": 1.1990762948989868, "learning_rate": 0.0007053268107079767, "loss": 3.3557, "step": 34710 }, { "epoch": 2.3586764506047015, "grad_norm": 1.340793251991272, "learning_rate": 0.0007052843456991439, "loss": 3.353, "step": 34715 }, { "epoch": 2.3590161706753636, "grad_norm": 1.5403127670288086, "learning_rate": 
0.0007052418806903112, "loss": 3.6976, "step": 34720 }, { "epoch": 2.3593558907460253, "grad_norm": 1.2917119264602661, "learning_rate": 0.0007051994156814785, "loss": 3.5666, "step": 34725 }, { "epoch": 2.359695610816687, "grad_norm": 1.3377546072006226, "learning_rate": 0.0007051569506726457, "loss": 3.4859, "step": 34730 }, { "epoch": 2.360035330887349, "grad_norm": 1.3681493997573853, "learning_rate": 0.000705114485663813, "loss": 3.4741, "step": 34735 }, { "epoch": 2.3603750509580106, "grad_norm": 1.4282573461532593, "learning_rate": 0.0007050720206549803, "loss": 3.2558, "step": 34740 }, { "epoch": 2.360714771028672, "grad_norm": 2.628671169281006, "learning_rate": 0.0007050295556461476, "loss": 3.5788, "step": 34745 }, { "epoch": 2.3610544910993343, "grad_norm": 1.3291476964950562, "learning_rate": 0.000704987090637315, "loss": 3.578, "step": 34750 }, { "epoch": 2.361394211169996, "grad_norm": 1.2329154014587402, "learning_rate": 0.0007049446256284822, "loss": 3.5686, "step": 34755 }, { "epoch": 2.3617339312406576, "grad_norm": 1.2466022968292236, "learning_rate": 0.0007049021606196494, "loss": 3.6075, "step": 34760 }, { "epoch": 2.3620736513113196, "grad_norm": 1.3490564823150635, "learning_rate": 0.0007048596956108167, "loss": 3.6077, "step": 34765 }, { "epoch": 2.3624133713819813, "grad_norm": 1.3360005617141724, "learning_rate": 0.000704817230601984, "loss": 3.3156, "step": 34770 }, { "epoch": 2.362753091452643, "grad_norm": 1.553459882736206, "learning_rate": 0.0007047747655931512, "loss": 3.5901, "step": 34775 }, { "epoch": 2.363092811523305, "grad_norm": 1.1594059467315674, "learning_rate": 0.0007047323005843186, "loss": 3.4575, "step": 34780 }, { "epoch": 2.3634325315939666, "grad_norm": 1.3183895349502563, "learning_rate": 0.0007046898355754859, "loss": 3.4683, "step": 34785 }, { "epoch": 2.3637722516646282, "grad_norm": 1.9580680131912231, "learning_rate": 0.0007046473705666531, "loss": 3.7422, "step": 34790 }, { "epoch": 2.3641119717352903, 
"grad_norm": 1.0299049615859985, "learning_rate": 0.0007046049055578204, "loss": 3.4275, "step": 34795 }, { "epoch": 2.364451691805952, "grad_norm": 1.6547545194625854, "learning_rate": 0.0007045624405489876, "loss": 3.6803, "step": 34800 }, { "epoch": 2.3647914118766136, "grad_norm": 1.110260009765625, "learning_rate": 0.0007045199755401549, "loss": 3.6158, "step": 34805 }, { "epoch": 2.365131131947275, "grad_norm": 1.370435357093811, "learning_rate": 0.0007044775105313222, "loss": 3.5337, "step": 34810 }, { "epoch": 2.3654708520179373, "grad_norm": 1.2947505712509155, "learning_rate": 0.0007044350455224895, "loss": 3.3243, "step": 34815 }, { "epoch": 2.365810572088599, "grad_norm": 4.538529872894287, "learning_rate": 0.0007043925805136568, "loss": 3.3263, "step": 34820 }, { "epoch": 2.3661502921592605, "grad_norm": 1.0859700441360474, "learning_rate": 0.0007043501155048241, "loss": 3.0844, "step": 34825 }, { "epoch": 2.3664900122299226, "grad_norm": 1.1947060823440552, "learning_rate": 0.0007043076504959913, "loss": 3.1967, "step": 34830 }, { "epoch": 2.3668297323005842, "grad_norm": 1.1797693967819214, "learning_rate": 0.0007042651854871585, "loss": 3.7953, "step": 34835 }, { "epoch": 2.367169452371246, "grad_norm": 1.3018687963485718, "learning_rate": 0.0007042227204783259, "loss": 3.7213, "step": 34840 }, { "epoch": 2.367509172441908, "grad_norm": 1.3069720268249512, "learning_rate": 0.0007041802554694931, "loss": 3.5292, "step": 34845 }, { "epoch": 2.3678488925125696, "grad_norm": 1.2472831010818481, "learning_rate": 0.0007041377904606604, "loss": 3.655, "step": 34850 }, { "epoch": 2.368188612583231, "grad_norm": 1.1996874809265137, "learning_rate": 0.0007040953254518278, "loss": 3.5229, "step": 34855 }, { "epoch": 2.3685283326538933, "grad_norm": 1.2037960290908813, "learning_rate": 0.000704052860442995, "loss": 3.4843, "step": 34860 }, { "epoch": 2.368868052724555, "grad_norm": 1.7896984815597534, "learning_rate": 0.0007040103954341622, "loss": 3.5346, 
"step": 34865 }, { "epoch": 2.3692077727952165, "grad_norm": 1.5034233331680298, "learning_rate": 0.0007039679304253296, "loss": 3.6305, "step": 34870 }, { "epoch": 2.3695474928658786, "grad_norm": 1.2916998863220215, "learning_rate": 0.0007039254654164968, "loss": 3.6193, "step": 34875 }, { "epoch": 2.3698872129365403, "grad_norm": 1.2213454246520996, "learning_rate": 0.000703883000407664, "loss": 3.5776, "step": 34880 }, { "epoch": 2.370226933007202, "grad_norm": 1.316820740699768, "learning_rate": 0.0007038405353988315, "loss": 3.598, "step": 34885 }, { "epoch": 2.370566653077864, "grad_norm": 1.051166296005249, "learning_rate": 0.0007037980703899987, "loss": 3.4964, "step": 34890 }, { "epoch": 2.3709063731485256, "grad_norm": 1.257266640663147, "learning_rate": 0.0007037556053811659, "loss": 3.697, "step": 34895 }, { "epoch": 2.3712460932191872, "grad_norm": 1.16795814037323, "learning_rate": 0.0007037131403723332, "loss": 3.6146, "step": 34900 }, { "epoch": 2.3715858132898493, "grad_norm": 1.2730777263641357, "learning_rate": 0.0007036706753635005, "loss": 3.5338, "step": 34905 }, { "epoch": 2.371925533360511, "grad_norm": 1.4660251140594482, "learning_rate": 0.0007036282103546677, "loss": 3.3351, "step": 34910 }, { "epoch": 2.3722652534311726, "grad_norm": 1.0119036436080933, "learning_rate": 0.0007035857453458351, "loss": 3.6879, "step": 34915 }, { "epoch": 2.3726049735018346, "grad_norm": 1.2696367502212524, "learning_rate": 0.0007035432803370024, "loss": 3.5583, "step": 34920 }, { "epoch": 2.3729446935724963, "grad_norm": 1.7464241981506348, "learning_rate": 0.0007035008153281696, "loss": 3.5057, "step": 34925 }, { "epoch": 2.373284413643158, "grad_norm": 1.2230068445205688, "learning_rate": 0.0007034583503193369, "loss": 3.6289, "step": 34930 }, { "epoch": 2.37362413371382, "grad_norm": 1.639595627784729, "learning_rate": 0.0007034158853105041, "loss": 3.6094, "step": 34935 }, { "epoch": 2.3739638537844816, "grad_norm": 1.2488865852355957, 
"learning_rate": 0.0007033734203016714, "loss": 3.6633, "step": 34940 }, { "epoch": 2.3743035738551432, "grad_norm": 1.478501558303833, "learning_rate": 0.0007033309552928387, "loss": 3.3594, "step": 34945 }, { "epoch": 2.3746432939258053, "grad_norm": 1.6004127264022827, "learning_rate": 0.000703288490284006, "loss": 3.7122, "step": 34950 }, { "epoch": 2.374983013996467, "grad_norm": 1.4952681064605713, "learning_rate": 0.0007032460252751733, "loss": 3.5179, "step": 34955 }, { "epoch": 2.3753227340671286, "grad_norm": 1.23517644405365, "learning_rate": 0.0007032035602663406, "loss": 3.5716, "step": 34960 }, { "epoch": 2.3756624541377906, "grad_norm": 1.268907904624939, "learning_rate": 0.0007031610952575078, "loss": 3.6263, "step": 34965 }, { "epoch": 2.3760021742084523, "grad_norm": 1.4487587213516235, "learning_rate": 0.000703118630248675, "loss": 3.5258, "step": 34970 }, { "epoch": 2.376341894279114, "grad_norm": 1.2686102390289307, "learning_rate": 0.0007030761652398424, "loss": 3.6458, "step": 34975 }, { "epoch": 2.376681614349776, "grad_norm": 1.2807095050811768, "learning_rate": 0.0007030337002310096, "loss": 3.4618, "step": 34980 }, { "epoch": 2.3770213344204376, "grad_norm": 1.4320247173309326, "learning_rate": 0.000702991235222177, "loss": 3.4155, "step": 34985 }, { "epoch": 2.3773610544910992, "grad_norm": 1.1393343210220337, "learning_rate": 0.0007029487702133443, "loss": 3.7466, "step": 34990 }, { "epoch": 2.3777007745617613, "grad_norm": 1.2839637994766235, "learning_rate": 0.0007029063052045115, "loss": 3.4627, "step": 34995 }, { "epoch": 2.378040494632423, "grad_norm": 1.2741515636444092, "learning_rate": 0.0007028638401956787, "loss": 3.6883, "step": 35000 }, { "epoch": 2.3783802147030846, "grad_norm": 1.5087714195251465, "learning_rate": 0.0007028213751868461, "loss": 3.3127, "step": 35005 }, { "epoch": 2.3787199347737467, "grad_norm": 1.242505669593811, "learning_rate": 0.0007027789101780133, "loss": 3.6357, "step": 35010 }, { "epoch": 
2.3790596548444083, "grad_norm": 1.4180644750595093, "learning_rate": 0.0007027364451691805, "loss": 3.5011, "step": 35015 }, { "epoch": 2.37939937491507, "grad_norm": 1.1725534200668335, "learning_rate": 0.000702693980160348, "loss": 3.6157, "step": 35020 }, { "epoch": 2.379739094985732, "grad_norm": 1.4277150630950928, "learning_rate": 0.0007026515151515152, "loss": 3.6788, "step": 35025 }, { "epoch": 2.3800788150563936, "grad_norm": 1.8549220561981201, "learning_rate": 0.0007026090501426824, "loss": 3.5864, "step": 35030 }, { "epoch": 2.3804185351270553, "grad_norm": 1.2876139879226685, "learning_rate": 0.0007025665851338497, "loss": 3.6392, "step": 35035 }, { "epoch": 2.380758255197717, "grad_norm": 1.8382561206817627, "learning_rate": 0.000702524120125017, "loss": 3.7418, "step": 35040 }, { "epoch": 2.381097975268379, "grad_norm": 1.5828746557235718, "learning_rate": 0.0007024816551161842, "loss": 3.1656, "step": 35045 }, { "epoch": 2.3814376953390406, "grad_norm": 1.179750919342041, "learning_rate": 0.0007024391901073515, "loss": 3.1973, "step": 35050 }, { "epoch": 2.3817774154097022, "grad_norm": 1.54318368434906, "learning_rate": 0.0007023967250985189, "loss": 3.5704, "step": 35055 }, { "epoch": 2.3821171354803643, "grad_norm": 1.2815368175506592, "learning_rate": 0.0007023542600896861, "loss": 3.5793, "step": 35060 }, { "epoch": 2.382456855551026, "grad_norm": 1.1853424310684204, "learning_rate": 0.0007023117950808534, "loss": 3.6469, "step": 35065 }, { "epoch": 2.3827965756216876, "grad_norm": 1.6283150911331177, "learning_rate": 0.0007022693300720207, "loss": 3.5487, "step": 35070 }, { "epoch": 2.3831362956923496, "grad_norm": 1.2334272861480713, "learning_rate": 0.000702226865063188, "loss": 3.4536, "step": 35075 }, { "epoch": 2.3834760157630113, "grad_norm": 5.113142013549805, "learning_rate": 0.0007021844000543552, "loss": 3.537, "step": 35080 }, { "epoch": 2.383815735833673, "grad_norm": 1.6938074827194214, "learning_rate": 0.0007021419350455224, 
"loss": 3.4483, "step": 35085 }, { "epoch": 2.384155455904335, "grad_norm": 1.1429225206375122, "learning_rate": 0.0007020994700366899, "loss": 3.5065, "step": 35090 }, { "epoch": 2.3844951759749966, "grad_norm": 2.6590542793273926, "learning_rate": 0.0007020570050278571, "loss": 3.7958, "step": 35095 }, { "epoch": 2.3848348960456582, "grad_norm": 1.164212942123413, "learning_rate": 0.0007020145400190243, "loss": 3.7944, "step": 35100 }, { "epoch": 2.3851746161163203, "grad_norm": 1.100436806678772, "learning_rate": 0.0007019720750101917, "loss": 3.8085, "step": 35105 }, { "epoch": 2.385514336186982, "grad_norm": 1.2705446481704712, "learning_rate": 0.0007019296100013589, "loss": 3.6555, "step": 35110 }, { "epoch": 2.3858540562576436, "grad_norm": 1.3066027164459229, "learning_rate": 0.0007018871449925261, "loss": 3.5054, "step": 35115 }, { "epoch": 2.3861937763283056, "grad_norm": 1.2835038900375366, "learning_rate": 0.0007018446799836935, "loss": 3.8242, "step": 35120 }, { "epoch": 2.3865334963989673, "grad_norm": 1.1912258863449097, "learning_rate": 0.0007018022149748608, "loss": 3.6036, "step": 35125 }, { "epoch": 2.386873216469629, "grad_norm": 1.3738199472427368, "learning_rate": 0.000701759749966028, "loss": 3.8175, "step": 35130 }, { "epoch": 2.387212936540291, "grad_norm": 1.231696605682373, "learning_rate": 0.0007017172849571953, "loss": 3.514, "step": 35135 }, { "epoch": 2.3875526566109526, "grad_norm": 1.1369655132293701, "learning_rate": 0.0007016748199483626, "loss": 3.4016, "step": 35140 }, { "epoch": 2.3878923766816142, "grad_norm": 1.2745827436447144, "learning_rate": 0.0007016323549395298, "loss": 3.6089, "step": 35145 }, { "epoch": 2.388232096752276, "grad_norm": 3.390641450881958, "learning_rate": 0.0007015898899306971, "loss": 3.5312, "step": 35150 }, { "epoch": 2.388571816822938, "grad_norm": 1.3178799152374268, "learning_rate": 0.0007015474249218644, "loss": 3.5385, "step": 35155 }, { "epoch": 2.3889115368935996, "grad_norm": 
1.219443440437317, "learning_rate": 0.0007015049599130317, "loss": 3.7337, "step": 35160 }, { "epoch": 2.389251256964261, "grad_norm": 1.695076584815979, "learning_rate": 0.000701462494904199, "loss": 3.6772, "step": 35165 }, { "epoch": 2.3895909770349233, "grad_norm": 1.4236966371536255, "learning_rate": 0.0007014200298953663, "loss": 3.3746, "step": 35170 }, { "epoch": 2.389930697105585, "grad_norm": 2.272021532058716, "learning_rate": 0.0007013775648865335, "loss": 3.6777, "step": 35175 }, { "epoch": 2.3902704171762466, "grad_norm": 1.3778988122940063, "learning_rate": 0.0007013350998777008, "loss": 3.446, "step": 35180 }, { "epoch": 2.3906101372469086, "grad_norm": 1.4085252285003662, "learning_rate": 0.000701292634868868, "loss": 3.4949, "step": 35185 }, { "epoch": 2.3909498573175703, "grad_norm": 1.1748807430267334, "learning_rate": 0.0007012501698600353, "loss": 3.5262, "step": 35190 }, { "epoch": 2.391289577388232, "grad_norm": 1.2758451700210571, "learning_rate": 0.0007012077048512027, "loss": 3.7098, "step": 35195 }, { "epoch": 2.391629297458894, "grad_norm": 1.4940698146820068, "learning_rate": 0.0007011652398423699, "loss": 3.8701, "step": 35200 }, { "epoch": 2.3919690175295556, "grad_norm": 1.2908804416656494, "learning_rate": 0.0007011227748335372, "loss": 3.1542, "step": 35205 }, { "epoch": 2.3923087376002172, "grad_norm": 1.240539312362671, "learning_rate": 0.0007010803098247045, "loss": 3.9763, "step": 35210 }, { "epoch": 2.3926484576708793, "grad_norm": 1.3943372964859009, "learning_rate": 0.0007010378448158717, "loss": 3.6161, "step": 35215 }, { "epoch": 2.392988177741541, "grad_norm": 1.0696280002593994, "learning_rate": 0.000700995379807039, "loss": 3.6258, "step": 35220 }, { "epoch": 2.3933278978122026, "grad_norm": 1.3510327339172363, "learning_rate": 0.0007009529147982063, "loss": 3.8296, "step": 35225 }, { "epoch": 2.3936676178828646, "grad_norm": 1.3975180387496948, "learning_rate": 0.0007009104497893736, "loss": 3.6638, "step": 35230 }, { 
"epoch": 2.3940073379535263, "grad_norm": 1.4687447547912598, "learning_rate": 0.0007008679847805408, "loss": 3.422, "step": 35235 }, { "epoch": 2.394347058024188, "grad_norm": 1.3828182220458984, "learning_rate": 0.0007008255197717082, "loss": 3.665, "step": 35240 }, { "epoch": 2.39468677809485, "grad_norm": 1.214925765991211, "learning_rate": 0.0007007830547628754, "loss": 3.4063, "step": 35245 }, { "epoch": 2.3950264981655116, "grad_norm": 1.3605114221572876, "learning_rate": 0.0007007405897540426, "loss": 3.4001, "step": 35250 }, { "epoch": 2.3953662182361732, "grad_norm": 2.678436279296875, "learning_rate": 0.00070069812474521, "loss": 3.3854, "step": 35255 }, { "epoch": 2.3957059383068353, "grad_norm": 1.5432859659194946, "learning_rate": 0.0007006556597363772, "loss": 3.5038, "step": 35260 }, { "epoch": 2.396045658377497, "grad_norm": 1.108224868774414, "learning_rate": 0.0007006131947275445, "loss": 3.7731, "step": 35265 }, { "epoch": 2.3963853784481586, "grad_norm": 1.169445276260376, "learning_rate": 0.0007005707297187119, "loss": 3.6645, "step": 35270 }, { "epoch": 2.3967250985188207, "grad_norm": 1.2191123962402344, "learning_rate": 0.0007005282647098791, "loss": 3.4599, "step": 35275 }, { "epoch": 2.3970648185894823, "grad_norm": 1.165268063545227, "learning_rate": 0.0007004857997010463, "loss": 3.5833, "step": 35280 }, { "epoch": 2.397404538660144, "grad_norm": 1.0861514806747437, "learning_rate": 0.0007004433346922136, "loss": 3.5935, "step": 35285 }, { "epoch": 2.397744258730806, "grad_norm": 1.30914306640625, "learning_rate": 0.0007004008696833809, "loss": 3.5948, "step": 35290 }, { "epoch": 2.3980839788014676, "grad_norm": 1.0236833095550537, "learning_rate": 0.0007003584046745481, "loss": 3.6063, "step": 35295 }, { "epoch": 2.3984236988721293, "grad_norm": 1.2453080415725708, "learning_rate": 0.0007003159396657155, "loss": 3.5893, "step": 35300 }, { "epoch": 2.3987634189427913, "grad_norm": 1.16042959690094, "learning_rate": 
0.0007002734746568828, "loss": 3.7703, "step": 35305 }, { "epoch": 2.399103139013453, "grad_norm": 1.434299111366272, "learning_rate": 0.00070023100964805, "loss": 3.3537, "step": 35310 }, { "epoch": 2.3994428590841146, "grad_norm": 1.5888516902923584, "learning_rate": 0.0007001885446392173, "loss": 3.3338, "step": 35315 }, { "epoch": 2.3997825791547767, "grad_norm": 1.531897783279419, "learning_rate": 0.0007001460796303845, "loss": 3.5266, "step": 35320 }, { "epoch": 2.4001222992254383, "grad_norm": 1.3932427167892456, "learning_rate": 0.0007001036146215518, "loss": 3.6358, "step": 35325 }, { "epoch": 2.4004620192961, "grad_norm": 1.160780906677246, "learning_rate": 0.0007000611496127191, "loss": 3.368, "step": 35330 }, { "epoch": 2.400801739366762, "grad_norm": 1.1460016965866089, "learning_rate": 0.0007000186846038864, "loss": 3.8119, "step": 35335 }, { "epoch": 2.4011414594374236, "grad_norm": 1.191314935684204, "learning_rate": 0.0006999762195950537, "loss": 3.5008, "step": 35340 }, { "epoch": 2.4014811795080853, "grad_norm": 1.3952231407165527, "learning_rate": 0.000699933754586221, "loss": 3.5477, "step": 35345 }, { "epoch": 2.4018208995787473, "grad_norm": 1.735198974609375, "learning_rate": 0.0006998912895773882, "loss": 3.3962, "step": 35350 }, { "epoch": 2.402160619649409, "grad_norm": 1.3409408330917358, "learning_rate": 0.0006998488245685555, "loss": 3.4627, "step": 35355 }, { "epoch": 2.4025003397200706, "grad_norm": 2.165269374847412, "learning_rate": 0.0006998063595597228, "loss": 3.4632, "step": 35360 }, { "epoch": 2.4028400597907327, "grad_norm": 1.5865267515182495, "learning_rate": 0.00069976389455089, "loss": 3.502, "step": 35365 }, { "epoch": 2.4031797798613943, "grad_norm": 1.2707120180130005, "learning_rate": 0.0006997214295420574, "loss": 3.6356, "step": 35370 }, { "epoch": 2.403519499932056, "grad_norm": 1.1136037111282349, "learning_rate": 0.0006996789645332247, "loss": 3.425, "step": 35375 }, { "epoch": 2.4038592200027176, "grad_norm": 
1.3142033815383911, "learning_rate": 0.0006996364995243919, "loss": 3.4588, "step": 35380 }, { "epoch": 2.4041989400733796, "grad_norm": 1.3617955446243286, "learning_rate": 0.0006995940345155591, "loss": 3.6427, "step": 35385 }, { "epoch": 2.4045386601440413, "grad_norm": 3.931619167327881, "learning_rate": 0.0006995515695067265, "loss": 3.5346, "step": 35390 }, { "epoch": 2.404878380214703, "grad_norm": 1.360131025314331, "learning_rate": 0.0006995091044978937, "loss": 3.5323, "step": 35395 }, { "epoch": 2.405218100285365, "grad_norm": 1.6649982929229736, "learning_rate": 0.0006994666394890609, "loss": 3.5874, "step": 35400 }, { "epoch": 2.4055578203560266, "grad_norm": 1.3206546306610107, "learning_rate": 0.0006994241744802284, "loss": 3.7848, "step": 35405 }, { "epoch": 2.4058975404266882, "grad_norm": 1.5437058210372925, "learning_rate": 0.0006993817094713956, "loss": 3.3933, "step": 35410 }, { "epoch": 2.4062372604973503, "grad_norm": 1.1580471992492676, "learning_rate": 0.0006993392444625629, "loss": 3.4509, "step": 35415 }, { "epoch": 2.406576980568012, "grad_norm": 1.1073102951049805, "learning_rate": 0.0006992967794537302, "loss": 3.5132, "step": 35420 }, { "epoch": 2.4069167006386736, "grad_norm": 1.3834213018417358, "learning_rate": 0.0006992543144448974, "loss": 3.6448, "step": 35425 }, { "epoch": 2.4072564207093357, "grad_norm": 1.2501112222671509, "learning_rate": 0.0006992118494360647, "loss": 3.6215, "step": 35430 }, { "epoch": 2.4075961407799973, "grad_norm": 1.5042037963867188, "learning_rate": 0.000699169384427232, "loss": 3.4431, "step": 35435 }, { "epoch": 2.407935860850659, "grad_norm": 1.2858237028121948, "learning_rate": 0.0006991269194183993, "loss": 3.4826, "step": 35440 }, { "epoch": 2.408275580921321, "grad_norm": 1.4198728799819946, "learning_rate": 0.0006990844544095666, "loss": 3.7535, "step": 35445 }, { "epoch": 2.4086153009919826, "grad_norm": null, "learning_rate": 0.0006990504824025003, "loss": 3.7853, "step": 35450 }, { 
"epoch": 2.4089550210626443, "grad_norm": 1.3801672458648682, "learning_rate": 0.0006990080173936675, "loss": 3.4915, "step": 35455 }, { "epoch": 2.4092947411333063, "grad_norm": 1.2761940956115723, "learning_rate": 0.000698965552384835, "loss": 3.6889, "step": 35460 }, { "epoch": 2.409634461203968, "grad_norm": 1.498129963874817, "learning_rate": 0.0006989230873760022, "loss": 3.3464, "step": 35465 }, { "epoch": 2.4099741812746296, "grad_norm": 1.1748076677322388, "learning_rate": 0.0006988806223671694, "loss": 3.6316, "step": 35470 }, { "epoch": 2.4103139013452917, "grad_norm": 1.1563559770584106, "learning_rate": 0.0006988381573583368, "loss": 3.4555, "step": 35475 }, { "epoch": 2.4106536214159533, "grad_norm": 1.0708388090133667, "learning_rate": 0.000698795692349504, "loss": 3.5132, "step": 35480 }, { "epoch": 2.410993341486615, "grad_norm": 1.452616810798645, "learning_rate": 0.0006987532273406712, "loss": 3.595, "step": 35485 }, { "epoch": 2.4113330615572766, "grad_norm": 1.1081477403640747, "learning_rate": 0.0006987107623318386, "loss": 3.7414, "step": 35490 }, { "epoch": 2.4116727816279386, "grad_norm": 1.3235790729522705, "learning_rate": 0.0006986682973230059, "loss": 3.7113, "step": 35495 }, { "epoch": 2.4120125016986003, "grad_norm": 1.336137294769287, "learning_rate": 0.0006986258323141731, "loss": 3.7349, "step": 35500 }, { "epoch": 2.412352221769262, "grad_norm": 1.4714988470077515, "learning_rate": 0.0006985833673053405, "loss": 3.3048, "step": 35505 }, { "epoch": 2.412691941839924, "grad_norm": 1.9195516109466553, "learning_rate": 0.0006985409022965077, "loss": 3.5482, "step": 35510 }, { "epoch": 2.4130316619105856, "grad_norm": 1.097721815109253, "learning_rate": 0.0006984984372876749, "loss": 3.4501, "step": 35515 }, { "epoch": 2.4133713819812472, "grad_norm": 1.5780669450759888, "learning_rate": 0.0006984559722788422, "loss": 3.7454, "step": 35520 }, { "epoch": 2.4137111020519093, "grad_norm": 1.4465327262878418, "learning_rate": 
0.0006984135072700095, "loss": 3.5387, "step": 35525 }, { "epoch": 2.414050822122571, "grad_norm": 1.3930774927139282, "learning_rate": 0.0006983710422611768, "loss": 3.769, "step": 35530 }, { "epoch": 2.4143905421932326, "grad_norm": 1.021621823310852, "learning_rate": 0.0006983285772523441, "loss": 3.6473, "step": 35535 }, { "epoch": 2.4147302622638946, "grad_norm": 1.2516058683395386, "learning_rate": 0.0006982861122435114, "loss": 3.6265, "step": 35540 }, { "epoch": 2.4150699823345563, "grad_norm": 1.2900390625, "learning_rate": 0.0006982436472346786, "loss": 3.4723, "step": 35545 }, { "epoch": 2.415409702405218, "grad_norm": 1.4877928495407104, "learning_rate": 0.0006982011822258459, "loss": 3.3231, "step": 35550 }, { "epoch": 2.41574942247588, "grad_norm": 1.3736542463302612, "learning_rate": 0.0006981587172170131, "loss": 3.7838, "step": 35555 }, { "epoch": 2.4160891425465416, "grad_norm": 1.2274916172027588, "learning_rate": 0.0006981162522081804, "loss": 3.6775, "step": 35560 }, { "epoch": 2.4164288626172032, "grad_norm": 1.1064640283584595, "learning_rate": 0.0006980737871993478, "loss": 3.4914, "step": 35565 }, { "epoch": 2.4167685826878653, "grad_norm": 1.2666523456573486, "learning_rate": 0.000698031322190515, "loss": 3.4489, "step": 35570 }, { "epoch": 2.417108302758527, "grad_norm": 0.954515814781189, "learning_rate": 0.0006979888571816823, "loss": 3.4113, "step": 35575 }, { "epoch": 2.4174480228291886, "grad_norm": 1.563506841659546, "learning_rate": 0.0006979463921728496, "loss": 3.5555, "step": 35580 }, { "epoch": 2.4177877428998507, "grad_norm": 1.1322145462036133, "learning_rate": 0.0006979039271640168, "loss": 3.7457, "step": 35585 }, { "epoch": 2.4181274629705123, "grad_norm": 1.952986478805542, "learning_rate": 0.0006978614621551841, "loss": 3.5544, "step": 35590 }, { "epoch": 2.418467183041174, "grad_norm": 1.522763729095459, "learning_rate": 0.0006978189971463514, "loss": 3.7151, "step": 35595 }, { "epoch": 2.418806903111836, "grad_norm": 
1.5493541955947876, "learning_rate": 0.0006977765321375187, "loss": 3.6341, "step": 35600 }, { "epoch": 2.4191466231824976, "grad_norm": 1.0645071268081665, "learning_rate": 0.000697734067128686, "loss": 3.6222, "step": 35605 }, { "epoch": 2.4194863432531593, "grad_norm": 1.2268913984298706, "learning_rate": 0.0006976916021198533, "loss": 3.5659, "step": 35610 }, { "epoch": 2.4198260633238213, "grad_norm": 1.3737502098083496, "learning_rate": 0.0006976491371110205, "loss": 3.6466, "step": 35615 }, { "epoch": 2.420165783394483, "grad_norm": 1.0204622745513916, "learning_rate": 0.0006976066721021878, "loss": 3.6318, "step": 35620 }, { "epoch": 2.4205055034651446, "grad_norm": 1.2547650337219238, "learning_rate": 0.0006975642070933551, "loss": 3.4149, "step": 35625 }, { "epoch": 2.4208452235358067, "grad_norm": 1.455335259437561, "learning_rate": 0.0006975217420845223, "loss": 3.5836, "step": 35630 }, { "epoch": 2.4211849436064683, "grad_norm": 1.3768136501312256, "learning_rate": 0.0006974792770756897, "loss": 3.6832, "step": 35635 }, { "epoch": 2.42152466367713, "grad_norm": 1.2696654796600342, "learning_rate": 0.000697436812066857, "loss": 3.4281, "step": 35640 }, { "epoch": 2.421864383747792, "grad_norm": 1.1138136386871338, "learning_rate": 0.0006973943470580242, "loss": 3.4965, "step": 35645 }, { "epoch": 2.4222041038184536, "grad_norm": 1.2200162410736084, "learning_rate": 0.0006973518820491915, "loss": 3.4063, "step": 35650 }, { "epoch": 2.4225438238891153, "grad_norm": 1.2690263986587524, "learning_rate": 0.0006973094170403588, "loss": 3.3312, "step": 35655 }, { "epoch": 2.4228835439597773, "grad_norm": 1.338155746459961, "learning_rate": 0.000697266952031526, "loss": 3.7295, "step": 35660 }, { "epoch": 2.423223264030439, "grad_norm": 1.0465713739395142, "learning_rate": 0.0006972244870226934, "loss": 3.5439, "step": 35665 }, { "epoch": 2.4235629841011006, "grad_norm": 1.2971433401107788, "learning_rate": 0.0006971820220138606, "loss": 3.6764, "step": 35670 
}, { "epoch": 2.4239027041717627, "grad_norm": 1.8012278079986572, "learning_rate": 0.0006971395570050279, "loss": 3.4585, "step": 35675 }, { "epoch": 2.4242424242424243, "grad_norm": 1.4334825277328491, "learning_rate": 0.0006970970919961952, "loss": 3.5296, "step": 35680 }, { "epoch": 2.424582144313086, "grad_norm": 1.3466120958328247, "learning_rate": 0.0006970546269873624, "loss": 3.5763, "step": 35685 }, { "epoch": 2.424921864383748, "grad_norm": 1.3192206621170044, "learning_rate": 0.0006970121619785297, "loss": 3.5441, "step": 35690 }, { "epoch": 2.4252615844544096, "grad_norm": 1.4083997011184692, "learning_rate": 0.000696969696969697, "loss": 3.3955, "step": 35695 }, { "epoch": 2.4256013045250713, "grad_norm": 1.4720301628112793, "learning_rate": 0.0006969272319608643, "loss": 3.566, "step": 35700 }, { "epoch": 2.4259410245957334, "grad_norm": 1.3444175720214844, "learning_rate": 0.0006968847669520316, "loss": 3.0468, "step": 35705 }, { "epoch": 2.426280744666395, "grad_norm": 1.2220920324325562, "learning_rate": 0.0006968423019431989, "loss": 3.7693, "step": 35710 }, { "epoch": 2.4266204647370566, "grad_norm": 1.100240707397461, "learning_rate": 0.0006967998369343661, "loss": 3.3776, "step": 35715 }, { "epoch": 2.4269601848077182, "grad_norm": 1.3546596765518188, "learning_rate": 0.0006967573719255333, "loss": 3.5598, "step": 35720 }, { "epoch": 2.4272999048783803, "grad_norm": 1.0443830490112305, "learning_rate": 0.0006967149069167007, "loss": 3.3485, "step": 35725 }, { "epoch": 2.427639624949042, "grad_norm": 1.2431402206420898, "learning_rate": 0.0006966724419078679, "loss": 3.7237, "step": 35730 }, { "epoch": 2.4279793450197036, "grad_norm": 1.153991937637329, "learning_rate": 0.0006966299768990352, "loss": 3.4891, "step": 35735 }, { "epoch": 2.4283190650903657, "grad_norm": 1.103986382484436, "learning_rate": 0.0006965875118902026, "loss": 3.5351, "step": 35740 }, { "epoch": 2.4286587851610273, "grad_norm": 1.3857300281524658, "learning_rate": 
0.0006965450468813698, "loss": 3.3204, "step": 35745 }, { "epoch": 2.428998505231689, "grad_norm": 1.4103022813796997, "learning_rate": 0.000696502581872537, "loss": 3.5977, "step": 35750 }, { "epoch": 2.429338225302351, "grad_norm": 1.5275951623916626, "learning_rate": 0.0006964601168637044, "loss": 3.4664, "step": 35755 }, { "epoch": 2.4296779453730126, "grad_norm": 1.1128305196762085, "learning_rate": 0.0006964176518548716, "loss": 3.8047, "step": 35760 }, { "epoch": 2.4300176654436743, "grad_norm": 1.5093801021575928, "learning_rate": 0.0006963751868460388, "loss": 3.6388, "step": 35765 }, { "epoch": 2.4303573855143363, "grad_norm": 1.2318612337112427, "learning_rate": 0.0006963327218372062, "loss": 3.5948, "step": 35770 }, { "epoch": 2.430697105584998, "grad_norm": 1.3451224565505981, "learning_rate": 0.0006962902568283735, "loss": 3.6014, "step": 35775 }, { "epoch": 2.4310368256556596, "grad_norm": 1.0374201536178589, "learning_rate": 0.0006962477918195407, "loss": 3.1399, "step": 35780 }, { "epoch": 2.4313765457263217, "grad_norm": 1.3840405941009521, "learning_rate": 0.000696205326810708, "loss": 3.6231, "step": 35785 }, { "epoch": 2.4317162657969833, "grad_norm": 1.1838042736053467, "learning_rate": 0.0006961628618018753, "loss": 3.4884, "step": 35790 }, { "epoch": 2.432055985867645, "grad_norm": 1.3149148225784302, "learning_rate": 0.0006961203967930425, "loss": 3.672, "step": 35795 }, { "epoch": 2.432395705938307, "grad_norm": 1.1007264852523804, "learning_rate": 0.0006960779317842098, "loss": 3.5238, "step": 35800 }, { "epoch": 2.4327354260089686, "grad_norm": 1.5760598182678223, "learning_rate": 0.0006960354667753772, "loss": 3.48, "step": 35805 }, { "epoch": 2.4330751460796303, "grad_norm": 1.1709420680999756, "learning_rate": 0.0006959930017665444, "loss": 3.6751, "step": 35810 }, { "epoch": 2.4334148661502923, "grad_norm": 1.2044578790664673, "learning_rate": 0.0006959505367577117, "loss": 3.4165, "step": 35815 }, { "epoch": 2.433754586220954, 
"grad_norm": 0.9583981037139893, "learning_rate": 0.0006959080717488789, "loss": 3.776, "step": 35820 }, { "epoch": 2.4340943062916156, "grad_norm": 1.2676352262496948, "learning_rate": 0.0006958656067400462, "loss": 3.5624, "step": 35825 }, { "epoch": 2.4344340263622772, "grad_norm": 1.0228285789489746, "learning_rate": 0.0006958231417312135, "loss": 3.4574, "step": 35830 }, { "epoch": 2.4347737464329393, "grad_norm": 1.3922451734542847, "learning_rate": 0.0006957806767223807, "loss": 3.322, "step": 35835 }, { "epoch": 2.435113466503601, "grad_norm": 1.149833083152771, "learning_rate": 0.0006957382117135481, "loss": 3.5789, "step": 35840 }, { "epoch": 2.4354531865742626, "grad_norm": 1.5843697786331177, "learning_rate": 0.0006956957467047154, "loss": 3.4439, "step": 35845 }, { "epoch": 2.4357929066449246, "grad_norm": 1.8942269086837769, "learning_rate": 0.0006956532816958826, "loss": 3.418, "step": 35850 }, { "epoch": 2.4361326267155863, "grad_norm": 1.2675694227218628, "learning_rate": 0.0006956108166870498, "loss": 3.6298, "step": 35855 }, { "epoch": 2.436472346786248, "grad_norm": 1.5381340980529785, "learning_rate": 0.0006955683516782172, "loss": 3.6154, "step": 35860 }, { "epoch": 2.43681206685691, "grad_norm": 1.3548442125320435, "learning_rate": 0.0006955258866693844, "loss": 3.3965, "step": 35865 }, { "epoch": 2.4371517869275716, "grad_norm": 1.1700677871704102, "learning_rate": 0.0006954834216605516, "loss": 3.4376, "step": 35870 }, { "epoch": 2.4374915069982332, "grad_norm": 1.3571609258651733, "learning_rate": 0.0006954409566517191, "loss": 3.6292, "step": 35875 }, { "epoch": 2.4378312270688953, "grad_norm": 1.3518925905227661, "learning_rate": 0.0006953984916428863, "loss": 3.57, "step": 35880 }, { "epoch": 2.438170947139557, "grad_norm": 1.5263404846191406, "learning_rate": 0.0006953560266340535, "loss": 3.3605, "step": 35885 }, { "epoch": 2.4385106672102186, "grad_norm": 1.5448472499847412, "learning_rate": 0.0006953135616252209, "loss": 3.475, 
"step": 35890 }, { "epoch": 2.4388503872808807, "grad_norm": 1.1947009563446045, "learning_rate": 0.0006952710966163881, "loss": 3.6026, "step": 35895 }, { "epoch": 2.4391901073515423, "grad_norm": 1.753350853919983, "learning_rate": 0.0006952286316075553, "loss": 3.3952, "step": 35900 }, { "epoch": 2.439529827422204, "grad_norm": 1.6873669624328613, "learning_rate": 0.0006951861665987226, "loss": 3.4123, "step": 35905 }, { "epoch": 2.439869547492866, "grad_norm": 0.9727484583854675, "learning_rate": 0.00069514370158989, "loss": 3.7255, "step": 35910 }, { "epoch": 2.4402092675635276, "grad_norm": 1.324242353439331, "learning_rate": 0.0006951012365810572, "loss": 3.5085, "step": 35915 }, { "epoch": 2.4405489876341893, "grad_norm": 1.6028605699539185, "learning_rate": 0.0006950587715722245, "loss": 3.4041, "step": 35920 }, { "epoch": 2.4408887077048513, "grad_norm": 1.0806865692138672, "learning_rate": 0.0006950163065633918, "loss": 3.3048, "step": 35925 }, { "epoch": 2.441228427775513, "grad_norm": 1.3808786869049072, "learning_rate": 0.000694973841554559, "loss": 3.6853, "step": 35930 }, { "epoch": 2.4415681478461746, "grad_norm": 1.2856910228729248, "learning_rate": 0.0006949313765457263, "loss": 3.7896, "step": 35935 }, { "epoch": 2.4419078679168367, "grad_norm": 1.300260066986084, "learning_rate": 0.0006948889115368936, "loss": 3.4927, "step": 35940 }, { "epoch": 2.4422475879874983, "grad_norm": 1.3568270206451416, "learning_rate": 0.0006948464465280609, "loss": 3.5607, "step": 35945 }, { "epoch": 2.44258730805816, "grad_norm": 1.349130392074585, "learning_rate": 0.0006948039815192282, "loss": 3.4257, "step": 35950 }, { "epoch": 2.442927028128822, "grad_norm": 1.875653862953186, "learning_rate": 0.0006947615165103954, "loss": 3.7819, "step": 35955 }, { "epoch": 2.4432667481994836, "grad_norm": 1.0605227947235107, "learning_rate": 0.0006947190515015628, "loss": 3.3923, "step": 35960 }, { "epoch": 2.4436064682701453, "grad_norm": 1.1914063692092896, 
"learning_rate": 0.00069467658649273, "loss": 3.5902, "step": 35965 }, { "epoch": 2.4439461883408073, "grad_norm": 1.36515474319458, "learning_rate": 0.0006946341214838972, "loss": 3.4472, "step": 35970 }, { "epoch": 2.444285908411469, "grad_norm": 1.0451812744140625, "learning_rate": 0.0006945916564750646, "loss": 3.4496, "step": 35975 }, { "epoch": 2.4446256284821306, "grad_norm": 1.2349435091018677, "learning_rate": 0.0006945491914662319, "loss": 3.2544, "step": 35980 }, { "epoch": 2.4449653485527927, "grad_norm": 1.302574872970581, "learning_rate": 0.0006945067264573991, "loss": 3.1777, "step": 35985 }, { "epoch": 2.4453050686234543, "grad_norm": 1.539341926574707, "learning_rate": 0.0006944642614485665, "loss": 3.7281, "step": 35990 }, { "epoch": 2.445644788694116, "grad_norm": 1.5221952199935913, "learning_rate": 0.0006944217964397337, "loss": 3.5239, "step": 35995 }, { "epoch": 2.445984508764778, "grad_norm": 1.3768815994262695, "learning_rate": 0.0006943793314309009, "loss": 3.6631, "step": 36000 }, { "epoch": 2.4463242288354397, "grad_norm": 1.4753042459487915, "learning_rate": 0.0006943368664220682, "loss": 3.6667, "step": 36005 }, { "epoch": 2.4466639489061013, "grad_norm": 1.3092041015625, "learning_rate": 0.0006942944014132355, "loss": 3.5771, "step": 36010 }, { "epoch": 2.4470036689767634, "grad_norm": 1.283613920211792, "learning_rate": 0.0006942519364044028, "loss": 3.5346, "step": 36015 }, { "epoch": 2.447343389047425, "grad_norm": 1.5693796873092651, "learning_rate": 0.0006942094713955701, "loss": 3.2903, "step": 36020 }, { "epoch": 2.4476831091180866, "grad_norm": 2.117438316345215, "learning_rate": 0.0006941670063867374, "loss": 3.6154, "step": 36025 }, { "epoch": 2.4480228291887487, "grad_norm": 1.2273346185684204, "learning_rate": 0.0006941245413779046, "loss": 3.4439, "step": 36030 }, { "epoch": 2.4483625492594103, "grad_norm": 1.2763428688049316, "learning_rate": 0.0006940820763690719, "loss": 3.4976, "step": 36035 }, { "epoch": 
2.448702269330072, "grad_norm": 1.4353326559066772, "learning_rate": 0.0006940396113602392, "loss": 3.4611, "step": 36040 }, { "epoch": 2.449041989400734, "grad_norm": 1.1625536680221558, "learning_rate": 0.0006939971463514064, "loss": 3.7248, "step": 36045 }, { "epoch": 2.4493817094713957, "grad_norm": 1.4551938772201538, "learning_rate": 0.0006939546813425738, "loss": 3.6429, "step": 36050 }, { "epoch": 2.4497214295420573, "grad_norm": 1.403239130973816, "learning_rate": 0.000693912216333741, "loss": 3.5496, "step": 36055 }, { "epoch": 2.4500611496127194, "grad_norm": 1.2287176847457886, "learning_rate": 0.0006938697513249083, "loss": 3.5386, "step": 36060 }, { "epoch": 2.450400869683381, "grad_norm": 1.4122438430786133, "learning_rate": 0.0006938272863160756, "loss": 3.5004, "step": 36065 }, { "epoch": 2.4507405897540426, "grad_norm": 1.1945974826812744, "learning_rate": 0.0006937848213072428, "loss": 3.4661, "step": 36070 }, { "epoch": 2.4510803098247043, "grad_norm": 1.3655122518539429, "learning_rate": 0.0006937423562984101, "loss": 3.5731, "step": 36075 }, { "epoch": 2.4514200298953663, "grad_norm": 1.0868828296661377, "learning_rate": 0.0006936998912895774, "loss": 3.659, "step": 36080 }, { "epoch": 2.451759749966028, "grad_norm": 1.1764707565307617, "learning_rate": 0.0006936574262807447, "loss": 3.6991, "step": 36085 }, { "epoch": 2.4520994700366896, "grad_norm": 1.662514567375183, "learning_rate": 0.000693614961271912, "loss": 3.3859, "step": 36090 }, { "epoch": 2.4524391901073517, "grad_norm": 1.4481440782546997, "learning_rate": 0.0006935724962630793, "loss": 3.4394, "step": 36095 }, { "epoch": 2.4527789101780133, "grad_norm": 1.676992654800415, "learning_rate": 0.0006935300312542465, "loss": 3.3501, "step": 36100 }, { "epoch": 2.453118630248675, "grad_norm": 1.5484057664871216, "learning_rate": 0.0006934875662454137, "loss": 3.2982, "step": 36105 }, { "epoch": 2.453458350319337, "grad_norm": 1.2308188676834106, "learning_rate": 0.0006934451012365811, 
"loss": 3.6929, "step": 36110 }, { "epoch": 2.4537980703899986, "grad_norm": 1.1103712320327759, "learning_rate": 0.0006934026362277483, "loss": 3.6829, "step": 36115 }, { "epoch": 2.4541377904606603, "grad_norm": 1.6322135925292969, "learning_rate": 0.0006933601712189156, "loss": 3.6035, "step": 36120 }, { "epoch": 2.4544775105313223, "grad_norm": 2.852173089981079, "learning_rate": 0.000693317706210083, "loss": 3.4521, "step": 36125 }, { "epoch": 2.454817230601984, "grad_norm": 3.492353916168213, "learning_rate": 0.0006932752412012502, "loss": 3.5452, "step": 36130 }, { "epoch": 2.4551569506726456, "grad_norm": 1.2515127658843994, "learning_rate": 0.0006932327761924174, "loss": 3.6463, "step": 36135 }, { "epoch": 2.4554966707433077, "grad_norm": 1.2777855396270752, "learning_rate": 0.0006931903111835848, "loss": 3.2558, "step": 36140 }, { "epoch": 2.4558363908139693, "grad_norm": 1.476940393447876, "learning_rate": 0.000693147846174752, "loss": 3.789, "step": 36145 }, { "epoch": 2.456176110884631, "grad_norm": 1.3309577703475952, "learning_rate": 0.0006931053811659192, "loss": 3.5606, "step": 36150 }, { "epoch": 2.456515830955293, "grad_norm": 1.2939788103103638, "learning_rate": 0.0006930629161570866, "loss": 3.661, "step": 36155 }, { "epoch": 2.4568555510259547, "grad_norm": 1.700923204421997, "learning_rate": 0.0006930204511482539, "loss": 3.5567, "step": 36160 }, { "epoch": 2.4571952710966163, "grad_norm": 1.2673894166946411, "learning_rate": 0.0006929779861394211, "loss": 3.7817, "step": 36165 }, { "epoch": 2.457534991167278, "grad_norm": 2.4313368797302246, "learning_rate": 0.0006929355211305884, "loss": 3.3274, "step": 36170 }, { "epoch": 2.45787471123794, "grad_norm": 1.2577656507492065, "learning_rate": 0.0006928930561217557, "loss": 3.5698, "step": 36175 }, { "epoch": 2.4582144313086016, "grad_norm": 1.2095625400543213, "learning_rate": 0.0006928505911129229, "loss": 3.5156, "step": 36180 }, { "epoch": 2.4585541513792633, "grad_norm": 
1.5202481746673584, "learning_rate": 0.0006928081261040902, "loss": 3.5903, "step": 36185 }, { "epoch": 2.4588938714499253, "grad_norm": 1.310943841934204, "learning_rate": 0.0006927656610952576, "loss": 3.6139, "step": 36190 }, { "epoch": 2.459233591520587, "grad_norm": 1.2660731077194214, "learning_rate": 0.0006927231960864248, "loss": 3.6868, "step": 36195 }, { "epoch": 2.4595733115912486, "grad_norm": 1.3519513607025146, "learning_rate": 0.0006926807310775921, "loss": 3.3882, "step": 36200 }, { "epoch": 2.4599130316619107, "grad_norm": 2.6755003929138184, "learning_rate": 0.0006926382660687593, "loss": 3.8469, "step": 36205 }, { "epoch": 2.4602527517325723, "grad_norm": 1.0861200094223022, "learning_rate": 0.0006925958010599266, "loss": 3.6097, "step": 36210 }, { "epoch": 2.460592471803234, "grad_norm": 1.4648901224136353, "learning_rate": 0.0006925533360510939, "loss": 3.363, "step": 36215 }, { "epoch": 2.460932191873896, "grad_norm": 1.6322166919708252, "learning_rate": 0.0006925108710422611, "loss": 3.788, "step": 36220 }, { "epoch": 2.4612719119445576, "grad_norm": 1.5524550676345825, "learning_rate": 0.0006924684060334285, "loss": 3.2749, "step": 36225 }, { "epoch": 2.4616116320152193, "grad_norm": 1.2477154731750488, "learning_rate": 0.0006924259410245958, "loss": 3.6737, "step": 36230 }, { "epoch": 2.4619513520858813, "grad_norm": 1.259240984916687, "learning_rate": 0.000692383476015763, "loss": 3.229, "step": 36235 }, { "epoch": 2.462291072156543, "grad_norm": 1.4542877674102783, "learning_rate": 0.0006923410110069302, "loss": 3.4665, "step": 36240 }, { "epoch": 2.4626307922272046, "grad_norm": 1.3526105880737305, "learning_rate": 0.0006922985459980976, "loss": 3.5316, "step": 36245 }, { "epoch": 2.4629705122978667, "grad_norm": 1.3266551494598389, "learning_rate": 0.0006922560809892648, "loss": 3.4138, "step": 36250 }, { "epoch": 2.4633102323685283, "grad_norm": 1.5158491134643555, "learning_rate": 0.000692213615980432, "loss": 3.3421, "step": 36255 }, 
{ "epoch": 2.46364995243919, "grad_norm": 1.4536724090576172, "learning_rate": 0.0006921711509715995, "loss": 3.4583, "step": 36260 }, { "epoch": 2.463989672509852, "grad_norm": 1.462844729423523, "learning_rate": 0.0006921286859627667, "loss": 3.4652, "step": 36265 }, { "epoch": 2.4643293925805136, "grad_norm": 1.200499415397644, "learning_rate": 0.0006920862209539339, "loss": 3.73, "step": 36270 }, { "epoch": 2.4646691126511753, "grad_norm": 1.1750056743621826, "learning_rate": 0.0006920437559451013, "loss": 3.5028, "step": 36275 }, { "epoch": 2.4650088327218374, "grad_norm": 1.35722017288208, "learning_rate": 0.0006920012909362685, "loss": 3.5036, "step": 36280 }, { "epoch": 2.465348552792499, "grad_norm": 1.0482386350631714, "learning_rate": 0.0006919588259274357, "loss": 3.5676, "step": 36285 }, { "epoch": 2.4656882728631606, "grad_norm": 1.5626307725906372, "learning_rate": 0.0006919163609186032, "loss": 3.6722, "step": 36290 }, { "epoch": 2.4660279929338227, "grad_norm": 1.2044204473495483, "learning_rate": 0.0006918738959097704, "loss": 3.2448, "step": 36295 }, { "epoch": 2.4663677130044843, "grad_norm": 1.432928204536438, "learning_rate": 0.0006918314309009377, "loss": 3.4874, "step": 36300 }, { "epoch": 2.466707433075146, "grad_norm": 0.9969077706336975, "learning_rate": 0.0006917889658921049, "loss": 3.4476, "step": 36305 }, { "epoch": 2.467047153145808, "grad_norm": 1.5000954866409302, "learning_rate": 0.0006917465008832722, "loss": 3.6254, "step": 36310 }, { "epoch": 2.4673868732164697, "grad_norm": 1.2726449966430664, "learning_rate": 0.0006917040358744395, "loss": 3.8185, "step": 36315 }, { "epoch": 2.4677265932871313, "grad_norm": 0.9837005138397217, "learning_rate": 0.0006916615708656067, "loss": 3.6476, "step": 36320 }, { "epoch": 2.4680663133577934, "grad_norm": 1.6340656280517578, "learning_rate": 0.0006916191058567741, "loss": 3.6097, "step": 36325 }, { "epoch": 2.468406033428455, "grad_norm": 1.3037447929382324, "learning_rate": 
0.0006915766408479414, "loss": 3.5319, "step": 36330 }, { "epoch": 2.4687457534991166, "grad_norm": 1.1499453783035278, "learning_rate": 0.0006915341758391086, "loss": 3.7244, "step": 36335 }, { "epoch": 2.4690854735697787, "grad_norm": 1.1472022533416748, "learning_rate": 0.0006914917108302758, "loss": 3.2739, "step": 36340 }, { "epoch": 2.4694251936404403, "grad_norm": 1.6007462739944458, "learning_rate": 0.0006914492458214432, "loss": 3.5561, "step": 36345 }, { "epoch": 2.469764913711102, "grad_norm": 1.3912361860275269, "learning_rate": 0.0006914067808126104, "loss": 3.6587, "step": 36350 }, { "epoch": 2.470104633781764, "grad_norm": 2.0383052825927734, "learning_rate": 0.0006913643158037776, "loss": 3.3639, "step": 36355 }, { "epoch": 2.4704443538524257, "grad_norm": 1.3747413158416748, "learning_rate": 0.0006913218507949451, "loss": 3.4343, "step": 36360 }, { "epoch": 2.4707840739230873, "grad_norm": 1.2142083644866943, "learning_rate": 0.0006912793857861123, "loss": 3.7368, "step": 36365 }, { "epoch": 2.4711237939937494, "grad_norm": 1.1908347606658936, "learning_rate": 0.0006912369207772795, "loss": 3.5532, "step": 36370 }, { "epoch": 2.471463514064411, "grad_norm": 1.3628897666931152, "learning_rate": 0.0006911944557684469, "loss": 3.4439, "step": 36375 }, { "epoch": 2.4718032341350726, "grad_norm": 1.546412467956543, "learning_rate": 0.0006911519907596141, "loss": 3.6932, "step": 36380 }, { "epoch": 2.4721429542057347, "grad_norm": 1.2791039943695068, "learning_rate": 0.0006911095257507813, "loss": 3.623, "step": 36385 }, { "epoch": 2.4724826742763963, "grad_norm": 1.2833747863769531, "learning_rate": 0.0006910670607419487, "loss": 3.4915, "step": 36390 }, { "epoch": 2.472822394347058, "grad_norm": 1.3410390615463257, "learning_rate": 0.000691024595733116, "loss": 3.6166, "step": 36395 }, { "epoch": 2.47316211441772, "grad_norm": 1.3817615509033203, "learning_rate": 0.0006909821307242832, "loss": 3.4108, "step": 36400 }, { "epoch": 2.4735018344883817, 
"grad_norm": 1.3036830425262451, "learning_rate": 0.0006909396657154505, "loss": 3.6191, "step": 36405 }, { "epoch": 2.4738415545590433, "grad_norm": 1.3165874481201172, "learning_rate": 0.0006908972007066178, "loss": 3.6956, "step": 36410 }, { "epoch": 2.474181274629705, "grad_norm": 1.502355933189392, "learning_rate": 0.000690854735697785, "loss": 3.5222, "step": 36415 }, { "epoch": 2.474520994700367, "grad_norm": 1.2698863744735718, "learning_rate": 0.0006908122706889523, "loss": 3.5292, "step": 36420 }, { "epoch": 2.4748607147710286, "grad_norm": 1.2628329992294312, "learning_rate": 0.0006907698056801196, "loss": 3.4029, "step": 36425 }, { "epoch": 2.4752004348416903, "grad_norm": 1.0810606479644775, "learning_rate": 0.0006907273406712869, "loss": 3.5639, "step": 36430 }, { "epoch": 2.4755401549123524, "grad_norm": 1.3220642805099487, "learning_rate": 0.0006906848756624542, "loss": 3.6612, "step": 36435 }, { "epoch": 2.475879874983014, "grad_norm": 1.364643931388855, "learning_rate": 0.0006906424106536215, "loss": 3.4725, "step": 36440 }, { "epoch": 2.4762195950536756, "grad_norm": 1.432445764541626, "learning_rate": 0.0006905999456447887, "loss": 3.7285, "step": 36445 }, { "epoch": 2.4765593151243377, "grad_norm": 1.0067635774612427, "learning_rate": 0.000690557480635956, "loss": 3.6339, "step": 36450 }, { "epoch": 2.4768990351949993, "grad_norm": 1.3493400812149048, "learning_rate": 0.0006905150156271232, "loss": 3.676, "step": 36455 }, { "epoch": 2.477238755265661, "grad_norm": 1.3371556997299194, "learning_rate": 0.0006904725506182905, "loss": 3.5536, "step": 36460 }, { "epoch": 2.477578475336323, "grad_norm": 1.281294822692871, "learning_rate": 0.0006904300856094579, "loss": 3.3912, "step": 36465 }, { "epoch": 2.4779181954069847, "grad_norm": 1.2744958400726318, "learning_rate": 0.0006903876206006251, "loss": 3.5072, "step": 36470 }, { "epoch": 2.4782579154776463, "grad_norm": 1.153552770614624, "learning_rate": 0.0006903451555917924, "loss": 3.656, 
"step": 36475 }, { "epoch": 2.4785976355483084, "grad_norm": 1.3060870170593262, "learning_rate": 0.0006903026905829597, "loss": 3.3644, "step": 36480 }, { "epoch": 2.47893735561897, "grad_norm": 1.5345773696899414, "learning_rate": 0.0006902602255741269, "loss": 3.3881, "step": 36485 }, { "epoch": 2.4792770756896316, "grad_norm": 1.5224255323410034, "learning_rate": 0.0006902177605652941, "loss": 3.4272, "step": 36490 }, { "epoch": 2.4796167957602937, "grad_norm": 1.2430273294448853, "learning_rate": 0.0006901752955564615, "loss": 3.6665, "step": 36495 }, { "epoch": 2.4799565158309553, "grad_norm": 1.398357629776001, "learning_rate": 0.0006901328305476288, "loss": 3.3141, "step": 36500 }, { "epoch": 2.480296235901617, "grad_norm": 1.05787193775177, "learning_rate": 0.000690090365538796, "loss": 3.625, "step": 36505 }, { "epoch": 2.4806359559722786, "grad_norm": 1.0348944664001465, "learning_rate": 0.0006900479005299634, "loss": 3.4891, "step": 36510 }, { "epoch": 2.4809756760429407, "grad_norm": 1.378474235534668, "learning_rate": 0.0006900054355211306, "loss": 3.3222, "step": 36515 }, { "epoch": 2.4813153961136023, "grad_norm": 1.7380496263504028, "learning_rate": 0.0006899629705122978, "loss": 3.4647, "step": 36520 }, { "epoch": 2.481655116184264, "grad_norm": 1.3712444305419922, "learning_rate": 0.0006899205055034652, "loss": 3.7026, "step": 36525 }, { "epoch": 2.481994836254926, "grad_norm": 1.3868649005889893, "learning_rate": 0.0006898780404946324, "loss": 3.5578, "step": 36530 }, { "epoch": 2.4823345563255876, "grad_norm": 1.1532150506973267, "learning_rate": 0.0006898355754857997, "loss": 3.6336, "step": 36535 }, { "epoch": 2.4826742763962493, "grad_norm": 1.0581095218658447, "learning_rate": 0.000689793110476967, "loss": 3.3091, "step": 36540 }, { "epoch": 2.4830139964669113, "grad_norm": 1.3119961023330688, "learning_rate": 0.0006897506454681343, "loss": 3.4801, "step": 36545 }, { "epoch": 2.483353716537573, "grad_norm": 1.6458885669708252, 
"learning_rate": 0.0006897081804593015, "loss": 3.4547, "step": 36550 }, { "epoch": 2.4836934366082346, "grad_norm": 1.334623098373413, "learning_rate": 0.0006896657154504688, "loss": 3.4186, "step": 36555 }, { "epoch": 2.4840331566788967, "grad_norm": 1.4424247741699219, "learning_rate": 0.0006896232504416361, "loss": 3.3293, "step": 36560 }, { "epoch": 2.4843728767495583, "grad_norm": 1.4977364540100098, "learning_rate": 0.0006895807854328033, "loss": 3.3717, "step": 36565 }, { "epoch": 2.48471259682022, "grad_norm": 1.3742716312408447, "learning_rate": 0.0006895383204239707, "loss": 3.2721, "step": 36570 }, { "epoch": 2.485052316890882, "grad_norm": 1.320192575454712, "learning_rate": 0.000689495855415138, "loss": 3.7774, "step": 36575 }, { "epoch": 2.4853920369615436, "grad_norm": 1.5277328491210938, "learning_rate": 0.0006894533904063052, "loss": 3.7707, "step": 36580 }, { "epoch": 2.4857317570322053, "grad_norm": 1.1119384765625, "learning_rate": 0.0006894109253974725, "loss": 3.526, "step": 36585 }, { "epoch": 2.4860714771028674, "grad_norm": 1.2679169178009033, "learning_rate": 0.0006893684603886397, "loss": 3.6058, "step": 36590 }, { "epoch": 2.486411197173529, "grad_norm": 1.5570552349090576, "learning_rate": 0.000689325995379807, "loss": 3.2898, "step": 36595 }, { "epoch": 2.4867509172441906, "grad_norm": 1.2817565202713013, "learning_rate": 0.0006892835303709743, "loss": 3.6385, "step": 36600 }, { "epoch": 2.4870906373148527, "grad_norm": 1.3225024938583374, "learning_rate": 0.0006892410653621416, "loss": 3.3866, "step": 36605 }, { "epoch": 2.4874303573855143, "grad_norm": 1.350062608718872, "learning_rate": 0.0006891986003533089, "loss": 3.6739, "step": 36610 }, { "epoch": 2.487770077456176, "grad_norm": 1.1837927103042603, "learning_rate": 0.0006891561353444762, "loss": 3.6993, "step": 36615 }, { "epoch": 2.488109797526838, "grad_norm": 1.5913565158843994, "learning_rate": 0.0006891136703356434, "loss": 3.5087, "step": 36620 }, { "epoch": 
2.4884495175974997, "grad_norm": 1.2385956048965454, "learning_rate": 0.0006890712053268107, "loss": 3.667, "step": 36625 }, { "epoch": 2.4887892376681613, "grad_norm": 1.3884602785110474, "learning_rate": 0.000689028740317978, "loss": 3.3868, "step": 36630 }, { "epoch": 2.4891289577388234, "grad_norm": 2.078152656555176, "learning_rate": 0.0006889862753091452, "loss": 3.6229, "step": 36635 }, { "epoch": 2.489468677809485, "grad_norm": 1.3748376369476318, "learning_rate": 0.0006889438103003127, "loss": 3.5288, "step": 36640 }, { "epoch": 2.4898083978801466, "grad_norm": 1.1825480461120605, "learning_rate": 0.0006889013452914799, "loss": 3.6342, "step": 36645 }, { "epoch": 2.4901481179508087, "grad_norm": 1.23631751537323, "learning_rate": 0.0006888588802826471, "loss": 3.4537, "step": 36650 }, { "epoch": 2.4904878380214703, "grad_norm": 1.310520887374878, "learning_rate": 0.0006888164152738144, "loss": 3.5589, "step": 36655 }, { "epoch": 2.490827558092132, "grad_norm": 1.351293683052063, "learning_rate": 0.0006887739502649817, "loss": 3.2449, "step": 36660 }, { "epoch": 2.491167278162794, "grad_norm": 1.3320021629333496, "learning_rate": 0.0006887314852561489, "loss": 3.3445, "step": 36665 }, { "epoch": 2.4915069982334557, "grad_norm": 1.214336633682251, "learning_rate": 0.0006886890202473162, "loss": 3.3807, "step": 36670 }, { "epoch": 2.4918467183041173, "grad_norm": 1.3149924278259277, "learning_rate": 0.0006886465552384836, "loss": 3.3874, "step": 36675 }, { "epoch": 2.4921864383747794, "grad_norm": 1.4651161432266235, "learning_rate": 0.0006886040902296508, "loss": 3.4242, "step": 36680 }, { "epoch": 2.492526158445441, "grad_norm": 1.3727378845214844, "learning_rate": 0.0006885616252208181, "loss": 3.3625, "step": 36685 }, { "epoch": 2.4928658785161026, "grad_norm": 1.4545278549194336, "learning_rate": 0.0006885191602119853, "loss": 3.2946, "step": 36690 }, { "epoch": 2.4932055985867647, "grad_norm": 1.1537604331970215, "learning_rate": 0.0006884766952031526, 
"loss": 3.6353, "step": 36695 }, { "epoch": 2.4935453186574263, "grad_norm": 1.5492147207260132, "learning_rate": 0.0006884342301943199, "loss": 3.5778, "step": 36700 }, { "epoch": 2.493885038728088, "grad_norm": 1.6189591884613037, "learning_rate": 0.0006883917651854871, "loss": 3.3111, "step": 36705 }, { "epoch": 2.49422475879875, "grad_norm": 1.2783452272415161, "learning_rate": 0.0006883493001766545, "loss": 3.6343, "step": 36710 }, { "epoch": 2.4945644788694117, "grad_norm": 1.0862585306167603, "learning_rate": 0.0006883068351678218, "loss": 3.7953, "step": 36715 }, { "epoch": 2.4949041989400733, "grad_norm": 1.312441110610962, "learning_rate": 0.000688264370158989, "loss": 3.4205, "step": 36720 }, { "epoch": 2.4952439190107354, "grad_norm": 1.5226001739501953, "learning_rate": 0.0006882219051501563, "loss": 3.23, "step": 36725 }, { "epoch": 2.495583639081397, "grad_norm": 3.1545372009277344, "learning_rate": 0.0006881794401413236, "loss": 3.5633, "step": 36730 }, { "epoch": 2.4959233591520587, "grad_norm": 1.7381364107131958, "learning_rate": 0.0006881369751324908, "loss": 3.8245, "step": 36735 }, { "epoch": 2.4962630792227207, "grad_norm": 1.4192801713943481, "learning_rate": 0.000688094510123658, "loss": 3.3958, "step": 36740 }, { "epoch": 2.4966027992933824, "grad_norm": 1.4097392559051514, "learning_rate": 0.0006880520451148255, "loss": 3.3972, "step": 36745 }, { "epoch": 2.496942519364044, "grad_norm": 1.1453427076339722, "learning_rate": 0.0006880095801059927, "loss": 3.6506, "step": 36750 }, { "epoch": 2.4972822394347056, "grad_norm": 1.3279649019241333, "learning_rate": 0.0006879671150971599, "loss": 3.303, "step": 36755 }, { "epoch": 2.4976219595053677, "grad_norm": 1.129023551940918, "learning_rate": 0.0006879246500883273, "loss": 3.3966, "step": 36760 }, { "epoch": 2.4979616795760293, "grad_norm": 1.2506556510925293, "learning_rate": 0.0006878821850794945, "loss": 3.4563, "step": 36765 }, { "epoch": 2.498301399646691, "grad_norm": 
1.2881726026535034, "learning_rate": 0.0006878397200706617, "loss": 3.5593, "step": 36770 }, { "epoch": 2.498641119717353, "grad_norm": 1.1615411043167114, "learning_rate": 0.0006877972550618292, "loss": 3.318, "step": 36775 }, { "epoch": 2.4989808397880147, "grad_norm": 1.1629003286361694, "learning_rate": 0.0006877547900529964, "loss": 3.3258, "step": 36780 }, { "epoch": 2.4993205598586763, "grad_norm": 1.412526249885559, "learning_rate": 0.0006877123250441636, "loss": 3.5573, "step": 36785 }, { "epoch": 2.4996602799293384, "grad_norm": 1.2766958475112915, "learning_rate": 0.000687669860035331, "loss": 3.4406, "step": 36790 }, { "epoch": 2.5, "grad_norm": 1.3329180479049683, "learning_rate": 0.0006876273950264982, "loss": 3.4596, "step": 36795 }, { "epoch": 2.5003397200706616, "grad_norm": 1.5406343936920166, "learning_rate": 0.0006875849300176654, "loss": 3.6617, "step": 36800 }, { "epoch": 2.5006794401413237, "grad_norm": 1.2632948160171509, "learning_rate": 0.0006875424650088327, "loss": 3.736, "step": 36805 }, { "epoch": 2.5010191602119853, "grad_norm": 1.8924286365509033, "learning_rate": 0.0006875, "loss": 3.6648, "step": 36810 }, { "epoch": 2.501358880282647, "grad_norm": 1.2328306436538696, "learning_rate": 0.0006874575349911673, "loss": 3.3647, "step": 36815 }, { "epoch": 2.5016986003533086, "grad_norm": 1.3719329833984375, "learning_rate": 0.0006874150699823346, "loss": 3.6785, "step": 36820 }, { "epoch": 2.5020383204239707, "grad_norm": 1.2601732015609741, "learning_rate": 0.0006873726049735019, "loss": 3.4864, "step": 36825 }, { "epoch": 2.5023780404946323, "grad_norm": 1.4852745532989502, "learning_rate": 0.0006873301399646691, "loss": 3.5428, "step": 36830 }, { "epoch": 2.502717760565294, "grad_norm": 1.470140814781189, "learning_rate": 0.0006872876749558364, "loss": 3.5026, "step": 36835 }, { "epoch": 2.503057480635956, "grad_norm": 1.0581393241882324, "learning_rate": 0.0006872452099470036, "loss": 3.6352, "step": 36840 }, { "epoch": 
2.5033972007066176, "grad_norm": 1.9333480596542358, "learning_rate": 0.0006872027449381709, "loss": 3.5459, "step": 36845 }, { "epoch": 2.5037369207772793, "grad_norm": 1.1744228601455688, "learning_rate": 0.0006871602799293383, "loss": 3.4819, "step": 36850 }, { "epoch": 2.5040766408479413, "grad_norm": 1.7078845500946045, "learning_rate": 0.0006871178149205055, "loss": 3.3268, "step": 36855 }, { "epoch": 2.504416360918603, "grad_norm": 1.381579041481018, "learning_rate": 0.0006870753499116728, "loss": 3.6923, "step": 36860 }, { "epoch": 2.5047560809892646, "grad_norm": 1.5024755001068115, "learning_rate": 0.0006870328849028401, "loss": 3.6572, "step": 36865 }, { "epoch": 2.5050958010599267, "grad_norm": 1.1732993125915527, "learning_rate": 0.0006869904198940073, "loss": 3.2999, "step": 36870 }, { "epoch": 2.5054355211305883, "grad_norm": 1.3349395990371704, "learning_rate": 0.0006869479548851745, "loss": 3.3671, "step": 36875 }, { "epoch": 2.50577524120125, "grad_norm": 1.0876657962799072, "learning_rate": 0.000686905489876342, "loss": 3.4749, "step": 36880 }, { "epoch": 2.506114961271912, "grad_norm": 1.342054009437561, "learning_rate": 0.0006868630248675092, "loss": 3.6002, "step": 36885 }, { "epoch": 2.5064546813425737, "grad_norm": 1.770635724067688, "learning_rate": 0.0006868205598586764, "loss": 3.4676, "step": 36890 }, { "epoch": 2.5067944014132353, "grad_norm": 1.3643286228179932, "learning_rate": 0.0006867780948498438, "loss": 3.3121, "step": 36895 }, { "epoch": 2.5071341214838974, "grad_norm": 1.6047084331512451, "learning_rate": 0.000686735629841011, "loss": 3.3545, "step": 36900 }, { "epoch": 2.507473841554559, "grad_norm": 1.1574523448944092, "learning_rate": 0.0006866931648321782, "loss": 3.5579, "step": 36905 }, { "epoch": 2.5078135616252206, "grad_norm": 1.014357089996338, "learning_rate": 0.0006866506998233456, "loss": 3.4551, "step": 36910 }, { "epoch": 2.5081532816958827, "grad_norm": 1.5939226150512695, "learning_rate": 0.0006866082348145129, 
"loss": 3.5365, "step": 36915 }, { "epoch": 2.5084930017665443, "grad_norm": 1.1182596683502197, "learning_rate": 0.0006865657698056801, "loss": 3.3666, "step": 36920 }, { "epoch": 2.508832721837206, "grad_norm": 1.3735445737838745, "learning_rate": 0.0006865233047968475, "loss": 3.6964, "step": 36925 }, { "epoch": 2.509172441907868, "grad_norm": 1.1331936120986938, "learning_rate": 0.0006864808397880147, "loss": 3.3378, "step": 36930 }, { "epoch": 2.5095121619785297, "grad_norm": 1.5684138536453247, "learning_rate": 0.0006864383747791819, "loss": 3.7014, "step": 36935 }, { "epoch": 2.5098518820491913, "grad_norm": 1.1690208911895752, "learning_rate": 0.0006863959097703492, "loss": 3.6612, "step": 36940 }, { "epoch": 2.5101916021198534, "grad_norm": 1.9179099798202515, "learning_rate": 0.0006863534447615165, "loss": 3.6254, "step": 36945 }, { "epoch": 2.510531322190515, "grad_norm": 1.7496472597122192, "learning_rate": 0.0006863109797526838, "loss": 3.4014, "step": 36950 }, { "epoch": 2.5108710422611766, "grad_norm": 1.4007853269577026, "learning_rate": 0.0006862685147438511, "loss": 3.5976, "step": 36955 }, { "epoch": 2.5112107623318387, "grad_norm": 1.619987964630127, "learning_rate": 0.0006862260497350184, "loss": 3.5152, "step": 36960 }, { "epoch": 2.5115504824025003, "grad_norm": 1.5196226835250854, "learning_rate": 0.0006861835847261856, "loss": 3.5022, "step": 36965 }, { "epoch": 2.511890202473162, "grad_norm": 1.2064507007598877, "learning_rate": 0.0006861411197173529, "loss": 3.5019, "step": 36970 }, { "epoch": 2.512229922543824, "grad_norm": 1.1357744932174683, "learning_rate": 0.0006860986547085201, "loss": 3.7446, "step": 36975 }, { "epoch": 2.5125696426144857, "grad_norm": 1.2684413194656372, "learning_rate": 0.0006860561896996874, "loss": 3.4358, "step": 36980 }, { "epoch": 2.5129093626851473, "grad_norm": 1.449074625968933, "learning_rate": 0.0006860137246908548, "loss": 3.5688, "step": 36985 }, { "epoch": 2.5132490827558094, "grad_norm": 
1.317886471748352, "learning_rate": 0.000685971259682022, "loss": 3.845, "step": 36990 }, { "epoch": 2.513588802826471, "grad_norm": 1.2700587511062622, "learning_rate": 0.0006859287946731894, "loss": 3.8317, "step": 36995 }, { "epoch": 2.5139285228971326, "grad_norm": 1.2939103841781616, "learning_rate": 0.0006858863296643566, "loss": 3.5454, "step": 37000 }, { "epoch": 2.5142682429677947, "grad_norm": 3.449033737182617, "learning_rate": 0.0006858438646555238, "loss": 3.8964, "step": 37005 }, { "epoch": 2.5146079630384564, "grad_norm": 1.0735710859298706, "learning_rate": 0.0006858013996466912, "loss": 3.3737, "step": 37010 }, { "epoch": 2.514947683109118, "grad_norm": 1.217881202697754, "learning_rate": 0.0006857589346378584, "loss": 3.5016, "step": 37015 }, { "epoch": 2.51528740317978, "grad_norm": 1.1505094766616821, "learning_rate": 0.0006857164696290257, "loss": 3.3662, "step": 37020 }, { "epoch": 2.5156271232504417, "grad_norm": 1.3988244533538818, "learning_rate": 0.0006856740046201931, "loss": 3.3751, "step": 37025 }, { "epoch": 2.5159668433211033, "grad_norm": 1.0297006368637085, "learning_rate": 0.0006856315396113603, "loss": 3.3641, "step": 37030 }, { "epoch": 2.5163065633917654, "grad_norm": 1.2202695608139038, "learning_rate": 0.0006855890746025275, "loss": 3.4388, "step": 37035 }, { "epoch": 2.516646283462427, "grad_norm": 1.1223148107528687, "learning_rate": 0.0006855466095936948, "loss": 3.5628, "step": 37040 }, { "epoch": 2.5169860035330887, "grad_norm": 1.3899593353271484, "learning_rate": 0.0006855041445848621, "loss": 3.4191, "step": 37045 }, { "epoch": 2.5173257236037507, "grad_norm": 1.4556747674942017, "learning_rate": 0.0006854616795760293, "loss": 3.7032, "step": 37050 }, { "epoch": 2.5176654436744124, "grad_norm": 1.5814201831817627, "learning_rate": 0.0006854192145671967, "loss": 3.609, "step": 37055 }, { "epoch": 2.518005163745074, "grad_norm": 2.639352560043335, "learning_rate": 0.000685376749558364, "loss": 3.3484, "step": 37060 }, { 
"epoch": 2.518344883815736, "grad_norm": 1.1896936893463135, "learning_rate": 0.0006853342845495312, "loss": 3.6898, "step": 37065 }, { "epoch": 2.5186846038863977, "grad_norm": 1.0390377044677734, "learning_rate": 0.0006852918195406985, "loss": 3.5634, "step": 37070 }, { "epoch": 2.5190243239570593, "grad_norm": 1.1809930801391602, "learning_rate": 0.0006852493545318657, "loss": 3.1865, "step": 37075 }, { "epoch": 2.5193640440277214, "grad_norm": 1.5645149946212769, "learning_rate": 0.000685206889523033, "loss": 3.6374, "step": 37080 }, { "epoch": 2.519703764098383, "grad_norm": 1.418944001197815, "learning_rate": 0.0006851644245142003, "loss": 3.4341, "step": 37085 }, { "epoch": 2.5200434841690447, "grad_norm": 21.677448272705078, "learning_rate": 0.0006851219595053676, "loss": 3.5918, "step": 37090 }, { "epoch": 2.5203832042397067, "grad_norm": 1.7434598207473755, "learning_rate": 0.0006850794944965349, "loss": 3.7427, "step": 37095 }, { "epoch": 2.5207229243103684, "grad_norm": 1.3768932819366455, "learning_rate": 0.0006850370294877022, "loss": 3.4344, "step": 37100 }, { "epoch": 2.52106264438103, "grad_norm": 1.2553117275238037, "learning_rate": 0.0006849945644788694, "loss": 3.2836, "step": 37105 }, { "epoch": 2.521402364451692, "grad_norm": 1.2621283531188965, "learning_rate": 0.0006849520994700367, "loss": 3.4495, "step": 37110 }, { "epoch": 2.5217420845223537, "grad_norm": 2.0370829105377197, "learning_rate": 0.000684909634461204, "loss": 3.598, "step": 37115 }, { "epoch": 2.5220818045930153, "grad_norm": 1.3592642545700073, "learning_rate": 0.0006848671694523712, "loss": 3.642, "step": 37120 }, { "epoch": 2.522421524663677, "grad_norm": 1.4328025579452515, "learning_rate": 0.0006848247044435386, "loss": 3.6771, "step": 37125 }, { "epoch": 2.522761244734339, "grad_norm": 1.7200733423233032, "learning_rate": 0.0006847822394347059, "loss": 3.58, "step": 37130 }, { "epoch": 2.5231009648050007, "grad_norm": 1.4089125394821167, "learning_rate": 
0.0006847397744258731, "loss": 3.8226, "step": 37135 }, { "epoch": 2.5234406848756623, "grad_norm": 3.186164140701294, "learning_rate": 0.0006846973094170403, "loss": 3.6939, "step": 37140 }, { "epoch": 2.5237804049463244, "grad_norm": 1.2599388360977173, "learning_rate": 0.0006846548444082077, "loss": 3.5024, "step": 37145 }, { "epoch": 2.524120125016986, "grad_norm": 1.7415597438812256, "learning_rate": 0.0006846123793993749, "loss": 3.6973, "step": 37150 }, { "epoch": 2.5244598450876476, "grad_norm": 1.296648383140564, "learning_rate": 0.0006845699143905421, "loss": 3.581, "step": 37155 }, { "epoch": 2.5247995651583093, "grad_norm": 1.2531391382217407, "learning_rate": 0.0006845274493817096, "loss": 3.4252, "step": 37160 }, { "epoch": 2.5251392852289714, "grad_norm": 1.4252827167510986, "learning_rate": 0.0006844849843728768, "loss": 3.7246, "step": 37165 }, { "epoch": 2.525479005299633, "grad_norm": 1.4999923706054688, "learning_rate": 0.000684442519364044, "loss": 3.4617, "step": 37170 }, { "epoch": 2.5258187253702946, "grad_norm": 1.6667959690093994, "learning_rate": 0.0006844000543552114, "loss": 3.3944, "step": 37175 }, { "epoch": 2.5261584454409567, "grad_norm": 2.7242631912231445, "learning_rate": 0.0006843575893463786, "loss": 3.4865, "step": 37180 }, { "epoch": 2.5264981655116183, "grad_norm": 1.1168785095214844, "learning_rate": 0.0006843151243375458, "loss": 3.4928, "step": 37185 }, { "epoch": 2.52683788558228, "grad_norm": 1.0849090814590454, "learning_rate": 0.0006842726593287131, "loss": 3.4599, "step": 37190 }, { "epoch": 2.527177605652942, "grad_norm": 1.4881234169006348, "learning_rate": 0.0006842301943198805, "loss": 3.7222, "step": 37195 }, { "epoch": 2.5275173257236037, "grad_norm": 1.984581708908081, "learning_rate": 0.0006841877293110477, "loss": 3.3809, "step": 37200 }, { "epoch": 2.5278570457942653, "grad_norm": 1.1450554132461548, "learning_rate": 0.000684145264302215, "loss": 3.6519, "step": 37205 }, { "epoch": 2.5281967658649274, 
"grad_norm": 1.564503788948059, "learning_rate": 0.0006841027992933823, "loss": 3.4931, "step": 37210 }, { "epoch": 2.528536485935589, "grad_norm": 1.4942710399627686, "learning_rate": 0.0006840603342845495, "loss": 3.3887, "step": 37215 }, { "epoch": 2.5288762060062506, "grad_norm": 1.3782466650009155, "learning_rate": 0.0006840178692757168, "loss": 3.6397, "step": 37220 }, { "epoch": 2.5292159260769127, "grad_norm": 1.2328745126724243, "learning_rate": 0.000683975404266884, "loss": 3.3716, "step": 37225 }, { "epoch": 2.5295556461475743, "grad_norm": 1.447065830230713, "learning_rate": 0.0006839329392580514, "loss": 3.3788, "step": 37230 }, { "epoch": 2.529895366218236, "grad_norm": 1.8768391609191895, "learning_rate": 0.0006838904742492187, "loss": 3.3842, "step": 37235 }, { "epoch": 2.530235086288898, "grad_norm": 1.6562049388885498, "learning_rate": 0.0006838480092403859, "loss": 3.6062, "step": 37240 }, { "epoch": 2.5305748063595597, "grad_norm": 2.0979011058807373, "learning_rate": 0.0006838055442315532, "loss": 3.3825, "step": 37245 }, { "epoch": 2.5309145264302213, "grad_norm": 1.2164126634597778, "learning_rate": 0.0006837630792227205, "loss": 3.4455, "step": 37250 }, { "epoch": 2.5312542465008834, "grad_norm": 1.3419170379638672, "learning_rate": 0.0006837206142138877, "loss": 3.6773, "step": 37255 }, { "epoch": 2.531593966571545, "grad_norm": 1.5111395120620728, "learning_rate": 0.000683678149205055, "loss": 3.6503, "step": 37260 }, { "epoch": 2.5319336866422066, "grad_norm": 2.6463332176208496, "learning_rate": 0.0006836356841962224, "loss": 3.4605, "step": 37265 }, { "epoch": 2.5322734067128687, "grad_norm": 1.580416202545166, "learning_rate": 0.0006835932191873896, "loss": 3.5241, "step": 37270 }, { "epoch": 2.5326131267835303, "grad_norm": 1.3932522535324097, "learning_rate": 0.0006835507541785568, "loss": 3.5406, "step": 37275 }, { "epoch": 2.532952846854192, "grad_norm": 3.6314949989318848, "learning_rate": 0.0006835082891697242, "loss": 3.5198, 
"step": 37280 }, { "epoch": 2.533292566924854, "grad_norm": 1.7326455116271973, "learning_rate": 0.0006834658241608914, "loss": 3.3949, "step": 37285 }, { "epoch": 2.5336322869955157, "grad_norm": 1.2548497915267944, "learning_rate": 0.0006834233591520586, "loss": 3.5385, "step": 37290 }, { "epoch": 2.5339720070661773, "grad_norm": 1.1515834331512451, "learning_rate": 0.000683380894143226, "loss": 3.3754, "step": 37295 }, { "epoch": 2.5343117271368394, "grad_norm": 1.5951591730117798, "learning_rate": 0.0006833384291343933, "loss": 3.4, "step": 37300 }, { "epoch": 2.534651447207501, "grad_norm": 1.2535436153411865, "learning_rate": 0.0006832959641255605, "loss": 3.6163, "step": 37305 }, { "epoch": 2.5349911672781626, "grad_norm": 1.1988486051559448, "learning_rate": 0.0006832534991167279, "loss": 3.3706, "step": 37310 }, { "epoch": 2.5353308873488247, "grad_norm": 1.7406816482543945, "learning_rate": 0.0006832110341078951, "loss": 3.247, "step": 37315 }, { "epoch": 2.5356706074194864, "grad_norm": 1.1345397233963013, "learning_rate": 0.0006831685690990623, "loss": 3.5752, "step": 37320 }, { "epoch": 2.536010327490148, "grad_norm": 1.346472144126892, "learning_rate": 0.0006831261040902296, "loss": 3.4514, "step": 37325 }, { "epoch": 2.53635004756081, "grad_norm": 1.312490463256836, "learning_rate": 0.0006830836390813969, "loss": 3.6409, "step": 37330 }, { "epoch": 2.5366897676314717, "grad_norm": 1.2469514608383179, "learning_rate": 0.0006830411740725643, "loss": 3.5434, "step": 37335 }, { "epoch": 2.5370294877021333, "grad_norm": 1.886361837387085, "learning_rate": 0.0006829987090637315, "loss": 3.6468, "step": 37340 }, { "epoch": 2.5373692077727954, "grad_norm": 1.5537463426589966, "learning_rate": 0.0006829562440548988, "loss": 3.8297, "step": 37345 }, { "epoch": 2.537708927843457, "grad_norm": 1.2632168531417847, "learning_rate": 0.0006829137790460661, "loss": 3.3062, "step": 37350 }, { "epoch": 2.5380486479141187, "grad_norm": 1.044006109237671, 
"learning_rate": 0.0006828713140372333, "loss": 3.7265, "step": 37355 }, { "epoch": 2.5383883679847807, "grad_norm": 1.317194938659668, "learning_rate": 0.0006828288490284006, "loss": 3.5454, "step": 37360 }, { "epoch": 2.5387280880554424, "grad_norm": 1.2087032794952393, "learning_rate": 0.0006827948770213345, "loss": 3.4151, "step": 37365 }, { "epoch": 2.539067808126104, "grad_norm": 1.566637396812439, "learning_rate": 0.0006827524120125017, "loss": 3.5259, "step": 37370 }, { "epoch": 2.539407528196766, "grad_norm": 1.5748076438903809, "learning_rate": 0.0006827099470036689, "loss": 3.6171, "step": 37375 }, { "epoch": 2.5397472482674277, "grad_norm": 1.0954149961471558, "learning_rate": 0.0006826674819948363, "loss": 3.4978, "step": 37380 }, { "epoch": 2.5400869683380893, "grad_norm": 1.2551934719085693, "learning_rate": 0.0006826250169860035, "loss": 3.5752, "step": 37385 }, { "epoch": 2.5404266884087514, "grad_norm": 1.999128818511963, "learning_rate": 0.0006825825519771708, "loss": 3.6562, "step": 37390 }, { "epoch": 2.540766408479413, "grad_norm": 1.9774384498596191, "learning_rate": 0.0006825400869683382, "loss": 3.6333, "step": 37395 }, { "epoch": 2.5411061285500747, "grad_norm": 1.4287505149841309, "learning_rate": 0.0006824976219595054, "loss": 3.7791, "step": 37400 }, { "epoch": 2.5414458486207367, "grad_norm": 1.2228450775146484, "learning_rate": 0.0006824551569506726, "loss": 3.2299, "step": 37405 }, { "epoch": 2.5417855686913984, "grad_norm": 1.3263635635375977, "learning_rate": 0.00068241269194184, "loss": 3.567, "step": 37410 }, { "epoch": 2.54212528876206, "grad_norm": 1.2394496202468872, "learning_rate": 0.0006823702269330072, "loss": 3.8365, "step": 37415 }, { "epoch": 2.542465008832722, "grad_norm": 1.6045504808425903, "learning_rate": 0.0006823277619241744, "loss": 3.7959, "step": 37420 }, { "epoch": 2.5428047289033837, "grad_norm": 1.3865423202514648, "learning_rate": 0.0006822852969153418, "loss": 3.6335, "step": 37425 }, { "epoch": 
2.5431444489740453, "grad_norm": 1.167803168296814, "learning_rate": 0.0006822428319065091, "loss": 3.6039, "step": 37430 }, { "epoch": 2.5434841690447074, "grad_norm": 1.7410860061645508, "learning_rate": 0.0006822003668976763, "loss": 3.5527, "step": 37435 }, { "epoch": 2.543823889115369, "grad_norm": 1.4510279893875122, "learning_rate": 0.0006821579018888436, "loss": 3.4629, "step": 37440 }, { "epoch": 2.5441636091860307, "grad_norm": 1.458910346031189, "learning_rate": 0.0006821154368800109, "loss": 3.3611, "step": 37445 }, { "epoch": 2.5445033292566928, "grad_norm": 2.5578718185424805, "learning_rate": 0.0006820729718711781, "loss": 3.4617, "step": 37450 }, { "epoch": 2.5448430493273544, "grad_norm": 1.8839722871780396, "learning_rate": 0.0006820305068623454, "loss": 3.4971, "step": 37455 }, { "epoch": 2.545182769398016, "grad_norm": 1.226304531097412, "learning_rate": 0.0006819880418535128, "loss": 3.5525, "step": 37460 }, { "epoch": 2.5455224894686777, "grad_norm": 1.4840372800827026, "learning_rate": 0.00068194557684468, "loss": 3.5916, "step": 37465 }, { "epoch": 2.5458622095393397, "grad_norm": 1.2740674018859863, "learning_rate": 0.0006819031118358473, "loss": 3.6184, "step": 37470 }, { "epoch": 2.5462019296100014, "grad_norm": 3.60670804977417, "learning_rate": 0.0006818606468270145, "loss": 3.4155, "step": 37475 }, { "epoch": 2.546541649680663, "grad_norm": 1.3212717771530151, "learning_rate": 0.0006818266748199485, "loss": 3.3168, "step": 37480 }, { "epoch": 2.546881369751325, "grad_norm": 1.1407380104064941, "learning_rate": 0.0006817842098111157, "loss": 3.7936, "step": 37485 }, { "epoch": 2.5472210898219867, "grad_norm": 1.343122959136963, "learning_rate": 0.0006817417448022829, "loss": 3.7622, "step": 37490 }, { "epoch": 2.5475608098926483, "grad_norm": 1.3026206493377686, "learning_rate": 0.0006816992797934503, "loss": 3.4359, "step": 37495 }, { "epoch": 2.54790052996331, "grad_norm": 1.3581881523132324, "learning_rate": 0.0006816568147846175, 
"loss": 3.5633, "step": 37500 }, { "epoch": 2.548240250033972, "grad_norm": 1.311859369277954, "learning_rate": 0.0006816143497757847, "loss": 3.703, "step": 37505 }, { "epoch": 2.5485799701046337, "grad_norm": 1.72725248336792, "learning_rate": 0.000681571884766952, "loss": 3.6587, "step": 37510 }, { "epoch": 2.5489196901752953, "grad_norm": 1.2514100074768066, "learning_rate": 0.0006815294197581194, "loss": 3.5471, "step": 37515 }, { "epoch": 2.5492594102459574, "grad_norm": 1.1267051696777344, "learning_rate": 0.0006814869547492866, "loss": 3.4266, "step": 37520 }, { "epoch": 2.549599130316619, "grad_norm": 1.6904842853546143, "learning_rate": 0.0006814444897404539, "loss": 3.7294, "step": 37525 }, { "epoch": 2.5499388503872806, "grad_norm": 1.575927495956421, "learning_rate": 0.0006814020247316212, "loss": 3.5227, "step": 37530 }, { "epoch": 2.5502785704579427, "grad_norm": 1.189525842666626, "learning_rate": 0.0006813595597227884, "loss": 3.6957, "step": 37535 }, { "epoch": 2.5506182905286043, "grad_norm": 1.2547171115875244, "learning_rate": 0.0006813170947139557, "loss": 3.5829, "step": 37540 }, { "epoch": 2.550958010599266, "grad_norm": 1.2317951917648315, "learning_rate": 0.000681274629705123, "loss": 3.518, "step": 37545 }, { "epoch": 2.551297730669928, "grad_norm": 1.4859521389007568, "learning_rate": 0.0006812321646962903, "loss": 3.3143, "step": 37550 }, { "epoch": 2.5516374507405897, "grad_norm": 1.4220468997955322, "learning_rate": 0.0006811896996874576, "loss": 3.2602, "step": 37555 }, { "epoch": 2.5519771708112513, "grad_norm": 1.0694260597229004, "learning_rate": 0.0006811472346786248, "loss": 3.239, "step": 37560 }, { "epoch": 2.5523168908819134, "grad_norm": 1.133461356163025, "learning_rate": 0.0006811047696697921, "loss": 3.4012, "step": 37565 }, { "epoch": 2.552656610952575, "grad_norm": 1.646306037902832, "learning_rate": 0.0006810623046609594, "loss": 3.3508, "step": 37570 }, { "epoch": 2.5529963310232366, "grad_norm": 1.932547926902771, 
"learning_rate": 0.0006810198396521266, "loss": 3.4187, "step": 37575 }, { "epoch": 2.5533360510938987, "grad_norm": 1.17507803440094, "learning_rate": 0.0006809773746432939, "loss": 3.8236, "step": 37580 }, { "epoch": 2.5536757711645603, "grad_norm": 1.1229819059371948, "learning_rate": 0.0006809349096344613, "loss": 3.1554, "step": 37585 }, { "epoch": 2.554015491235222, "grad_norm": 1.232085108757019, "learning_rate": 0.0006808924446256285, "loss": 3.5429, "step": 37590 }, { "epoch": 2.554355211305884, "grad_norm": 2.6075942516326904, "learning_rate": 0.0006808499796167957, "loss": 3.5127, "step": 37595 }, { "epoch": 2.5546949313765457, "grad_norm": 1.2069438695907593, "learning_rate": 0.0006808075146079631, "loss": 3.2759, "step": 37600 }, { "epoch": 2.5550346514472073, "grad_norm": 2.2847421169281006, "learning_rate": 0.0006807650495991303, "loss": 3.6307, "step": 37605 }, { "epoch": 2.5553743715178694, "grad_norm": 1.6879973411560059, "learning_rate": 0.0006807225845902975, "loss": 3.6171, "step": 37610 }, { "epoch": 2.555714091588531, "grad_norm": 1.8362715244293213, "learning_rate": 0.0006806801195814649, "loss": 3.315, "step": 37615 }, { "epoch": 2.5560538116591927, "grad_norm": 1.1252936124801636, "learning_rate": 0.0006806376545726322, "loss": 3.6767, "step": 37620 }, { "epoch": 2.5563935317298547, "grad_norm": 2.0602941513061523, "learning_rate": 0.0006805951895637994, "loss": 3.7262, "step": 37625 }, { "epoch": 2.5567332518005164, "grad_norm": 2.5844388008117676, "learning_rate": 0.0006805527245549668, "loss": 3.5093, "step": 37630 }, { "epoch": 2.557072971871178, "grad_norm": 1.4027003049850464, "learning_rate": 0.000680510259546134, "loss": 3.6332, "step": 37635 }, { "epoch": 2.55741269194184, "grad_norm": 1.5090121030807495, "learning_rate": 0.0006804677945373012, "loss": 3.4447, "step": 37640 }, { "epoch": 2.5577524120125017, "grad_norm": 3.117579460144043, "learning_rate": 0.0006804253295284685, "loss": 3.5918, "step": 37645 }, { "epoch": 
2.5580921320831633, "grad_norm": 1.9480692148208618, "learning_rate": 0.0006803828645196358, "loss": 3.4435, "step": 37650 }, { "epoch": 2.5584318521538254, "grad_norm": 1.1908479928970337, "learning_rate": 0.0006803403995108031, "loss": 3.591, "step": 37655 }, { "epoch": 2.558771572224487, "grad_norm": 1.4794821739196777, "learning_rate": 0.0006802979345019704, "loss": 3.4699, "step": 37660 }, { "epoch": 2.5591112922951487, "grad_norm": 1.1877319812774658, "learning_rate": 0.0006802554694931377, "loss": 3.3483, "step": 37665 }, { "epoch": 2.5594510123658107, "grad_norm": 1.101788878440857, "learning_rate": 0.0006802130044843049, "loss": 3.4947, "step": 37670 }, { "epoch": 2.5597907324364724, "grad_norm": 1.2580114603042603, "learning_rate": 0.0006801705394754722, "loss": 3.6603, "step": 37675 }, { "epoch": 2.560130452507134, "grad_norm": 2.074571132659912, "learning_rate": 0.0006801280744666395, "loss": 3.5979, "step": 37680 }, { "epoch": 2.560470172577796, "grad_norm": 1.3015588521957397, "learning_rate": 0.0006800856094578067, "loss": 3.592, "step": 37685 }, { "epoch": 2.5608098926484577, "grad_norm": 1.7454099655151367, "learning_rate": 0.0006800431444489741, "loss": 3.7293, "step": 37690 }, { "epoch": 2.5611496127191193, "grad_norm": 1.8255172967910767, "learning_rate": 0.0006800006794401414, "loss": 3.4936, "step": 37695 }, { "epoch": 2.5614893327897814, "grad_norm": 1.3224393129348755, "learning_rate": 0.0006799582144313086, "loss": 3.5413, "step": 37700 }, { "epoch": 2.561829052860443, "grad_norm": 1.4299525022506714, "learning_rate": 0.0006799157494224759, "loss": 3.4804, "step": 37705 }, { "epoch": 2.5621687729311047, "grad_norm": 5.595602989196777, "learning_rate": 0.0006798732844136431, "loss": 3.4097, "step": 37710 }, { "epoch": 2.5625084930017668, "grad_norm": 1.8690546751022339, "learning_rate": 0.0006798308194048104, "loss": 3.5769, "step": 37715 }, { "epoch": 2.5628482130724284, "grad_norm": 1.469395637512207, "learning_rate": 
0.0006797883543959777, "loss": 3.526, "step": 37720 }, { "epoch": 2.56318793314309, "grad_norm": 1.4943931102752686, "learning_rate": 0.000679745889387145, "loss": 3.298, "step": 37725 }, { "epoch": 2.563527653213752, "grad_norm": 1.2835266590118408, "learning_rate": 0.0006797034243783123, "loss": 3.6364, "step": 37730 }, { "epoch": 2.5638673732844137, "grad_norm": 1.6670897006988525, "learning_rate": 0.0006796609593694796, "loss": 3.6023, "step": 37735 }, { "epoch": 2.5642070933550754, "grad_norm": 1.072485089302063, "learning_rate": 0.0006796184943606468, "loss": 3.6148, "step": 37740 }, { "epoch": 2.5645468134257374, "grad_norm": 1.7950960397720337, "learning_rate": 0.0006795760293518142, "loss": 3.5858, "step": 37745 }, { "epoch": 2.564886533496399, "grad_norm": 4.994467735290527, "learning_rate": 0.0006795335643429814, "loss": 3.3969, "step": 37750 }, { "epoch": 2.5652262535670607, "grad_norm": 1.3378316164016724, "learning_rate": 0.0006794910993341486, "loss": 3.4637, "step": 37755 }, { "epoch": 2.5655659736377228, "grad_norm": 1.3731327056884766, "learning_rate": 0.000679448634325316, "loss": 3.5433, "step": 37760 }, { "epoch": 2.5659056937083844, "grad_norm": 1.3775455951690674, "learning_rate": 0.0006794061693164833, "loss": 3.5245, "step": 37765 }, { "epoch": 2.566245413779046, "grad_norm": 1.4703843593597412, "learning_rate": 0.0006793637043076505, "loss": 3.484, "step": 37770 }, { "epoch": 2.566585133849708, "grad_norm": 1.5672969818115234, "learning_rate": 0.0006793212392988178, "loss": 3.7729, "step": 37775 }, { "epoch": 2.5669248539203697, "grad_norm": 1.323467493057251, "learning_rate": 0.0006792787742899851, "loss": 3.6632, "step": 37780 }, { "epoch": 2.5672645739910314, "grad_norm": 1.0684257745742798, "learning_rate": 0.0006792363092811523, "loss": 3.4951, "step": 37785 }, { "epoch": 2.5676042940616934, "grad_norm": 1.1111961603164673, "learning_rate": 0.0006791938442723196, "loss": 3.839, "step": 37790 }, { "epoch": 2.567944014132355, 
"grad_norm": 3.144162654876709, "learning_rate": 0.000679151379263487, "loss": 3.5271, "step": 37795 }, { "epoch": 2.5682837342030167, "grad_norm": 1.6435216665267944, "learning_rate": 0.0006791089142546542, "loss": 3.7445, "step": 37800 }, { "epoch": 2.5686234542736783, "grad_norm": 1.347633957862854, "learning_rate": 0.0006790664492458215, "loss": 3.4636, "step": 37805 }, { "epoch": 2.5689631743443404, "grad_norm": 1.0837359428405762, "learning_rate": 0.0006790239842369887, "loss": 3.5481, "step": 37810 }, { "epoch": 2.569302894415002, "grad_norm": 1.3102173805236816, "learning_rate": 0.000678981519228156, "loss": 3.5959, "step": 37815 }, { "epoch": 2.5696426144856637, "grad_norm": 1.3747135400772095, "learning_rate": 0.0006789390542193233, "loss": 3.5684, "step": 37820 }, { "epoch": 2.5699823345563257, "grad_norm": 1.2460042238235474, "learning_rate": 0.0006788965892104905, "loss": 3.508, "step": 37825 }, { "epoch": 2.5703220546269874, "grad_norm": 1.3706481456756592, "learning_rate": 0.0006788541242016579, "loss": 3.2737, "step": 37830 }, { "epoch": 2.570661774697649, "grad_norm": 1.3453344106674194, "learning_rate": 0.0006788116591928252, "loss": 3.4084, "step": 37835 }, { "epoch": 2.5710014947683106, "grad_norm": 1.4109532833099365, "learning_rate": 0.0006787691941839924, "loss": 3.3998, "step": 37840 }, { "epoch": 2.5713412148389727, "grad_norm": 1.524124264717102, "learning_rate": 0.0006787267291751596, "loss": 3.4037, "step": 37845 }, { "epoch": 2.5716809349096343, "grad_norm": 1.4752131700515747, "learning_rate": 0.000678684264166327, "loss": 3.6831, "step": 37850 }, { "epoch": 2.572020654980296, "grad_norm": 3.127210855484009, "learning_rate": 0.0006786417991574942, "loss": 3.6764, "step": 37855 }, { "epoch": 2.572360375050958, "grad_norm": 1.1280921697616577, "learning_rate": 0.0006785993341486614, "loss": 3.4209, "step": 37860 }, { "epoch": 2.5727000951216197, "grad_norm": 1.6548471450805664, "learning_rate": 0.0006785568691398289, "loss": 3.75, 
"step": 37865 }, { "epoch": 2.5730398151922813, "grad_norm": 1.3736861944198608, "learning_rate": 0.0006785144041309961, "loss": 3.5342, "step": 37870 }, { "epoch": 2.5733795352629434, "grad_norm": 1.3883932828903198, "learning_rate": 0.0006784719391221633, "loss": 3.5559, "step": 37875 }, { "epoch": 2.573719255333605, "grad_norm": 2.1088778972625732, "learning_rate": 0.0006784294741133307, "loss": 3.3826, "step": 37880 }, { "epoch": 2.5740589754042666, "grad_norm": 1.372380256652832, "learning_rate": 0.0006783870091044979, "loss": 3.4769, "step": 37885 }, { "epoch": 2.5743986954749287, "grad_norm": 1.0843000411987305, "learning_rate": 0.0006783445440956651, "loss": 3.5002, "step": 37890 }, { "epoch": 2.5747384155455904, "grad_norm": 1.7010118961334229, "learning_rate": 0.0006783020790868326, "loss": 3.6206, "step": 37895 }, { "epoch": 2.575078135616252, "grad_norm": 1.5694336891174316, "learning_rate": 0.0006782596140779998, "loss": 3.7083, "step": 37900 }, { "epoch": 2.575417855686914, "grad_norm": 1.2552396059036255, "learning_rate": 0.000678217149069167, "loss": 3.7588, "step": 37905 }, { "epoch": 2.5757575757575757, "grad_norm": 1.5314821004867554, "learning_rate": 0.0006781746840603343, "loss": 3.5741, "step": 37910 }, { "epoch": 2.5760972958282373, "grad_norm": 1.4321472644805908, "learning_rate": 0.0006781322190515016, "loss": 3.5514, "step": 37915 }, { "epoch": 2.5764370158988994, "grad_norm": 2.012218713760376, "learning_rate": 0.0006780897540426688, "loss": 3.4586, "step": 37920 }, { "epoch": 2.576776735969561, "grad_norm": 1.3687676191329956, "learning_rate": 0.0006780472890338361, "loss": 3.5559, "step": 37925 }, { "epoch": 2.5771164560402227, "grad_norm": 1.3924027681350708, "learning_rate": 0.0006780048240250035, "loss": 3.334, "step": 37930 }, { "epoch": 2.5774561761108847, "grad_norm": 1.5229958295822144, "learning_rate": 0.0006779623590161707, "loss": 3.3342, "step": 37935 }, { "epoch": 2.5777958961815464, "grad_norm": 1.2806282043457031, 
"learning_rate": 0.000677919894007338, "loss": 3.4879, "step": 37940 }, { "epoch": 2.578135616252208, "grad_norm": 1.3031607866287231, "learning_rate": 0.0006778774289985052, "loss": 3.3442, "step": 37945 }, { "epoch": 2.57847533632287, "grad_norm": 1.2201615571975708, "learning_rate": 0.0006778349639896725, "loss": 3.4249, "step": 37950 }, { "epoch": 2.5788150563935317, "grad_norm": 1.1927930116653442, "learning_rate": 0.0006777924989808398, "loss": 3.449, "step": 37955 }, { "epoch": 2.5791547764641933, "grad_norm": 1.129019856452942, "learning_rate": 0.000677750033972007, "loss": 3.2749, "step": 37960 }, { "epoch": 2.5794944965348554, "grad_norm": 1.3279162645339966, "learning_rate": 0.0006777075689631744, "loss": 3.7709, "step": 37965 }, { "epoch": 2.579834216605517, "grad_norm": 1.4585386514663696, "learning_rate": 0.0006776651039543417, "loss": 3.3848, "step": 37970 }, { "epoch": 2.5801739366761787, "grad_norm": 1.2452832460403442, "learning_rate": 0.0006776226389455089, "loss": 3.4555, "step": 37975 }, { "epoch": 2.5805136567468407, "grad_norm": 1.221066951751709, "learning_rate": 0.0006775801739366762, "loss": 3.557, "step": 37980 }, { "epoch": 2.5808533768175024, "grad_norm": 1.2872718572616577, "learning_rate": 0.0006775377089278435, "loss": 3.3988, "step": 37985 }, { "epoch": 2.581193096888164, "grad_norm": 1.5989488363265991, "learning_rate": 0.0006774952439190107, "loss": 3.6, "step": 37990 }, { "epoch": 2.581532816958826, "grad_norm": 1.4005012512207031, "learning_rate": 0.0006774527789101779, "loss": 3.6816, "step": 37995 }, { "epoch": 2.5818725370294877, "grad_norm": 1.6561951637268066, "learning_rate": 0.0006774103139013454, "loss": 3.3964, "step": 38000 }, { "epoch": 2.5822122571001493, "grad_norm": 1.5844582319259644, "learning_rate": 0.0006773678488925126, "loss": 3.3888, "step": 38005 }, { "epoch": 2.5825519771708114, "grad_norm": 1.0684438943862915, "learning_rate": 0.0006773253838836798, "loss": 3.805, "step": 38010 }, { "epoch": 
2.582891697241473, "grad_norm": 1.4619941711425781, "learning_rate": 0.0006772829188748472, "loss": 3.6282, "step": 38015 }, { "epoch": 2.5832314173121347, "grad_norm": 1.181235909461975, "learning_rate": 0.0006772404538660144, "loss": 3.7727, "step": 38020 }, { "epoch": 2.5835711373827968, "grad_norm": 1.004764199256897, "learning_rate": 0.0006771979888571816, "loss": 3.5072, "step": 38025 }, { "epoch": 2.5839108574534584, "grad_norm": 1.3279281854629517, "learning_rate": 0.000677155523848349, "loss": 3.433, "step": 38030 }, { "epoch": 2.58425057752412, "grad_norm": 1.7615306377410889, "learning_rate": 0.0006771130588395163, "loss": 3.5442, "step": 38035 }, { "epoch": 2.584590297594782, "grad_norm": 1.294124722480774, "learning_rate": 0.0006770705938306835, "loss": 3.7025, "step": 38040 }, { "epoch": 2.5849300176654437, "grad_norm": 1.1921941041946411, "learning_rate": 0.0006770281288218508, "loss": 3.5697, "step": 38045 }, { "epoch": 2.5852697377361054, "grad_norm": 1.6587153673171997, "learning_rate": 0.0006769856638130181, "loss": 3.5081, "step": 38050 }, { "epoch": 2.5856094578067674, "grad_norm": 1.1551564931869507, "learning_rate": 0.0006769431988041853, "loss": 3.6948, "step": 38055 }, { "epoch": 2.585949177877429, "grad_norm": 4.042424201965332, "learning_rate": 0.0006769007337953526, "loss": 3.5379, "step": 38060 }, { "epoch": 2.5862888979480907, "grad_norm": 1.7056394815444946, "learning_rate": 0.0006768582687865199, "loss": 3.6717, "step": 38065 }, { "epoch": 2.5866286180187528, "grad_norm": 1.1492769718170166, "learning_rate": 0.0006768158037776872, "loss": 3.725, "step": 38070 }, { "epoch": 2.5869683380894144, "grad_norm": 1.261431336402893, "learning_rate": 0.0006767733387688545, "loss": 3.6406, "step": 38075 }, { "epoch": 2.587308058160076, "grad_norm": 1.2280464172363281, "learning_rate": 0.0006767308737600218, "loss": 3.3606, "step": 38080 }, { "epoch": 2.587647778230738, "grad_norm": 1.1413615942001343, "learning_rate": 0.0006766884087511891, 
"loss": 3.5521, "step": 38085 }, { "epoch": 2.5879874983013997, "grad_norm": 1.4088619947433472, "learning_rate": 0.0006766459437423563, "loss": 3.5115, "step": 38090 }, { "epoch": 2.5883272183720614, "grad_norm": 7.067935943603516, "learning_rate": 0.0006766034787335235, "loss": 3.4865, "step": 38095 }, { "epoch": 2.5886669384427234, "grad_norm": 1.141257882118225, "learning_rate": 0.0006765610137246909, "loss": 3.4532, "step": 38100 }, { "epoch": 2.589006658513385, "grad_norm": 1.3032773733139038, "learning_rate": 0.0006765185487158582, "loss": 3.7949, "step": 38105 }, { "epoch": 2.5893463785840467, "grad_norm": 2.071237564086914, "learning_rate": 0.0006764760837070254, "loss": 3.7484, "step": 38110 }, { "epoch": 2.589686098654709, "grad_norm": 1.5855073928833008, "learning_rate": 0.0006764336186981928, "loss": 3.5069, "step": 38115 }, { "epoch": 2.5900258187253704, "grad_norm": 1.6389318704605103, "learning_rate": 0.00067639115368936, "loss": 3.4261, "step": 38120 }, { "epoch": 2.590365538796032, "grad_norm": 1.3575741052627563, "learning_rate": 0.0006763486886805272, "loss": 3.6902, "step": 38125 }, { "epoch": 2.590705258866694, "grad_norm": 1.5256046056747437, "learning_rate": 0.0006763062236716946, "loss": 3.5509, "step": 38130 }, { "epoch": 2.5910449789373557, "grad_norm": 1.3736499547958374, "learning_rate": 0.0006762637586628618, "loss": 3.5657, "step": 38135 }, { "epoch": 2.5913846990080174, "grad_norm": 1.5281082391738892, "learning_rate": 0.0006762212936540291, "loss": 3.4897, "step": 38140 }, { "epoch": 2.591724419078679, "grad_norm": 1.5276060104370117, "learning_rate": 0.0006761788286451964, "loss": 3.7642, "step": 38145 }, { "epoch": 2.592064139149341, "grad_norm": 1.4922845363616943, "learning_rate": 0.0006761363636363637, "loss": 3.5368, "step": 38150 }, { "epoch": 2.5924038592200027, "grad_norm": 1.1370230913162231, "learning_rate": 0.0006760938986275309, "loss": 3.529, "step": 38155 }, { "epoch": 2.5927435792906643, "grad_norm": 
4.5471038818359375, "learning_rate": 0.0006760514336186982, "loss": 3.5835, "step": 38160 }, { "epoch": 2.5930832993613264, "grad_norm": 1.1814111471176147, "learning_rate": 0.0006760089686098655, "loss": 3.4656, "step": 38165 }, { "epoch": 2.593423019431988, "grad_norm": 1.9596272706985474, "learning_rate": 0.0006759665036010327, "loss": 3.4807, "step": 38170 }, { "epoch": 2.5937627395026497, "grad_norm": 1.4461323022842407, "learning_rate": 0.0006759240385922001, "loss": 3.3122, "step": 38175 }, { "epoch": 2.5941024595733113, "grad_norm": 1.4637818336486816, "learning_rate": 0.0006758815735833674, "loss": 3.5514, "step": 38180 }, { "epoch": 2.5944421796439734, "grad_norm": 1.2592204809188843, "learning_rate": 0.0006758391085745346, "loss": 3.3353, "step": 38185 }, { "epoch": 2.594781899714635, "grad_norm": 1.3379935026168823, "learning_rate": 0.0006757966435657019, "loss": 3.4604, "step": 38190 }, { "epoch": 2.5951216197852967, "grad_norm": 1.571203589439392, "learning_rate": 0.0006757541785568691, "loss": 3.6865, "step": 38195 }, { "epoch": 2.5954613398559587, "grad_norm": 1.324519157409668, "learning_rate": 0.0006757117135480364, "loss": 3.4324, "step": 38200 }, { "epoch": 2.5958010599266204, "grad_norm": 1.1978721618652344, "learning_rate": 0.0006756692485392037, "loss": 3.4467, "step": 38205 }, { "epoch": 2.596140779997282, "grad_norm": 1.2131848335266113, "learning_rate": 0.000675626783530371, "loss": 3.6402, "step": 38210 }, { "epoch": 2.596480500067944, "grad_norm": 1.5094692707061768, "learning_rate": 0.0006755843185215383, "loss": 3.4663, "step": 38215 }, { "epoch": 2.5968202201386057, "grad_norm": 1.1591490507125854, "learning_rate": 0.0006755418535127056, "loss": 3.4577, "step": 38220 }, { "epoch": 2.5971599402092673, "grad_norm": 1.2647302150726318, "learning_rate": 0.0006754993885038728, "loss": 3.4922, "step": 38225 }, { "epoch": 2.5974996602799294, "grad_norm": 1.16392982006073, "learning_rate": 0.00067545692349504, "loss": 3.6629, "step": 38230 }, 
{ "epoch": 2.597839380350591, "grad_norm": 1.2756314277648926, "learning_rate": 0.0006754144584862074, "loss": 3.5977, "step": 38235 }, { "epoch": 2.5981791004212527, "grad_norm": 1.269472360610962, "learning_rate": 0.0006753719934773746, "loss": 3.518, "step": 38240 }, { "epoch": 2.5985188204919147, "grad_norm": 1.2585065364837646, "learning_rate": 0.0006753295284685419, "loss": 3.6417, "step": 38245 }, { "epoch": 2.5988585405625764, "grad_norm": 1.5864834785461426, "learning_rate": 0.0006752870634597093, "loss": 3.756, "step": 38250 }, { "epoch": 2.599198260633238, "grad_norm": 1.2997462749481201, "learning_rate": 0.0006752445984508765, "loss": 3.653, "step": 38255 }, { "epoch": 2.5995379807039, "grad_norm": 1.127570390701294, "learning_rate": 0.0006752021334420437, "loss": 3.5578, "step": 38260 }, { "epoch": 2.5998777007745617, "grad_norm": 1.2987815141677856, "learning_rate": 0.0006751596684332111, "loss": 3.4029, "step": 38265 }, { "epoch": 2.6002174208452233, "grad_norm": 1.3670161962509155, "learning_rate": 0.0006751172034243783, "loss": 3.8013, "step": 38270 }, { "epoch": 2.6005571409158854, "grad_norm": 0.9767473340034485, "learning_rate": 0.0006750747384155455, "loss": 3.3438, "step": 38275 }, { "epoch": 2.600896860986547, "grad_norm": 1.3109313249588013, "learning_rate": 0.000675032273406713, "loss": 3.4684, "step": 38280 }, { "epoch": 2.6012365810572087, "grad_norm": 1.275728464126587, "learning_rate": 0.0006749898083978802, "loss": 3.6772, "step": 38285 }, { "epoch": 2.6015763011278707, "grad_norm": 1.8097622394561768, "learning_rate": 0.0006749473433890474, "loss": 3.3093, "step": 38290 }, { "epoch": 2.6019160211985324, "grad_norm": 1.2774426937103271, "learning_rate": 0.0006749048783802147, "loss": 3.5206, "step": 38295 }, { "epoch": 2.602255741269194, "grad_norm": 1.775702714920044, "learning_rate": 0.000674862413371382, "loss": 3.6778, "step": 38300 }, { "epoch": 2.602595461339856, "grad_norm": 1.7249951362609863, "learning_rate": 
0.0006748199483625492, "loss": 3.4213, "step": 38305 }, { "epoch": 2.6029351814105177, "grad_norm": 1.1172765493392944, "learning_rate": 0.0006747774833537165, "loss": 3.4308, "step": 38310 }, { "epoch": 2.6032749014811793, "grad_norm": 1.7278223037719727, "learning_rate": 0.0006747350183448839, "loss": 3.4454, "step": 38315 }, { "epoch": 2.6036146215518414, "grad_norm": 1.2577571868896484, "learning_rate": 0.0006746925533360511, "loss": 3.5509, "step": 38320 }, { "epoch": 2.603954341622503, "grad_norm": 1.5364305973052979, "learning_rate": 0.0006746500883272184, "loss": 3.7653, "step": 38325 }, { "epoch": 2.6042940616931647, "grad_norm": 1.0692338943481445, "learning_rate": 0.0006746076233183856, "loss": 3.6791, "step": 38330 }, { "epoch": 2.6046337817638268, "grad_norm": 1.171305775642395, "learning_rate": 0.0006745651583095529, "loss": 3.6317, "step": 38335 }, { "epoch": 2.6049735018344884, "grad_norm": 1.4606801271438599, "learning_rate": 0.0006745226933007202, "loss": 3.5348, "step": 38340 }, { "epoch": 2.60531322190515, "grad_norm": 1.4058030843734741, "learning_rate": 0.0006744802282918874, "loss": 3.5316, "step": 38345 }, { "epoch": 2.605652941975812, "grad_norm": 1.62006413936615, "learning_rate": 0.0006744377632830548, "loss": 3.4288, "step": 38350 }, { "epoch": 2.6059926620464737, "grad_norm": 1.5648421049118042, "learning_rate": 0.0006743952982742221, "loss": 3.4749, "step": 38355 }, { "epoch": 2.6063323821171354, "grad_norm": 1.3326505422592163, "learning_rate": 0.0006743528332653893, "loss": 3.4606, "step": 38360 }, { "epoch": 2.6066721021877974, "grad_norm": 1.5064362287521362, "learning_rate": 0.0006743103682565566, "loss": 3.4175, "step": 38365 }, { "epoch": 2.607011822258459, "grad_norm": 0.9435018301010132, "learning_rate": 0.0006742679032477239, "loss": 3.748, "step": 38370 }, { "epoch": 2.6073515423291207, "grad_norm": 1.0860581398010254, "learning_rate": 0.0006742254382388911, "loss": 3.9766, "step": 38375 }, { "epoch": 2.6076912623997828, 
"grad_norm": 1.2047017812728882, "learning_rate": 0.0006741829732300583, "loss": 3.6527, "step": 38380 }, { "epoch": 2.6080309824704444, "grad_norm": 1.8047372102737427, "learning_rate": 0.0006741405082212258, "loss": 3.6041, "step": 38385 }, { "epoch": 2.608370702541106, "grad_norm": 1.424353837966919, "learning_rate": 0.000674098043212393, "loss": 3.4564, "step": 38390 }, { "epoch": 2.608710422611768, "grad_norm": 1.2695438861846924, "learning_rate": 0.0006740555782035602, "loss": 3.8136, "step": 38395 }, { "epoch": 2.6090501426824297, "grad_norm": 1.0184439420700073, "learning_rate": 0.0006740131131947276, "loss": 3.7133, "step": 38400 }, { "epoch": 2.6093898627530914, "grad_norm": 1.2374283075332642, "learning_rate": 0.0006739706481858948, "loss": 3.5089, "step": 38405 }, { "epoch": 2.6097295828237534, "grad_norm": 1.1045995950698853, "learning_rate": 0.000673928183177062, "loss": 3.6054, "step": 38410 }, { "epoch": 2.610069302894415, "grad_norm": 1.2940348386764526, "learning_rate": 0.0006738857181682295, "loss": 3.6603, "step": 38415 }, { "epoch": 2.6104090229650767, "grad_norm": 1.3072936534881592, "learning_rate": 0.0006738432531593967, "loss": 3.4323, "step": 38420 }, { "epoch": 2.610748743035739, "grad_norm": 1.4295904636383057, "learning_rate": 0.000673800788150564, "loss": 3.5191, "step": 38425 }, { "epoch": 2.6110884631064004, "grad_norm": 1.1432580947875977, "learning_rate": 0.0006737583231417313, "loss": 3.2492, "step": 38430 }, { "epoch": 2.611428183177062, "grad_norm": 1.2679188251495361, "learning_rate": 0.0006737158581328985, "loss": 3.3298, "step": 38435 }, { "epoch": 2.611767903247724, "grad_norm": 1.5641887187957764, "learning_rate": 0.0006736733931240658, "loss": 3.5761, "step": 38440 }, { "epoch": 2.6121076233183858, "grad_norm": 1.4825223684310913, "learning_rate": 0.000673630928115233, "loss": 3.414, "step": 38445 }, { "epoch": 2.6124473433890474, "grad_norm": 1.1170315742492676, "learning_rate": 0.0006735884631064004, "loss": 3.46, 
"step": 38450 }, { "epoch": 2.6127870634597095, "grad_norm": 1.1855138540267944, "learning_rate": 0.0006735459980975677, "loss": 3.5532, "step": 38455 }, { "epoch": 2.613126783530371, "grad_norm": 1.2154043912887573, "learning_rate": 0.0006735035330887349, "loss": 3.6571, "step": 38460 }, { "epoch": 2.6134665036010327, "grad_norm": 1.5177557468414307, "learning_rate": 0.0006734610680799022, "loss": 3.5778, "step": 38465 }, { "epoch": 2.613806223671695, "grad_norm": 1.5141512155532837, "learning_rate": 0.0006734186030710695, "loss": 3.5632, "step": 38470 }, { "epoch": 2.6141459437423564, "grad_norm": 1.3549832105636597, "learning_rate": 0.0006733761380622367, "loss": 3.4167, "step": 38475 }, { "epoch": 2.614485663813018, "grad_norm": 1.5162854194641113, "learning_rate": 0.0006733336730534039, "loss": 3.7253, "step": 38480 }, { "epoch": 2.6148253838836797, "grad_norm": 1.0999146699905396, "learning_rate": 0.0006732912080445714, "loss": 3.4889, "step": 38485 }, { "epoch": 2.6151651039543418, "grad_norm": 1.7364377975463867, "learning_rate": 0.0006732487430357386, "loss": 3.3564, "step": 38490 }, { "epoch": 2.6155048240250034, "grad_norm": 1.5206528902053833, "learning_rate": 0.0006732062780269058, "loss": 3.557, "step": 38495 }, { "epoch": 2.615844544095665, "grad_norm": 1.7276803255081177, "learning_rate": 0.0006731638130180732, "loss": 3.3721, "step": 38500 }, { "epoch": 2.616184264166327, "grad_norm": 3.465099334716797, "learning_rate": 0.0006731213480092404, "loss": 3.2207, "step": 38505 }, { "epoch": 2.6165239842369887, "grad_norm": 1.6247478723526, "learning_rate": 0.0006730788830004076, "loss": 3.4643, "step": 38510 }, { "epoch": 2.6168637043076504, "grad_norm": 1.9681293964385986, "learning_rate": 0.000673036417991575, "loss": 3.3004, "step": 38515 }, { "epoch": 2.617203424378312, "grad_norm": 1.3959864377975464, "learning_rate": 0.0006729939529827423, "loss": 3.5268, "step": 38520 }, { "epoch": 2.617543144448974, "grad_norm": 1.240269660949707, 
"learning_rate": 0.0006729514879739095, "loss": 3.4107, "step": 38525 }, { "epoch": 2.6178828645196357, "grad_norm": 1.4965561628341675, "learning_rate": 0.0006729090229650769, "loss": 3.4971, "step": 38530 }, { "epoch": 2.6182225845902973, "grad_norm": 0.9003165364265442, "learning_rate": 0.0006728665579562441, "loss": 3.2336, "step": 38535 }, { "epoch": 2.6185623046609594, "grad_norm": 0.9823610186576843, "learning_rate": 0.0006728240929474113, "loss": 3.5736, "step": 38540 }, { "epoch": 2.618902024731621, "grad_norm": 1.3424623012542725, "learning_rate": 0.0006727816279385786, "loss": 3.5101, "step": 38545 }, { "epoch": 2.6192417448022827, "grad_norm": 1.024600625038147, "learning_rate": 0.0006727391629297459, "loss": 3.4411, "step": 38550 }, { "epoch": 2.6195814648729447, "grad_norm": 1.2852634191513062, "learning_rate": 0.0006726966979209132, "loss": 3.6425, "step": 38555 }, { "epoch": 2.6199211849436064, "grad_norm": 1.305174469947815, "learning_rate": 0.0006726542329120805, "loss": 3.4906, "step": 38560 }, { "epoch": 2.620260905014268, "grad_norm": 1.4367320537567139, "learning_rate": 0.0006726117679032478, "loss": 3.4785, "step": 38565 }, { "epoch": 2.62060062508493, "grad_norm": 1.3215386867523193, "learning_rate": 0.000672569302894415, "loss": 3.8002, "step": 38570 }, { "epoch": 2.6209403451555917, "grad_norm": 1.2272512912750244, "learning_rate": 0.0006725268378855823, "loss": 3.6062, "step": 38575 }, { "epoch": 2.6212800652262533, "grad_norm": 1.4971247911453247, "learning_rate": 0.0006724843728767495, "loss": 3.708, "step": 38580 }, { "epoch": 2.6216197852969154, "grad_norm": 1.322189450263977, "learning_rate": 0.0006724419078679168, "loss": 3.6273, "step": 38585 }, { "epoch": 2.621959505367577, "grad_norm": 1.3555082082748413, "learning_rate": 0.0006723994428590842, "loss": 3.5181, "step": 38590 }, { "epoch": 2.6222992254382387, "grad_norm": 3.6583592891693115, "learning_rate": 0.0006723569778502514, "loss": 3.6555, "step": 38595 }, { "epoch": 
2.6226389455089008, "grad_norm": 1.1943366527557373, "learning_rate": 0.0006723145128414187, "loss": 3.6511, "step": 38600 }, { "epoch": 2.6229786655795624, "grad_norm": 3.5227108001708984, "learning_rate": 0.000672272047832586, "loss": 3.7572, "step": 38605 }, { "epoch": 2.623318385650224, "grad_norm": 1.0949527025222778, "learning_rate": 0.0006722295828237532, "loss": 3.576, "step": 38610 }, { "epoch": 2.623658105720886, "grad_norm": 1.1652601957321167, "learning_rate": 0.0006721871178149205, "loss": 3.4985, "step": 38615 }, { "epoch": 2.6239978257915477, "grad_norm": 1.4372808933258057, "learning_rate": 0.0006721446528060878, "loss": 3.5459, "step": 38620 }, { "epoch": 2.6243375458622094, "grad_norm": 1.4224904775619507, "learning_rate": 0.0006721021877972551, "loss": 3.8464, "step": 38625 }, { "epoch": 2.6246772659328714, "grad_norm": 1.1994138956069946, "learning_rate": 0.0006720597227884223, "loss": 3.8328, "step": 38630 }, { "epoch": 2.625016986003533, "grad_norm": 1.4123380184173584, "learning_rate": 0.0006720172577795897, "loss": 3.8911, "step": 38635 }, { "epoch": 2.6253567060741947, "grad_norm": 1.4569634199142456, "learning_rate": 0.0006719747927707569, "loss": 3.6456, "step": 38640 }, { "epoch": 2.6256964261448568, "grad_norm": 1.2673671245574951, "learning_rate": 0.0006719323277619241, "loss": 3.6811, "step": 38645 }, { "epoch": 2.6260361462155184, "grad_norm": 1.8715722560882568, "learning_rate": 0.0006718898627530915, "loss": 3.7343, "step": 38650 }, { "epoch": 2.62637586628618, "grad_norm": 1.6130346059799194, "learning_rate": 0.0006718473977442587, "loss": 3.2575, "step": 38655 }, { "epoch": 2.626715586356842, "grad_norm": 1.503601312637329, "learning_rate": 0.000671804932735426, "loss": 3.4429, "step": 38660 }, { "epoch": 2.6270553064275037, "grad_norm": 1.1858882904052734, "learning_rate": 0.0006717624677265934, "loss": 3.3299, "step": 38665 }, { "epoch": 2.6273950264981654, "grad_norm": 1.4952459335327148, "learning_rate": 
0.0006717200027177606, "loss": 3.5797, "step": 38670 }, { "epoch": 2.6277347465688274, "grad_norm": 1.7730802297592163, "learning_rate": 0.0006716775377089278, "loss": 3.3989, "step": 38675 }, { "epoch": 2.628074466639489, "grad_norm": 1.656293272972107, "learning_rate": 0.0006716350727000951, "loss": 3.6156, "step": 38680 }, { "epoch": 2.6284141867101507, "grad_norm": 1.0894502401351929, "learning_rate": 0.0006715926076912624, "loss": 3.6846, "step": 38685 }, { "epoch": 2.6287539067808128, "grad_norm": 1.1660118103027344, "learning_rate": 0.0006715501426824296, "loss": 3.6358, "step": 38690 }, { "epoch": 2.6290936268514744, "grad_norm": 1.4512678384780884, "learning_rate": 0.000671507677673597, "loss": 3.4621, "step": 38695 }, { "epoch": 2.629433346922136, "grad_norm": 1.1129144430160522, "learning_rate": 0.0006714652126647643, "loss": 3.4279, "step": 38700 }, { "epoch": 2.629773066992798, "grad_norm": 1.2212610244750977, "learning_rate": 0.0006714227476559315, "loss": 3.699, "step": 38705 }, { "epoch": 2.6301127870634597, "grad_norm": 0.8923410773277283, "learning_rate": 0.0006713802826470988, "loss": 3.7049, "step": 38710 }, { "epoch": 2.6304525071341214, "grad_norm": 1.5105630159378052, "learning_rate": 0.000671337817638266, "loss": 3.5437, "step": 38715 }, { "epoch": 2.6307922272047835, "grad_norm": 1.4431933164596558, "learning_rate": 0.0006712953526294333, "loss": 3.5797, "step": 38720 }, { "epoch": 2.631131947275445, "grad_norm": 1.0997875928878784, "learning_rate": 0.0006712528876206006, "loss": 3.5411, "step": 38725 }, { "epoch": 2.6314716673461067, "grad_norm": 1.5313808917999268, "learning_rate": 0.000671210422611768, "loss": 3.5023, "step": 38730 }, { "epoch": 2.631811387416769, "grad_norm": 1.7347458600997925, "learning_rate": 0.0006711679576029352, "loss": 3.4531, "step": 38735 }, { "epoch": 2.6321511074874304, "grad_norm": 1.4166830778121948, "learning_rate": 0.0006711254925941025, "loss": 3.5344, "step": 38740 }, { "epoch": 2.632490827558092, 
"grad_norm": 1.2326005697250366, "learning_rate": 0.0006710830275852697, "loss": 3.5815, "step": 38745 }, { "epoch": 2.632830547628754, "grad_norm": 1.2677743434906006, "learning_rate": 0.000671040562576437, "loss": 3.4819, "step": 38750 }, { "epoch": 2.6331702676994158, "grad_norm": 1.8143209218978882, "learning_rate": 0.0006709980975676043, "loss": 3.2459, "step": 38755 }, { "epoch": 2.6335099877700774, "grad_norm": 1.3992700576782227, "learning_rate": 0.0006709556325587715, "loss": 3.5429, "step": 38760 }, { "epoch": 2.6338497078407395, "grad_norm": 1.2332407236099243, "learning_rate": 0.000670913167549939, "loss": 3.4253, "step": 38765 }, { "epoch": 2.634189427911401, "grad_norm": 1.5495727062225342, "learning_rate": 0.0006708707025411062, "loss": 3.6359, "step": 38770 }, { "epoch": 2.6345291479820627, "grad_norm": 1.1938645839691162, "learning_rate": 0.0006708282375322734, "loss": 3.5963, "step": 38775 }, { "epoch": 2.634868868052725, "grad_norm": 1.1491531133651733, "learning_rate": 0.0006707857725234407, "loss": 3.7632, "step": 38780 }, { "epoch": 2.6352085881233864, "grad_norm": 1.4506570100784302, "learning_rate": 0.000670743307514608, "loss": 3.811, "step": 38785 }, { "epoch": 2.635548308194048, "grad_norm": 1.513342022895813, "learning_rate": 0.0006707008425057752, "loss": 3.6227, "step": 38790 }, { "epoch": 2.63588802826471, "grad_norm": 1.4337553977966309, "learning_rate": 0.0006706583774969425, "loss": 3.4743, "step": 38795 }, { "epoch": 2.6362277483353718, "grad_norm": 1.2843639850616455, "learning_rate": 0.0006706159124881099, "loss": 3.6083, "step": 38800 }, { "epoch": 2.6365674684060334, "grad_norm": 1.3214070796966553, "learning_rate": 0.0006705734474792771, "loss": 3.5046, "step": 38805 }, { "epoch": 2.6369071884766955, "grad_norm": 1.8855773210525513, "learning_rate": 0.0006705309824704444, "loss": 3.3983, "step": 38810 }, { "epoch": 2.637246908547357, "grad_norm": 1.349176287651062, "learning_rate": 0.0006704885174616117, "loss": 3.335, 
"step": 38815 }, { "epoch": 2.6375866286180187, "grad_norm": 1.4042528867721558, "learning_rate": 0.0006704460524527789, "loss": 3.833, "step": 38820 }, { "epoch": 2.6379263486886804, "grad_norm": 1.3883358240127563, "learning_rate": 0.0006704035874439462, "loss": 3.6137, "step": 38825 }, { "epoch": 2.6382660687593424, "grad_norm": 1.331310749053955, "learning_rate": 0.0006703611224351134, "loss": 3.5342, "step": 38830 }, { "epoch": 2.638605788830004, "grad_norm": 1.194939374923706, "learning_rate": 0.0006703186574262808, "loss": 3.7184, "step": 38835 }, { "epoch": 2.6389455089006657, "grad_norm": 1.2355693578720093, "learning_rate": 0.0006702761924174481, "loss": 3.3784, "step": 38840 }, { "epoch": 2.639285228971328, "grad_norm": 1.076104998588562, "learning_rate": 0.0006702337274086153, "loss": 3.462, "step": 38845 }, { "epoch": 2.6396249490419894, "grad_norm": 1.5583139657974243, "learning_rate": 0.0006701912623997826, "loss": 3.4501, "step": 38850 }, { "epoch": 2.639964669112651, "grad_norm": 1.067095398902893, "learning_rate": 0.0006701487973909499, "loss": 3.5919, "step": 38855 }, { "epoch": 2.640304389183313, "grad_norm": 1.3577167987823486, "learning_rate": 0.0006701063323821171, "loss": 3.4847, "step": 38860 }, { "epoch": 2.6406441092539747, "grad_norm": 1.2988749742507935, "learning_rate": 0.0006700638673732843, "loss": 3.5324, "step": 38865 }, { "epoch": 2.6409838293246364, "grad_norm": 1.1339457035064697, "learning_rate": 0.0006700214023644518, "loss": 3.466, "step": 38870 }, { "epoch": 2.641323549395298, "grad_norm": 1.3939626216888428, "learning_rate": 0.000669978937355619, "loss": 3.7257, "step": 38875 }, { "epoch": 2.64166326946596, "grad_norm": 1.3106683492660522, "learning_rate": 0.0006699364723467862, "loss": 3.5277, "step": 38880 }, { "epoch": 2.6420029895366217, "grad_norm": 1.2340505123138428, "learning_rate": 0.0006698940073379536, "loss": 3.702, "step": 38885 }, { "epoch": 2.6423427096072833, "grad_norm": 1.052255392074585, "learning_rate": 
0.0006698515423291208, "loss": 3.6494, "step": 38890 }, { "epoch": 2.6426824296779454, "grad_norm": 1.6104508638381958, "learning_rate": 0.000669809077320288, "loss": 3.5138, "step": 38895 }, { "epoch": 2.643022149748607, "grad_norm": 1.3984192609786987, "learning_rate": 0.0006697666123114554, "loss": 3.1972, "step": 38900 }, { "epoch": 2.6433618698192687, "grad_norm": 1.3931444883346558, "learning_rate": 0.0006697241473026227, "loss": 3.3439, "step": 38905 }, { "epoch": 2.6437015898899308, "grad_norm": 1.0844610929489136, "learning_rate": 0.0006696816822937899, "loss": 3.4523, "step": 38910 }, { "epoch": 2.6440413099605924, "grad_norm": 1.2432193756103516, "learning_rate": 0.0006696392172849573, "loss": 3.469, "step": 38915 }, { "epoch": 2.644381030031254, "grad_norm": 1.3437328338623047, "learning_rate": 0.0006695967522761245, "loss": 3.712, "step": 38920 }, { "epoch": 2.644720750101916, "grad_norm": 1.2593427896499634, "learning_rate": 0.0006695542872672917, "loss": 3.3158, "step": 38925 }, { "epoch": 2.6450604701725777, "grad_norm": 1.2684084177017212, "learning_rate": 0.000669511822258459, "loss": 3.285, "step": 38930 }, { "epoch": 2.6454001902432394, "grad_norm": 1.3020271062850952, "learning_rate": 0.0006694693572496263, "loss": 3.4141, "step": 38935 }, { "epoch": 2.6457399103139014, "grad_norm": 1.2318713665008545, "learning_rate": 0.0006694268922407936, "loss": 3.3485, "step": 38940 }, { "epoch": 2.646079630384563, "grad_norm": 1.0916882753372192, "learning_rate": 0.0006693844272319609, "loss": 3.5647, "step": 38945 }, { "epoch": 2.6464193504552247, "grad_norm": 2.022631883621216, "learning_rate": 0.0006693419622231282, "loss": 3.4598, "step": 38950 }, { "epoch": 2.6467590705258868, "grad_norm": 1.450468897819519, "learning_rate": 0.0006692994972142954, "loss": 3.5141, "step": 38955 }, { "epoch": 2.6470987905965484, "grad_norm": 1.5739115476608276, "learning_rate": 0.0006692570322054627, "loss": 3.2993, "step": 38960 }, { "epoch": 2.64743851066721, 
"grad_norm": 1.4703819751739502, "learning_rate": 0.00066921456719663, "loss": 3.4628, "step": 38965 }, { "epoch": 2.647778230737872, "grad_norm": 1.2896651029586792, "learning_rate": 0.0006691721021877972, "loss": 3.5187, "step": 38970 }, { "epoch": 2.6481179508085337, "grad_norm": 1.435084581375122, "learning_rate": 0.0006691296371789646, "loss": 3.4782, "step": 38975 }, { "epoch": 2.6484576708791954, "grad_norm": 1.2680094242095947, "learning_rate": 0.0006690871721701318, "loss": 3.6805, "step": 38980 }, { "epoch": 2.6487973909498574, "grad_norm": 1.0538182258605957, "learning_rate": 0.0006690447071612991, "loss": 3.4761, "step": 38985 }, { "epoch": 2.649137111020519, "grad_norm": 1.368415117263794, "learning_rate": 0.0006690022421524664, "loss": 3.5184, "step": 38990 }, { "epoch": 2.6494768310911807, "grad_norm": 1.8446094989776611, "learning_rate": 0.0006689597771436336, "loss": 3.3718, "step": 38995 }, { "epoch": 2.649816551161843, "grad_norm": 1.381648302078247, "learning_rate": 0.0006689173121348009, "loss": 3.7242, "step": 39000 }, { "epoch": 2.6501562712325044, "grad_norm": 1.6870583295822144, "learning_rate": 0.0006688748471259683, "loss": 3.4204, "step": 39005 }, { "epoch": 2.650495991303166, "grad_norm": 1.350600004196167, "learning_rate": 0.0006688323821171355, "loss": 3.656, "step": 39010 }, { "epoch": 2.650835711373828, "grad_norm": 1.41726553440094, "learning_rate": 0.0006687899171083027, "loss": 3.5806, "step": 39015 }, { "epoch": 2.6511754314444897, "grad_norm": 1.5922762155532837, "learning_rate": 0.0006687474520994701, "loss": 3.3147, "step": 39020 }, { "epoch": 2.6515151515151514, "grad_norm": 1.2417442798614502, "learning_rate": 0.0006687049870906373, "loss": 3.7258, "step": 39025 }, { "epoch": 2.6518548715858135, "grad_norm": 1.313957929611206, "learning_rate": 0.0006686625220818045, "loss": 3.6777, "step": 39030 }, { "epoch": 2.652194591656475, "grad_norm": 1.3032748699188232, "learning_rate": 0.0006686200570729719, "loss": 3.6839, "step": 
39035 }, { "epoch": 2.6525343117271367, "grad_norm": 1.4166139364242554, "learning_rate": 0.0006685775920641392, "loss": 3.656, "step": 39040 }, { "epoch": 2.652874031797799, "grad_norm": 1.4025695323944092, "learning_rate": 0.0006685351270553064, "loss": 3.5218, "step": 39045 }, { "epoch": 2.6532137518684604, "grad_norm": 1.474056363105774, "learning_rate": 0.0006684926620464738, "loss": 3.4502, "step": 39050 }, { "epoch": 2.653553471939122, "grad_norm": 1.9164615869522095, "learning_rate": 0.000668450197037641, "loss": 3.1491, "step": 39055 }, { "epoch": 2.653893192009784, "grad_norm": 1.6697112321853638, "learning_rate": 0.0006684077320288082, "loss": 3.7665, "step": 39060 }, { "epoch": 2.6542329120804458, "grad_norm": 1.1795434951782227, "learning_rate": 0.0006683652670199755, "loss": 3.2807, "step": 39065 }, { "epoch": 2.6545726321511074, "grad_norm": 1.205457091331482, "learning_rate": 0.0006683228020111428, "loss": 3.8129, "step": 39070 }, { "epoch": 2.6549123522217695, "grad_norm": 1.7717663049697876, "learning_rate": 0.0006682803370023101, "loss": 3.4053, "step": 39075 }, { "epoch": 2.655252072292431, "grad_norm": 1.2266684770584106, "learning_rate": 0.0006682378719934774, "loss": 3.5622, "step": 39080 }, { "epoch": 2.6555917923630927, "grad_norm": 1.0873608589172363, "learning_rate": 0.0006681954069846447, "loss": 3.5345, "step": 39085 }, { "epoch": 2.655931512433755, "grad_norm": 1.1063361167907715, "learning_rate": 0.0006681529419758119, "loss": 3.5354, "step": 39090 }, { "epoch": 2.6562712325044164, "grad_norm": 1.2524373531341553, "learning_rate": 0.0006681104769669792, "loss": 3.5639, "step": 39095 }, { "epoch": 2.656610952575078, "grad_norm": 1.7220427989959717, "learning_rate": 0.0006680680119581465, "loss": 3.4547, "step": 39100 }, { "epoch": 2.65695067264574, "grad_norm": 1.0948504209518433, "learning_rate": 0.0006680255469493138, "loss": 3.4282, "step": 39105 }, { "epoch": 2.6572903927164018, "grad_norm": 1.538894534111023, "learning_rate": 
0.0006679830819404811, "loss": 3.3064, "step": 39110 }, { "epoch": 2.6576301127870634, "grad_norm": 1.369382619857788, "learning_rate": 0.0006679406169316484, "loss": 3.4156, "step": 39115 }, { "epoch": 2.6579698328577255, "grad_norm": 1.2554817199707031, "learning_rate": 0.0006678981519228157, "loss": 3.685, "step": 39120 }, { "epoch": 2.658309552928387, "grad_norm": 1.1063803434371948, "learning_rate": 0.0006678556869139829, "loss": 3.2759, "step": 39125 }, { "epoch": 2.6586492729990487, "grad_norm": 1.329575538635254, "learning_rate": 0.0006678132219051501, "loss": 3.4343, "step": 39130 }, { "epoch": 2.658988993069711, "grad_norm": 1.3335331678390503, "learning_rate": 0.0006677707568963175, "loss": 3.6422, "step": 39135 }, { "epoch": 2.6593287131403724, "grad_norm": 1.0867016315460205, "learning_rate": 0.0006677282918874847, "loss": 3.4193, "step": 39140 }, { "epoch": 2.659668433211034, "grad_norm": 1.3554095029830933, "learning_rate": 0.000667685826878652, "loss": 3.3651, "step": 39145 }, { "epoch": 2.660008153281696, "grad_norm": 1.2865588665008545, "learning_rate": 0.0006676433618698194, "loss": 3.2236, "step": 39150 }, { "epoch": 2.660347873352358, "grad_norm": 1.0522624254226685, "learning_rate": 0.0006676008968609866, "loss": 3.9899, "step": 39155 }, { "epoch": 2.6606875934230194, "grad_norm": 1.379828929901123, "learning_rate": 0.0006675584318521538, "loss": 3.4477, "step": 39160 }, { "epoch": 2.661027313493681, "grad_norm": 1.588876724243164, "learning_rate": 0.0006675159668433212, "loss": 3.8154, "step": 39165 }, { "epoch": 2.661367033564343, "grad_norm": 1.5940783023834229, "learning_rate": 0.0006674735018344884, "loss": 3.4869, "step": 39170 }, { "epoch": 2.6617067536350048, "grad_norm": 1.2253726720809937, "learning_rate": 0.0006674310368256556, "loss": 3.3832, "step": 39175 }, { "epoch": 2.6620464737056664, "grad_norm": 1.2800434827804565, "learning_rate": 0.000667388571816823, "loss": 3.3361, "step": 39180 }, { "epoch": 2.6623861937763285, 
"grad_norm": 1.5316522121429443, "learning_rate": 0.0006673461068079903, "loss": 3.2253, "step": 39185 }, { "epoch": 2.66272591384699, "grad_norm": 1.7053797245025635, "learning_rate": 0.0006673036417991575, "loss": 3.423, "step": 39190 }, { "epoch": 2.6630656339176517, "grad_norm": 1.3629220724105835, "learning_rate": 0.0006672611767903248, "loss": 3.6011, "step": 39195 }, { "epoch": 2.663405353988314, "grad_norm": 1.459367036819458, "learning_rate": 0.0006672187117814921, "loss": 3.7156, "step": 39200 }, { "epoch": 2.6637450740589754, "grad_norm": 1.5908010005950928, "learning_rate": 0.0006671762467726593, "loss": 3.4296, "step": 39205 }, { "epoch": 2.664084794129637, "grad_norm": 1.2312359809875488, "learning_rate": 0.0006671337817638266, "loss": 3.5181, "step": 39210 }, { "epoch": 2.6644245142002987, "grad_norm": 1.3526923656463623, "learning_rate": 0.000667091316754994, "loss": 3.4306, "step": 39215 }, { "epoch": 2.6647642342709608, "grad_norm": 1.1893725395202637, "learning_rate": 0.0006670488517461612, "loss": 3.3189, "step": 39220 }, { "epoch": 2.6651039543416224, "grad_norm": 1.211728572845459, "learning_rate": 0.0006670063867373285, "loss": 3.784, "step": 39225 }, { "epoch": 2.665443674412284, "grad_norm": 1.4311519861221313, "learning_rate": 0.0006669639217284957, "loss": 3.4011, "step": 39230 }, { "epoch": 2.665783394482946, "grad_norm": 1.4717676639556885, "learning_rate": 0.000666921456719663, "loss": 3.4571, "step": 39235 }, { "epoch": 2.6661231145536077, "grad_norm": 1.2552779912948608, "learning_rate": 0.0006668789917108303, "loss": 3.419, "step": 39240 }, { "epoch": 2.6664628346242694, "grad_norm": 1.1557202339172363, "learning_rate": 0.0006668365267019975, "loss": 3.5198, "step": 39245 }, { "epoch": 2.6668025546949314, "grad_norm": 1.3521724939346313, "learning_rate": 0.0006667940616931649, "loss": 3.6268, "step": 39250 }, { "epoch": 2.667142274765593, "grad_norm": 1.3085535764694214, "learning_rate": 0.0006667515966843322, "loss": 3.2917, 
"step": 39255 }, { "epoch": 2.6674819948362547, "grad_norm": 1.7987877130508423, "learning_rate": 0.0006667091316754994, "loss": 3.7511, "step": 39260 }, { "epoch": 2.6678217149069168, "grad_norm": 1.3115047216415405, "learning_rate": 0.0006666666666666666, "loss": 3.4242, "step": 39265 }, { "epoch": 2.6681614349775784, "grad_norm": 1.243958592414856, "learning_rate": 0.000666624201657834, "loss": 3.3954, "step": 39270 }, { "epoch": 2.66850115504824, "grad_norm": 1.2080384492874146, "learning_rate": 0.0006665817366490012, "loss": 3.6885, "step": 39275 }, { "epoch": 2.668840875118902, "grad_norm": 1.6177312135696411, "learning_rate": 0.0006665392716401684, "loss": 3.5696, "step": 39280 }, { "epoch": 2.6691805951895637, "grad_norm": 1.2001675367355347, "learning_rate": 0.0006664968066313359, "loss": 3.6762, "step": 39285 }, { "epoch": 2.6695203152602254, "grad_norm": 1.3503018617630005, "learning_rate": 0.0006664543416225031, "loss": 3.5268, "step": 39290 }, { "epoch": 2.6698600353308874, "grad_norm": 1.2764136791229248, "learning_rate": 0.0006664118766136703, "loss": 3.7423, "step": 39295 }, { "epoch": 2.670199755401549, "grad_norm": 1.3245280981063843, "learning_rate": 0.0006663694116048377, "loss": 3.5071, "step": 39300 }, { "epoch": 2.6705394754722107, "grad_norm": 1.3560500144958496, "learning_rate": 0.0006663269465960049, "loss": 3.4853, "step": 39305 }, { "epoch": 2.670879195542873, "grad_norm": 1.177856683731079, "learning_rate": 0.0006662844815871721, "loss": 3.4858, "step": 39310 }, { "epoch": 2.6712189156135344, "grad_norm": 1.4675261974334717, "learning_rate": 0.0006662420165783394, "loss": 3.615, "step": 39315 }, { "epoch": 2.671558635684196, "grad_norm": 1.5014175176620483, "learning_rate": 0.0006661995515695068, "loss": 3.9037, "step": 39320 }, { "epoch": 2.671898355754858, "grad_norm": 1.210137128829956, "learning_rate": 0.000666157086560674, "loss": 3.529, "step": 39325 }, { "epoch": 2.6722380758255198, "grad_norm": 1.055242896080017, 
"learning_rate": 0.0006661146215518413, "loss": 3.5818, "step": 39330 }, { "epoch": 2.6725777958961814, "grad_norm": 1.3468987941741943, "learning_rate": 0.0006660721565430086, "loss": 3.4718, "step": 39335 }, { "epoch": 2.6729175159668435, "grad_norm": 1.5637924671173096, "learning_rate": 0.0006660296915341758, "loss": 3.6768, "step": 39340 }, { "epoch": 2.673257236037505, "grad_norm": 3.7612733840942383, "learning_rate": 0.0006659872265253431, "loss": 3.4045, "step": 39345 }, { "epoch": 2.6735969561081667, "grad_norm": 1.4487501382827759, "learning_rate": 0.0006659447615165104, "loss": 3.4683, "step": 39350 }, { "epoch": 2.673936676178829, "grad_norm": 1.1756947040557861, "learning_rate": 0.0006659022965076777, "loss": 3.7298, "step": 39355 }, { "epoch": 2.6742763962494904, "grad_norm": 1.18185293674469, "learning_rate": 0.000665859831498845, "loss": 3.3816, "step": 39360 }, { "epoch": 2.674616116320152, "grad_norm": 1.233362078666687, "learning_rate": 0.0006658173664900122, "loss": 3.4412, "step": 39365 }, { "epoch": 2.674955836390814, "grad_norm": 1.3272589445114136, "learning_rate": 0.0006657749014811795, "loss": 3.5492, "step": 39370 }, { "epoch": 2.6752955564614758, "grad_norm": 1.5331774950027466, "learning_rate": 0.0006657324364723468, "loss": 3.665, "step": 39375 }, { "epoch": 2.6756352765321374, "grad_norm": 1.496610164642334, "learning_rate": 0.000665689971463514, "loss": 3.5149, "step": 39380 }, { "epoch": 2.6759749966027995, "grad_norm": 1.342835545539856, "learning_rate": 0.0006656475064546813, "loss": 3.3835, "step": 39385 }, { "epoch": 2.676314716673461, "grad_norm": 1.473641276359558, "learning_rate": 0.0006656050414458487, "loss": 3.6937, "step": 39390 }, { "epoch": 2.6766544367441227, "grad_norm": 1.4111310243606567, "learning_rate": 0.0006655625764370159, "loss": 3.7235, "step": 39395 }, { "epoch": 2.676994156814785, "grad_norm": 1.2843883037567139, "learning_rate": 0.0006655201114281832, "loss": 3.6877, "step": 39400 }, { "epoch": 
2.6773338768854464, "grad_norm": 1.3238033056259155, "learning_rate": 0.0006654776464193505, "loss": 3.4281, "step": 39405 }, { "epoch": 2.677673596956108, "grad_norm": 1.4188319444656372, "learning_rate": 0.0006654351814105177, "loss": 3.2969, "step": 39410 }, { "epoch": 2.67801331702677, "grad_norm": 1.0397745370864868, "learning_rate": 0.0006653927164016849, "loss": 3.6314, "step": 39415 }, { "epoch": 2.6783530370974318, "grad_norm": 1.4970868825912476, "learning_rate": 0.0006653502513928523, "loss": 3.6546, "step": 39420 }, { "epoch": 2.6786927571680934, "grad_norm": 1.3944214582443237, "learning_rate": 0.0006653077863840196, "loss": 3.6348, "step": 39425 }, { "epoch": 2.6790324772387555, "grad_norm": 1.9846768379211426, "learning_rate": 0.0006652653213751868, "loss": 3.5773, "step": 39430 }, { "epoch": 2.679372197309417, "grad_norm": 1.335087537765503, "learning_rate": 0.0006652228563663542, "loss": 3.5322, "step": 39435 }, { "epoch": 2.6797119173800787, "grad_norm": 1.1923173666000366, "learning_rate": 0.0006651803913575214, "loss": 3.5188, "step": 39440 }, { "epoch": 2.680051637450741, "grad_norm": 1.3704804182052612, "learning_rate": 0.0006651379263486887, "loss": 3.7024, "step": 39445 }, { "epoch": 2.6803913575214025, "grad_norm": 1.1043956279754639, "learning_rate": 0.000665095461339856, "loss": 3.6563, "step": 39450 }, { "epoch": 2.680731077592064, "grad_norm": 1.3809432983398438, "learning_rate": 0.0006650529963310232, "loss": 3.4929, "step": 39455 }, { "epoch": 2.681070797662726, "grad_norm": 1.2324035167694092, "learning_rate": 0.0006650105313221906, "loss": 3.5847, "step": 39460 }, { "epoch": 2.681410517733388, "grad_norm": 1.2418358325958252, "learning_rate": 0.0006649680663133578, "loss": 3.4039, "step": 39465 }, { "epoch": 2.6817502378040494, "grad_norm": 1.247300624847412, "learning_rate": 0.0006649256013045251, "loss": 3.6085, "step": 39470 }, { "epoch": 2.6820899578747115, "grad_norm": 1.170094609260559, "learning_rate": 0.0006648831362956924, 
"loss": 3.4958, "step": 39475 }, { "epoch": 2.682429677945373, "grad_norm": 1.254896640777588, "learning_rate": 0.0006648406712868596, "loss": 3.3525, "step": 39480 }, { "epoch": 2.6827693980160348, "grad_norm": 1.4991791248321533, "learning_rate": 0.0006647982062780269, "loss": 3.6308, "step": 39485 }, { "epoch": 2.683109118086697, "grad_norm": 1.2129530906677246, "learning_rate": 0.0006647557412691942, "loss": 3.6968, "step": 39490 }, { "epoch": 2.6834488381573585, "grad_norm": 1.4286609888076782, "learning_rate": 0.0006647132762603615, "loss": 3.4812, "step": 39495 }, { "epoch": 2.68378855822802, "grad_norm": 1.2124245166778564, "learning_rate": 0.0006646708112515288, "loss": 3.4445, "step": 39500 }, { "epoch": 2.6841282782986817, "grad_norm": 1.338842511177063, "learning_rate": 0.0006646283462426961, "loss": 3.4047, "step": 39505 }, { "epoch": 2.684467998369344, "grad_norm": 1.4746965169906616, "learning_rate": 0.0006645858812338633, "loss": 3.5884, "step": 39510 }, { "epoch": 2.6848077184400054, "grad_norm": 1.5033150911331177, "learning_rate": 0.0006645434162250305, "loss": 3.4116, "step": 39515 }, { "epoch": 2.685147438510667, "grad_norm": 1.5189274549484253, "learning_rate": 0.0006645009512161979, "loss": 3.3804, "step": 39520 }, { "epoch": 2.685487158581329, "grad_norm": 1.4032527208328247, "learning_rate": 0.0006644584862073651, "loss": 3.383, "step": 39525 }, { "epoch": 2.6858268786519908, "grad_norm": 1.4020413160324097, "learning_rate": 0.0006644160211985324, "loss": 3.5247, "step": 39530 }, { "epoch": 2.6861665987226524, "grad_norm": 1.3167966604232788, "learning_rate": 0.0006643735561896998, "loss": 3.7148, "step": 39535 }, { "epoch": 2.6865063187933145, "grad_norm": 1.3626222610473633, "learning_rate": 0.000664331091180867, "loss": 3.5368, "step": 39540 }, { "epoch": 2.686846038863976, "grad_norm": 1.7663084268569946, "learning_rate": 0.0006642886261720342, "loss": 3.4149, "step": 39545 }, { "epoch": 2.6871857589346377, "grad_norm": 
1.7347196340560913, "learning_rate": 0.0006642461611632016, "loss": 3.5379, "step": 39550 }, { "epoch": 2.6875254790052994, "grad_norm": 1.8981635570526123, "learning_rate": 0.0006642036961543688, "loss": 3.5821, "step": 39555 }, { "epoch": 2.6878651990759614, "grad_norm": 1.5057591199874878, "learning_rate": 0.000664161231145536, "loss": 3.5441, "step": 39560 }, { "epoch": 2.688204919146623, "grad_norm": 1.2261472940444946, "learning_rate": 0.0006641187661367034, "loss": 3.6413, "step": 39565 }, { "epoch": 2.6885446392172847, "grad_norm": 11.091893196105957, "learning_rate": 0.0006640763011278707, "loss": 3.5801, "step": 39570 }, { "epoch": 2.688884359287947, "grad_norm": 1.1489238739013672, "learning_rate": 0.0006640338361190379, "loss": 3.4722, "step": 39575 }, { "epoch": 2.6892240793586084, "grad_norm": 1.2031611204147339, "learning_rate": 0.0006639913711102052, "loss": 3.3964, "step": 39580 }, { "epoch": 2.68956379942927, "grad_norm": 1.4312992095947266, "learning_rate": 0.0006639489061013725, "loss": 3.8029, "step": 39585 }, { "epoch": 2.689903519499932, "grad_norm": 1.5016204118728638, "learning_rate": 0.0006639064410925397, "loss": 3.5226, "step": 39590 }, { "epoch": 2.6902432395705937, "grad_norm": 1.4577447175979614, "learning_rate": 0.0006638639760837071, "loss": 3.6041, "step": 39595 }, { "epoch": 2.6905829596412554, "grad_norm": 1.4026812314987183, "learning_rate": 0.0006638215110748744, "loss": 3.3702, "step": 39600 }, { "epoch": 2.6909226797119175, "grad_norm": 1.238875150680542, "learning_rate": 0.0006637790460660416, "loss": 3.5168, "step": 39605 }, { "epoch": 2.691262399782579, "grad_norm": 1.5552138090133667, "learning_rate": 0.0006637365810572089, "loss": 3.559, "step": 39610 }, { "epoch": 2.6916021198532407, "grad_norm": 1.2707799673080444, "learning_rate": 0.0006636941160483761, "loss": 3.7031, "step": 39615 }, { "epoch": 2.691941839923903, "grad_norm": 1.10240638256073, "learning_rate": 0.0006636516510395434, "loss": 3.559, "step": 39620 }, { 
"epoch": 2.6922815599945644, "grad_norm": 1.5460705757141113, "learning_rate": 0.0006636091860307107, "loss": 3.5696, "step": 39625 }, { "epoch": 2.692621280065226, "grad_norm": 1.2313307523727417, "learning_rate": 0.000663566721021878, "loss": 3.4589, "step": 39630 }, { "epoch": 2.692961000135888, "grad_norm": 1.3317456245422363, "learning_rate": 0.0006635242560130453, "loss": 3.4072, "step": 39635 }, { "epoch": 2.6933007202065498, "grad_norm": 1.5440348386764526, "learning_rate": 0.0006634817910042126, "loss": 3.4883, "step": 39640 }, { "epoch": 2.6936404402772114, "grad_norm": 1.4294445514678955, "learning_rate": 0.0006634393259953798, "loss": 3.2553, "step": 39645 }, { "epoch": 2.6939801603478735, "grad_norm": 9.707416534423828, "learning_rate": 0.000663396860986547, "loss": 3.6023, "step": 39650 }, { "epoch": 2.694319880418535, "grad_norm": 1.5259164571762085, "learning_rate": 0.0006633543959777144, "loss": 3.5158, "step": 39655 }, { "epoch": 2.6946596004891967, "grad_norm": 1.609613299369812, "learning_rate": 0.0006633119309688816, "loss": 3.5967, "step": 39660 }, { "epoch": 2.694999320559859, "grad_norm": 1.0868079662322998, "learning_rate": 0.0006632694659600489, "loss": 3.592, "step": 39665 }, { "epoch": 2.6953390406305204, "grad_norm": 1.2440316677093506, "learning_rate": 0.0006632270009512163, "loss": 3.4706, "step": 39670 }, { "epoch": 2.695678760701182, "grad_norm": 1.45805025100708, "learning_rate": 0.0006631845359423835, "loss": 3.3481, "step": 39675 }, { "epoch": 2.696018480771844, "grad_norm": 1.2050836086273193, "learning_rate": 0.0006631420709335507, "loss": 3.371, "step": 39680 }, { "epoch": 2.6963582008425058, "grad_norm": 1.7538074254989624, "learning_rate": 0.0006630996059247181, "loss": 3.4872, "step": 39685 }, { "epoch": 2.6966979209131674, "grad_norm": 1.3675777912139893, "learning_rate": 0.0006630571409158853, "loss": 3.4292, "step": 39690 }, { "epoch": 2.6970376409838295, "grad_norm": 1.1351481676101685, "learning_rate": 
0.0006630146759070525, "loss": 3.4876, "step": 39695 }, { "epoch": 2.697377361054491, "grad_norm": 1.0761357545852661, "learning_rate": 0.00066297221089822, "loss": 3.6668, "step": 39700 }, { "epoch": 2.6977170811251527, "grad_norm": 1.0561619997024536, "learning_rate": 0.0006629297458893872, "loss": 3.4112, "step": 39705 }, { "epoch": 2.698056801195815, "grad_norm": 1.3397142887115479, "learning_rate": 0.0006628872808805544, "loss": 3.5881, "step": 39710 }, { "epoch": 2.6983965212664764, "grad_norm": 1.697618842124939, "learning_rate": 0.0006628448158717217, "loss": 3.4619, "step": 39715 }, { "epoch": 2.698736241337138, "grad_norm": 1.963734745979309, "learning_rate": 0.000662802350862889, "loss": 3.7661, "step": 39720 }, { "epoch": 2.6990759614078, "grad_norm": 1.0660039186477661, "learning_rate": 0.0006627598858540562, "loss": 3.519, "step": 39725 }, { "epoch": 2.699415681478462, "grad_norm": 8.466361999511719, "learning_rate": 0.0006627174208452235, "loss": 3.3514, "step": 39730 }, { "epoch": 2.6997554015491234, "grad_norm": 1.521019697189331, "learning_rate": 0.0006626749558363909, "loss": 3.4913, "step": 39735 }, { "epoch": 2.7000951216197855, "grad_norm": 1.2728502750396729, "learning_rate": 0.0006626324908275581, "loss": 3.2296, "step": 39740 }, { "epoch": 2.700434841690447, "grad_norm": 1.3386800289154053, "learning_rate": 0.0006625900258187254, "loss": 3.5579, "step": 39745 }, { "epoch": 2.7007745617611087, "grad_norm": 1.6785848140716553, "learning_rate": 0.0006625475608098926, "loss": 3.6213, "step": 39750 }, { "epoch": 2.701114281831771, "grad_norm": 1.075898289680481, "learning_rate": 0.0006625050958010599, "loss": 3.5402, "step": 39755 }, { "epoch": 2.7014540019024325, "grad_norm": 1.4750779867172241, "learning_rate": 0.0006624626307922272, "loss": 3.5656, "step": 39760 }, { "epoch": 2.701793721973094, "grad_norm": 1.1563799381256104, "learning_rate": 0.0006624201657833944, "loss": 3.5709, "step": 39765 }, { "epoch": 2.702133442043756, "grad_norm": 
1.5571075677871704, "learning_rate": 0.0006623777007745618, "loss": 3.4681, "step": 39770 }, { "epoch": 2.702473162114418, "grad_norm": 1.2148817777633667, "learning_rate": 0.0006623352357657291, "loss": 3.351, "step": 39775 }, { "epoch": 2.7028128821850794, "grad_norm": 1.8116918802261353, "learning_rate": 0.0006622927707568963, "loss": 3.3157, "step": 39780 }, { "epoch": 2.7031526022557415, "grad_norm": 1.446163296699524, "learning_rate": 0.0006622503057480637, "loss": 3.4674, "step": 39785 }, { "epoch": 2.703492322326403, "grad_norm": 1.4836435317993164, "learning_rate": 0.0006622078407392309, "loss": 3.6862, "step": 39790 }, { "epoch": 2.7038320423970648, "grad_norm": 1.5904213190078735, "learning_rate": 0.0006621653757303981, "loss": 3.4727, "step": 39795 }, { "epoch": 2.704171762467727, "grad_norm": 1.780988335609436, "learning_rate": 0.0006621229107215655, "loss": 3.5544, "step": 39800 }, { "epoch": 2.7045114825383885, "grad_norm": 1.7165489196777344, "learning_rate": 0.0006620804457127328, "loss": 3.503, "step": 39805 }, { "epoch": 2.70485120260905, "grad_norm": 1.9460735321044922, "learning_rate": 0.0006620379807039, "loss": 3.4814, "step": 39810 }, { "epoch": 2.705190922679712, "grad_norm": 1.3030153512954712, "learning_rate": 0.0006619955156950673, "loss": 3.421, "step": 39815 }, { "epoch": 2.705530642750374, "grad_norm": 1.5347309112548828, "learning_rate": 0.0006619530506862346, "loss": 3.6058, "step": 39820 }, { "epoch": 2.7058703628210354, "grad_norm": 1.294051170349121, "learning_rate": 0.0006619105856774018, "loss": 3.1773, "step": 39825 }, { "epoch": 2.7062100828916975, "grad_norm": 1.2379765510559082, "learning_rate": 0.0006618681206685691, "loss": 3.3818, "step": 39830 }, { "epoch": 2.706549802962359, "grad_norm": 1.3585906028747559, "learning_rate": 0.0006618256556597364, "loss": 3.5534, "step": 39835 }, { "epoch": 2.7068895230330208, "grad_norm": 1.184828519821167, "learning_rate": 0.0006617831906509037, "loss": 3.2824, "step": 39840 }, { 
"epoch": 2.7072292431036824, "grad_norm": 1.1958531141281128, "learning_rate": 0.000661740725642071, "loss": 3.5174, "step": 39845 }, { "epoch": 2.7075689631743445, "grad_norm": 1.428289532661438, "learning_rate": 0.0006616982606332383, "loss": 3.6149, "step": 39850 }, { "epoch": 2.707908683245006, "grad_norm": 6.175607681274414, "learning_rate": 0.0006616557956244055, "loss": 3.4449, "step": 39855 }, { "epoch": 2.7082484033156677, "grad_norm": 1.3293434381484985, "learning_rate": 0.0006616133306155728, "loss": 3.4535, "step": 39860 }, { "epoch": 2.70858812338633, "grad_norm": 1.3270481824874878, "learning_rate": 0.00066157086560674, "loss": 3.5369, "step": 39865 }, { "epoch": 2.7089278434569914, "grad_norm": 1.6250053644180298, "learning_rate": 0.0006615284005979073, "loss": 3.6655, "step": 39870 }, { "epoch": 2.709267563527653, "grad_norm": 1.2843793630599976, "learning_rate": 0.0006614859355890747, "loss": 3.4385, "step": 39875 }, { "epoch": 2.709607283598315, "grad_norm": 1.5714424848556519, "learning_rate": 0.0006614434705802419, "loss": 3.5721, "step": 39880 }, { "epoch": 2.709947003668977, "grad_norm": 1.3495559692382812, "learning_rate": 0.0006614010055714092, "loss": 3.5817, "step": 39885 }, { "epoch": 2.7102867237396384, "grad_norm": 1.2224239110946655, "learning_rate": 0.0006613585405625765, "loss": 3.7257, "step": 39890 }, { "epoch": 2.7106264438103, "grad_norm": 7.85469388961792, "learning_rate": 0.0006613160755537437, "loss": 3.5341, "step": 39895 }, { "epoch": 2.710966163880962, "grad_norm": 1.3360432386398315, "learning_rate": 0.0006612736105449109, "loss": 3.3632, "step": 39900 }, { "epoch": 2.7113058839516238, "grad_norm": 1.7583286762237549, "learning_rate": 0.0006612311455360783, "loss": 3.5432, "step": 39905 }, { "epoch": 2.7116456040222854, "grad_norm": 1.3612693548202515, "learning_rate": 0.0006611886805272456, "loss": 3.5274, "step": 39910 }, { "epoch": 2.7119853240929475, "grad_norm": 1.4628961086273193, "learning_rate": 
0.0006611462155184128, "loss": 3.561, "step": 39915 }, { "epoch": 2.712325044163609, "grad_norm": 1.1249079704284668, "learning_rate": 0.0006611037505095802, "loss": 3.7141, "step": 39920 }, { "epoch": 2.7126647642342707, "grad_norm": 1.053703784942627, "learning_rate": 0.0006610612855007474, "loss": 3.6772, "step": 39925 }, { "epoch": 2.713004484304933, "grad_norm": 1.3966859579086304, "learning_rate": 0.0006610188204919146, "loss": 3.6932, "step": 39930 }, { "epoch": 2.7133442043755944, "grad_norm": 1.694478154182434, "learning_rate": 0.000660976355483082, "loss": 3.6303, "step": 39935 }, { "epoch": 2.713683924446256, "grad_norm": 1.2011455297470093, "learning_rate": 0.0006609338904742492, "loss": 3.498, "step": 39940 }, { "epoch": 2.714023644516918, "grad_norm": 1.1832947731018066, "learning_rate": 0.0006608914254654165, "loss": 3.226, "step": 39945 }, { "epoch": 2.7143633645875798, "grad_norm": 1.8105908632278442, "learning_rate": 0.0006608489604565839, "loss": 3.4407, "step": 39950 }, { "epoch": 2.7147030846582414, "grad_norm": 1.3440295457839966, "learning_rate": 0.0006608064954477511, "loss": 3.4777, "step": 39955 }, { "epoch": 2.7150428047289035, "grad_norm": 1.1035571098327637, "learning_rate": 0.0006607640304389183, "loss": 3.6824, "step": 39960 }, { "epoch": 2.715382524799565, "grad_norm": 1.2033947706222534, "learning_rate": 0.0006607215654300856, "loss": 3.5328, "step": 39965 }, { "epoch": 2.7157222448702267, "grad_norm": 1.0389152765274048, "learning_rate": 0.0006606791004212529, "loss": 3.1691, "step": 39970 }, { "epoch": 2.716061964940889, "grad_norm": 1.1974657773971558, "learning_rate": 0.0006606366354124201, "loss": 3.6539, "step": 39975 }, { "epoch": 2.7164016850115504, "grad_norm": 1.8622523546218872, "learning_rate": 0.0006605941704035875, "loss": 3.4465, "step": 39980 }, { "epoch": 2.716741405082212, "grad_norm": 1.331335425376892, "learning_rate": 0.0006605517053947548, "loss": 3.4184, "step": 39985 }, { "epoch": 2.717081125152874, 
"grad_norm": 1.2187726497650146, "learning_rate": 0.000660509240385922, "loss": 3.5299, "step": 39990 }, { "epoch": 2.7174208452235358, "grad_norm": 1.3136043548583984, "learning_rate": 0.0006604667753770893, "loss": 3.5874, "step": 39995 }, { "epoch": 2.7177605652941974, "grad_norm": 1.142999529838562, "learning_rate": 0.0006604243103682565, "loss": 3.659, "step": 40000 }, { "epoch": 2.7181002853648595, "grad_norm": 1.7048978805541992, "learning_rate": 0.0006603818453594238, "loss": 3.5032, "step": 40005 }, { "epoch": 2.718440005435521, "grad_norm": 1.5417505502700806, "learning_rate": 0.0006603393803505911, "loss": 3.4524, "step": 40010 }, { "epoch": 2.7187797255061827, "grad_norm": 1.2031922340393066, "learning_rate": 0.0006602969153417584, "loss": 3.4189, "step": 40015 }, { "epoch": 2.719119445576845, "grad_norm": 1.4021005630493164, "learning_rate": 0.0006602544503329257, "loss": 3.5523, "step": 40020 }, { "epoch": 2.7194591656475064, "grad_norm": 1.1031564474105835, "learning_rate": 0.000660211985324093, "loss": 3.7384, "step": 40025 }, { "epoch": 2.719798885718168, "grad_norm": 1.7240012884140015, "learning_rate": 0.0006601695203152602, "loss": 3.5662, "step": 40030 }, { "epoch": 2.72013860578883, "grad_norm": 1.2638328075408936, "learning_rate": 0.0006601270553064275, "loss": 3.3165, "step": 40035 }, { "epoch": 2.720478325859492, "grad_norm": 1.6533582210540771, "learning_rate": 0.0006600845902975948, "loss": 3.6824, "step": 40040 }, { "epoch": 2.7208180459301534, "grad_norm": 1.2535103559494019, "learning_rate": 0.000660042125288762, "loss": 3.4414, "step": 40045 }, { "epoch": 2.7211577660008155, "grad_norm": 1.182121753692627, "learning_rate": 0.0006599996602799293, "loss": 3.4533, "step": 40050 }, { "epoch": 2.721497486071477, "grad_norm": 1.4211952686309814, "learning_rate": 0.0006599571952710967, "loss": 3.548, "step": 40055 }, { "epoch": 2.7218372061421388, "grad_norm": 1.757750153541565, "learning_rate": 0.0006599147302622639, "loss": 3.4478, "step": 
40060 }, { "epoch": 2.722176926212801, "grad_norm": 1.2557510137557983, "learning_rate": 0.0006598722652534311, "loss": 3.5313, "step": 40065 }, { "epoch": 2.7225166462834625, "grad_norm": 1.1725077629089355, "learning_rate": 0.0006598298002445985, "loss": 3.6234, "step": 40070 }, { "epoch": 2.722856366354124, "grad_norm": 4.612145900726318, "learning_rate": 0.0006597873352357657, "loss": 3.6308, "step": 40075 }, { "epoch": 2.723196086424786, "grad_norm": 1.503049373626709, "learning_rate": 0.0006597448702269329, "loss": 3.4229, "step": 40080 }, { "epoch": 2.723535806495448, "grad_norm": 1.4886085987091064, "learning_rate": 0.0006597024052181004, "loss": 3.7646, "step": 40085 }, { "epoch": 2.7238755265661094, "grad_norm": 1.9825657606124878, "learning_rate": 0.0006596599402092676, "loss": 3.5478, "step": 40090 }, { "epoch": 2.7242152466367715, "grad_norm": 1.0944141149520874, "learning_rate": 0.0006596174752004348, "loss": 3.6154, "step": 40095 }, { "epoch": 2.724554966707433, "grad_norm": 4.70925760269165, "learning_rate": 0.0006595750101916021, "loss": 3.4985, "step": 40100 }, { "epoch": 2.7248946867780948, "grad_norm": 1.2940666675567627, "learning_rate": 0.0006595325451827694, "loss": 3.5397, "step": 40105 }, { "epoch": 2.725234406848757, "grad_norm": 1.6219724416732788, "learning_rate": 0.0006594900801739366, "loss": 3.6282, "step": 40110 }, { "epoch": 2.7255741269194185, "grad_norm": 1.2354636192321777, "learning_rate": 0.000659447615165104, "loss": 3.5716, "step": 40115 }, { "epoch": 2.72591384699008, "grad_norm": 1.4379971027374268, "learning_rate": 0.0006594051501562713, "loss": 3.42, "step": 40120 }, { "epoch": 2.726253567060742, "grad_norm": 1.3064402341842651, "learning_rate": 0.0006593626851474386, "loss": 3.3791, "step": 40125 }, { "epoch": 2.726593287131404, "grad_norm": 1.219205379486084, "learning_rate": 0.0006593202201386058, "loss": 3.7476, "step": 40130 }, { "epoch": 2.7269330072020654, "grad_norm": 1.3128021955490112, "learning_rate": 
0.000659277755129773, "loss": 3.4742, "step": 40135 }, { "epoch": 2.7272727272727275, "grad_norm": 1.5862873792648315, "learning_rate": 0.0006592352901209404, "loss": 3.3954, "step": 40140 }, { "epoch": 2.727612447343389, "grad_norm": 1.3798861503601074, "learning_rate": 0.0006591928251121076, "loss": 3.4689, "step": 40145 }, { "epoch": 2.7279521674140508, "grad_norm": 1.421517252922058, "learning_rate": 0.000659150360103275, "loss": 3.5799, "step": 40150 }, { "epoch": 2.728291887484713, "grad_norm": 0.9608018398284912, "learning_rate": 0.0006591078950944423, "loss": 3.5239, "step": 40155 }, { "epoch": 2.7286316075553745, "grad_norm": 1.2339215278625488, "learning_rate": 0.0006590654300856095, "loss": 3.4866, "step": 40160 }, { "epoch": 2.728971327626036, "grad_norm": 1.5617536306381226, "learning_rate": 0.0006590229650767767, "loss": 3.7103, "step": 40165 }, { "epoch": 2.729311047696698, "grad_norm": 1.8131769895553589, "learning_rate": 0.0006589805000679441, "loss": 3.5546, "step": 40170 }, { "epoch": 2.72965076776736, "grad_norm": 1.2668339014053345, "learning_rate": 0.0006589380350591113, "loss": 3.7105, "step": 40175 }, { "epoch": 2.7299904878380215, "grad_norm": 1.8031785488128662, "learning_rate": 0.0006588955700502785, "loss": 3.6906, "step": 40180 }, { "epoch": 2.730330207908683, "grad_norm": 1.1179007291793823, "learning_rate": 0.000658853105041446, "loss": 3.5224, "step": 40185 }, { "epoch": 2.730669927979345, "grad_norm": 1.1823170185089111, "learning_rate": 0.0006588106400326132, "loss": 3.7099, "step": 40190 }, { "epoch": 2.731009648050007, "grad_norm": 1.9412344694137573, "learning_rate": 0.0006587681750237804, "loss": 3.5841, "step": 40195 }, { "epoch": 2.7313493681206684, "grad_norm": 1.2610692977905273, "learning_rate": 0.0006587257100149477, "loss": 3.7662, "step": 40200 }, { "epoch": 2.7316890881913305, "grad_norm": 1.3401086330413818, "learning_rate": 0.000658683245006115, "loss": 3.394, "step": 40205 }, { "epoch": 2.732028808261992, 
"grad_norm": 1.1255199909210205, "learning_rate": 0.0006586407799972822, "loss": 3.6311, "step": 40210 }, { "epoch": 2.7323685283326538, "grad_norm": 1.413347601890564, "learning_rate": 0.0006585983149884495, "loss": 3.5929, "step": 40215 }, { "epoch": 2.732708248403316, "grad_norm": 1.2622110843658447, "learning_rate": 0.0006585558499796169, "loss": 3.552, "step": 40220 }, { "epoch": 2.7330479684739775, "grad_norm": 1.4012130498886108, "learning_rate": 0.0006585133849707841, "loss": 3.5193, "step": 40225 }, { "epoch": 2.733387688544639, "grad_norm": 1.4873708486557007, "learning_rate": 0.0006584709199619514, "loss": 3.4301, "step": 40230 }, { "epoch": 2.7337274086153007, "grad_norm": 1.4764102697372437, "learning_rate": 0.0006584284549531187, "loss": 3.5171, "step": 40235 }, { "epoch": 2.734067128685963, "grad_norm": 1.2814579010009766, "learning_rate": 0.0006583859899442859, "loss": 3.6426, "step": 40240 }, { "epoch": 2.7344068487566244, "grad_norm": 1.4200184345245361, "learning_rate": 0.0006583435249354532, "loss": 3.3953, "step": 40245 }, { "epoch": 2.734746568827286, "grad_norm": 1.5842024087905884, "learning_rate": 0.0006583010599266204, "loss": 3.6491, "step": 40250 }, { "epoch": 2.735086288897948, "grad_norm": 1.404781699180603, "learning_rate": 0.0006582585949177878, "loss": 3.6662, "step": 40255 }, { "epoch": 2.7354260089686098, "grad_norm": 0.9958060383796692, "learning_rate": 0.0006582161299089551, "loss": 3.5531, "step": 40260 }, { "epoch": 2.7357657290392714, "grad_norm": 1.3561931848526, "learning_rate": 0.0006581736649001223, "loss": 3.5034, "step": 40265 }, { "epoch": 2.7361054491099335, "grad_norm": 1.221972107887268, "learning_rate": 0.0006581311998912896, "loss": 3.5772, "step": 40270 }, { "epoch": 2.736445169180595, "grad_norm": 1.2555513381958008, "learning_rate": 0.0006580887348824569, "loss": 3.3186, "step": 40275 }, { "epoch": 2.7367848892512567, "grad_norm": 1.4166096448898315, "learning_rate": 0.0006580462698736241, "loss": 3.6905, 
"step": 40280 }, { "epoch": 2.737124609321919, "grad_norm": 1.3054839372634888, "learning_rate": 0.0006580038048647913, "loss": 3.4873, "step": 40285 }, { "epoch": 2.7374643293925804, "grad_norm": 1.1880781650543213, "learning_rate": 0.0006579613398559588, "loss": 3.6252, "step": 40290 }, { "epoch": 2.737804049463242, "grad_norm": 2.0461533069610596, "learning_rate": 0.000657918874847126, "loss": 3.3908, "step": 40295 }, { "epoch": 2.738143769533904, "grad_norm": 1.431204915046692, "learning_rate": 0.0006578764098382932, "loss": 3.4893, "step": 40300 }, { "epoch": 2.738483489604566, "grad_norm": 1.33506178855896, "learning_rate": 0.0006578339448294606, "loss": 3.4464, "step": 40305 }, { "epoch": 2.7388232096752274, "grad_norm": 1.2922866344451904, "learning_rate": 0.0006577914798206278, "loss": 3.6792, "step": 40310 }, { "epoch": 2.7391629297458895, "grad_norm": 1.2662043571472168, "learning_rate": 0.000657749014811795, "loss": 3.543, "step": 40315 }, { "epoch": 2.739502649816551, "grad_norm": 2.0471713542938232, "learning_rate": 0.0006577065498029624, "loss": 3.2496, "step": 40320 }, { "epoch": 2.7398423698872127, "grad_norm": 1.3269455432891846, "learning_rate": 0.0006576640847941297, "loss": 3.5671, "step": 40325 }, { "epoch": 2.740182089957875, "grad_norm": 1.4778119325637817, "learning_rate": 0.0006576216197852969, "loss": 3.6524, "step": 40330 }, { "epoch": 2.7405218100285365, "grad_norm": 1.0982624292373657, "learning_rate": 0.0006575791547764643, "loss": 3.5627, "step": 40335 }, { "epoch": 2.740861530099198, "grad_norm": 1.9013645648956299, "learning_rate": 0.0006575366897676315, "loss": 3.4355, "step": 40340 }, { "epoch": 2.74120125016986, "grad_norm": 1.486530065536499, "learning_rate": 0.0006574942247587987, "loss": 3.5946, "step": 40345 }, { "epoch": 2.741540970240522, "grad_norm": 1.9712947607040405, "learning_rate": 0.000657451759749966, "loss": 3.1073, "step": 40350 }, { "epoch": 2.7418806903111834, "grad_norm": 1.19248628616333, "learning_rate": 
0.0006574092947411333, "loss": 3.3599, "step": 40355 }, { "epoch": 2.7422204103818455, "grad_norm": 1.6148494482040405, "learning_rate": 0.0006573668297323006, "loss": 3.6941, "step": 40360 }, { "epoch": 2.742560130452507, "grad_norm": 1.767586588859558, "learning_rate": 0.0006573243647234679, "loss": 3.6112, "step": 40365 }, { "epoch": 2.7428998505231688, "grad_norm": 1.0878536701202393, "learning_rate": 0.0006572818997146352, "loss": 3.6744, "step": 40370 }, { "epoch": 2.743239570593831, "grad_norm": 1.7342476844787598, "learning_rate": 0.0006572394347058024, "loss": 3.5865, "step": 40375 }, { "epoch": 2.7435792906644925, "grad_norm": 1.2989532947540283, "learning_rate": 0.0006571969696969697, "loss": 3.6793, "step": 40380 }, { "epoch": 2.743919010735154, "grad_norm": 1.7962323427200317, "learning_rate": 0.000657154504688137, "loss": 3.5142, "step": 40385 }, { "epoch": 2.744258730805816, "grad_norm": 1.3526002168655396, "learning_rate": 0.0006571120396793042, "loss": 3.7585, "step": 40390 }, { "epoch": 2.744598450876478, "grad_norm": 1.6272838115692139, "learning_rate": 0.0006570695746704716, "loss": 3.5842, "step": 40395 }, { "epoch": 2.7449381709471394, "grad_norm": 1.4772732257843018, "learning_rate": 0.0006570271096616388, "loss": 3.2646, "step": 40400 }, { "epoch": 2.7452778910178015, "grad_norm": 1.2412620782852173, "learning_rate": 0.0006569846446528061, "loss": 3.5534, "step": 40405 }, { "epoch": 2.745617611088463, "grad_norm": 1.4730550050735474, "learning_rate": 0.0006569421796439734, "loss": 3.5057, "step": 40410 }, { "epoch": 2.7459573311591248, "grad_norm": 1.1836732625961304, "learning_rate": 0.0006568997146351406, "loss": 3.5198, "step": 40415 }, { "epoch": 2.746297051229787, "grad_norm": 1.0432162284851074, "learning_rate": 0.0006568572496263079, "loss": 3.5788, "step": 40420 }, { "epoch": 2.7466367713004485, "grad_norm": 1.9320905208587646, "learning_rate": 0.0006568147846174752, "loss": 3.6357, "step": 40425 }, { "epoch": 2.74697649137111, 
"grad_norm": 1.2194552421569824, "learning_rate": 0.0006567723196086425, "loss": 3.2596, "step": 40430 }, { "epoch": 2.747316211441772, "grad_norm": 1.426287055015564, "learning_rate": 0.0006567298545998097, "loss": 3.6118, "step": 40435 }, { "epoch": 2.747655931512434, "grad_norm": 1.3049604892730713, "learning_rate": 0.0006566873895909771, "loss": 3.6944, "step": 40440 }, { "epoch": 2.7479956515830954, "grad_norm": 1.2099112272262573, "learning_rate": 0.0006566449245821443, "loss": 3.5819, "step": 40445 }, { "epoch": 2.7483353716537575, "grad_norm": 3.8838934898376465, "learning_rate": 0.0006566024595733115, "loss": 3.5492, "step": 40450 }, { "epoch": 2.748675091724419, "grad_norm": 1.1012177467346191, "learning_rate": 0.0006565599945644789, "loss": 3.5699, "step": 40455 }, { "epoch": 2.749014811795081, "grad_norm": 1.5642966032028198, "learning_rate": 0.0006565175295556461, "loss": 3.7641, "step": 40460 }, { "epoch": 2.749354531865743, "grad_norm": 1.6196337938308716, "learning_rate": 0.0006564750645468135, "loss": 3.458, "step": 40465 }, { "epoch": 2.7496942519364045, "grad_norm": 1.9278788566589355, "learning_rate": 0.0006564325995379808, "loss": 3.1932, "step": 40470 }, { "epoch": 2.750033972007066, "grad_norm": 1.1271065473556519, "learning_rate": 0.000656390134529148, "loss": 3.6255, "step": 40475 }, { "epoch": 2.750373692077728, "grad_norm": 1.6754984855651855, "learning_rate": 0.0006563476695203153, "loss": 3.6012, "step": 40480 }, { "epoch": 2.75071341214839, "grad_norm": 1.045612096786499, "learning_rate": 0.0006563052045114825, "loss": 3.6196, "step": 40485 }, { "epoch": 2.7510531322190515, "grad_norm": 1.1947062015533447, "learning_rate": 0.0006562627395026498, "loss": 3.4776, "step": 40490 }, { "epoch": 2.7513928522897135, "grad_norm": 1.169070839881897, "learning_rate": 0.0006562202744938171, "loss": 3.5564, "step": 40495 }, { "epoch": 2.751732572360375, "grad_norm": 1.3542919158935547, "learning_rate": 0.0006561778094849844, "loss": 3.5507, "step": 
40500 }, { "epoch": 2.752072292431037, "grad_norm": 1.3070917129516602, "learning_rate": 0.0006561353444761517, "loss": 3.3826, "step": 40505 }, { "epoch": 2.752412012501699, "grad_norm": 1.0754092931747437, "learning_rate": 0.000656092879467319, "loss": 3.5012, "step": 40510 }, { "epoch": 2.7527517325723605, "grad_norm": 1.429463505744934, "learning_rate": 0.0006560504144584862, "loss": 3.5914, "step": 40515 }, { "epoch": 2.753091452643022, "grad_norm": 1.261589765548706, "learning_rate": 0.0006560079494496535, "loss": 3.3595, "step": 40520 }, { "epoch": 2.7534311727136838, "grad_norm": 1.6609058380126953, "learning_rate": 0.0006559654844408208, "loss": 3.5551, "step": 40525 }, { "epoch": 2.753770892784346, "grad_norm": 1.1344952583312988, "learning_rate": 0.000655923019431988, "loss": 3.5164, "step": 40530 }, { "epoch": 2.7541106128550075, "grad_norm": 0.9971867203712463, "learning_rate": 0.0006558805544231554, "loss": 3.6622, "step": 40535 }, { "epoch": 2.754450332925669, "grad_norm": 1.3275322914123535, "learning_rate": 0.0006558380894143227, "loss": 3.5032, "step": 40540 }, { "epoch": 2.754790052996331, "grad_norm": 1.3235312700271606, "learning_rate": 0.0006557956244054899, "loss": 3.4272, "step": 40545 }, { "epoch": 2.755129773066993, "grad_norm": 1.3557071685791016, "learning_rate": 0.0006557531593966571, "loss": 3.7069, "step": 40550 }, { "epoch": 2.7554694931376544, "grad_norm": 1.6826895475387573, "learning_rate": 0.0006557106943878245, "loss": 3.6467, "step": 40555 }, { "epoch": 2.7558092132083165, "grad_norm": 1.4956830739974976, "learning_rate": 0.0006556682293789917, "loss": 3.4772, "step": 40560 }, { "epoch": 2.756148933278978, "grad_norm": 1.6450413465499878, "learning_rate": 0.0006556257643701589, "loss": 3.4304, "step": 40565 }, { "epoch": 2.7564886533496398, "grad_norm": 1.1074481010437012, "learning_rate": 0.0006555832993613264, "loss": 3.3378, "step": 40570 }, { "epoch": 2.7568283734203014, "grad_norm": 1.674331545829773, "learning_rate": 
0.0006555408343524936, "loss": 3.5276, "step": 40575 }, { "epoch": 2.7571680934909635, "grad_norm": 1.5857897996902466, "learning_rate": 0.0006554983693436608, "loss": 3.3925, "step": 40580 }, { "epoch": 2.757507813561625, "grad_norm": 1.676171064376831, "learning_rate": 0.0006554559043348282, "loss": 3.4342, "step": 40585 }, { "epoch": 2.7578475336322867, "grad_norm": 1.4877722263336182, "learning_rate": 0.0006554134393259954, "loss": 3.4647, "step": 40590 }, { "epoch": 2.758187253702949, "grad_norm": 3.8500444889068604, "learning_rate": 0.0006553709743171626, "loss": 3.5649, "step": 40595 }, { "epoch": 2.7585269737736104, "grad_norm": 1.261753797531128, "learning_rate": 0.0006553285093083299, "loss": 3.6814, "step": 40600 }, { "epoch": 2.758866693844272, "grad_norm": 1.01082181930542, "learning_rate": 0.0006552860442994973, "loss": 3.5912, "step": 40605 }, { "epoch": 2.759206413914934, "grad_norm": 1.4588255882263184, "learning_rate": 0.0006552435792906645, "loss": 3.5458, "step": 40610 }, { "epoch": 2.759546133985596, "grad_norm": 1.2867465019226074, "learning_rate": 0.0006552011142818318, "loss": 3.1549, "step": 40615 }, { "epoch": 2.7598858540562574, "grad_norm": 1.4323869943618774, "learning_rate": 0.0006551586492729991, "loss": 3.6594, "step": 40620 }, { "epoch": 2.7602255741269195, "grad_norm": 1.4505207538604736, "learning_rate": 0.0006551161842641663, "loss": 3.4327, "step": 40625 }, { "epoch": 2.760565294197581, "grad_norm": 1.4188705682754517, "learning_rate": 0.0006550737192553336, "loss": 3.3926, "step": 40630 }, { "epoch": 2.7609050142682428, "grad_norm": 1.244950771331787, "learning_rate": 0.0006550312542465008, "loss": 3.4892, "step": 40635 }, { "epoch": 2.761244734338905, "grad_norm": 1.0538935661315918, "learning_rate": 0.0006549887892376682, "loss": 3.1609, "step": 40640 }, { "epoch": 2.7615844544095665, "grad_norm": 1.5171501636505127, "learning_rate": 0.0006549463242288355, "loss": 3.6209, "step": 40645 }, { "epoch": 2.761924174480228, 
"grad_norm": 1.266069769859314, "learning_rate": 0.0006549038592200027, "loss": 3.4761, "step": 40650 }, { "epoch": 2.76226389455089, "grad_norm": 1.1197198629379272, "learning_rate": 0.00065486139421117, "loss": 3.5411, "step": 40655 }, { "epoch": 2.762603614621552, "grad_norm": 1.3564136028289795, "learning_rate": 0.0006548189292023373, "loss": 3.5014, "step": 40660 }, { "epoch": 2.7629433346922134, "grad_norm": 1.0574862957000732, "learning_rate": 0.0006547764641935045, "loss": 3.525, "step": 40665 }, { "epoch": 2.7632830547628755, "grad_norm": 1.2487380504608154, "learning_rate": 0.0006547339991846717, "loss": 3.5348, "step": 40670 }, { "epoch": 2.763622774833537, "grad_norm": 3.03436541557312, "learning_rate": 0.0006546915341758392, "loss": 3.3587, "step": 40675 }, { "epoch": 2.7639624949041988, "grad_norm": 1.1078577041625977, "learning_rate": 0.0006546490691670064, "loss": 3.3458, "step": 40680 }, { "epoch": 2.764302214974861, "grad_norm": 1.5030802488327026, "learning_rate": 0.0006546066041581736, "loss": 3.6808, "step": 40685 }, { "epoch": 2.7646419350455225, "grad_norm": 1.2196097373962402, "learning_rate": 0.000654564139149341, "loss": 3.7051, "step": 40690 }, { "epoch": 2.764981655116184, "grad_norm": 1.1008459329605103, "learning_rate": 0.0006545216741405082, "loss": 3.5484, "step": 40695 }, { "epoch": 2.765321375186846, "grad_norm": 1.360984206199646, "learning_rate": 0.0006544792091316754, "loss": 3.3367, "step": 40700 }, { "epoch": 2.765661095257508, "grad_norm": 1.3027750253677368, "learning_rate": 0.0006544367441228429, "loss": 3.5796, "step": 40705 }, { "epoch": 2.7660008153281694, "grad_norm": 1.409600019454956, "learning_rate": 0.0006543942791140101, "loss": 3.4562, "step": 40710 }, { "epoch": 2.7663405353988315, "grad_norm": 1.1875638961791992, "learning_rate": 0.0006543518141051773, "loss": 3.557, "step": 40715 }, { "epoch": 2.766680255469493, "grad_norm": 3.818732738494873, "learning_rate": 0.0006543093490963447, "loss": 3.4908, "step": 
40720 }, { "epoch": 2.7670199755401548, "grad_norm": 1.4978922605514526, "learning_rate": 0.0006542668840875119, "loss": 3.2731, "step": 40725 }, { "epoch": 2.767359695610817, "grad_norm": 1.3475465774536133, "learning_rate": 0.0006542244190786791, "loss": 3.4466, "step": 40730 }, { "epoch": 2.7676994156814785, "grad_norm": 2.194058418273926, "learning_rate": 0.0006541819540698464, "loss": 3.5174, "step": 40735 }, { "epoch": 2.76803913575214, "grad_norm": 1.250526785850525, "learning_rate": 0.0006541394890610138, "loss": 3.532, "step": 40740 }, { "epoch": 2.768378855822802, "grad_norm": 1.138447642326355, "learning_rate": 0.000654097024052181, "loss": 3.5646, "step": 40745 }, { "epoch": 2.768718575893464, "grad_norm": 1.2821965217590332, "learning_rate": 0.0006540545590433483, "loss": 3.5877, "step": 40750 }, { "epoch": 2.7690582959641254, "grad_norm": 1.566141963005066, "learning_rate": 0.0006540120940345156, "loss": 3.4301, "step": 40755 }, { "epoch": 2.7693980160347875, "grad_norm": 1.489009976387024, "learning_rate": 0.0006539696290256828, "loss": 3.5236, "step": 40760 }, { "epoch": 2.769737736105449, "grad_norm": 1.3215748071670532, "learning_rate": 0.0006539271640168501, "loss": 3.593, "step": 40765 }, { "epoch": 2.770077456176111, "grad_norm": 1.211052417755127, "learning_rate": 0.0006538846990080174, "loss": 3.6631, "step": 40770 }, { "epoch": 2.770417176246773, "grad_norm": 1.5458589792251587, "learning_rate": 0.0006538422339991847, "loss": 3.6692, "step": 40775 }, { "epoch": 2.7707568963174345, "grad_norm": 1.528642177581787, "learning_rate": 0.000653799768990352, "loss": 3.5559, "step": 40780 }, { "epoch": 2.771096616388096, "grad_norm": 1.3651419878005981, "learning_rate": 0.0006537573039815192, "loss": 3.5049, "step": 40785 }, { "epoch": 2.771436336458758, "grad_norm": 1.039027214050293, "learning_rate": 0.0006537148389726865, "loss": 3.6813, "step": 40790 }, { "epoch": 2.77177605652942, "grad_norm": 1.1780219078063965, "learning_rate": 
0.0006536723739638538, "loss": 3.6822, "step": 40795 }, { "epoch": 2.7721157766000815, "grad_norm": 1.2229604721069336, "learning_rate": 0.000653629908955021, "loss": 3.4138, "step": 40800 }, { "epoch": 2.7724554966707435, "grad_norm": 1.3873603343963623, "learning_rate": 0.0006535874439461884, "loss": 3.4117, "step": 40805 }, { "epoch": 2.772795216741405, "grad_norm": 1.1485872268676758, "learning_rate": 0.0006535449789373557, "loss": 3.3264, "step": 40810 }, { "epoch": 2.773134936812067, "grad_norm": 1.3971219062805176, "learning_rate": 0.0006535025139285229, "loss": 3.6448, "step": 40815 }, { "epoch": 2.773474656882729, "grad_norm": 1.0521498918533325, "learning_rate": 0.0006534600489196903, "loss": 3.6835, "step": 40820 }, { "epoch": 2.7738143769533905, "grad_norm": 1.3183071613311768, "learning_rate": 0.0006534175839108575, "loss": 3.4014, "step": 40825 }, { "epoch": 2.774154097024052, "grad_norm": 1.7119907140731812, "learning_rate": 0.0006533751189020247, "loss": 3.406, "step": 40830 }, { "epoch": 2.774493817094714, "grad_norm": 1.3404558897018433, "learning_rate": 0.000653332653893192, "loss": 3.6562, "step": 40835 }, { "epoch": 2.774833537165376, "grad_norm": 1.2546205520629883, "learning_rate": 0.0006532901888843593, "loss": 3.7458, "step": 40840 }, { "epoch": 2.7751732572360375, "grad_norm": 1.3477329015731812, "learning_rate": 0.0006532477238755266, "loss": 3.7736, "step": 40845 }, { "epoch": 2.7755129773066995, "grad_norm": 1.4107004404067993, "learning_rate": 0.0006532052588666939, "loss": 3.4047, "step": 40850 }, { "epoch": 2.775852697377361, "grad_norm": 1.8105634450912476, "learning_rate": 0.0006531627938578612, "loss": 3.4974, "step": 40855 }, { "epoch": 2.776192417448023, "grad_norm": 1.2706249952316284, "learning_rate": 0.0006531203288490284, "loss": 3.4213, "step": 40860 }, { "epoch": 2.7765321375186844, "grad_norm": 1.3219292163848877, "learning_rate": 0.0006530778638401957, "loss": 3.7589, "step": 40865 }, { "epoch": 2.7768718575893465, 
"grad_norm": 1.1851211786270142, "learning_rate": 0.000653035398831363, "loss": 3.4887, "step": 40870 }, { "epoch": 2.777211577660008, "grad_norm": 1.7417054176330566, "learning_rate": 0.0006529929338225302, "loss": 3.6037, "step": 40875 }, { "epoch": 2.7775512977306698, "grad_norm": 1.4924030303955078, "learning_rate": 0.0006529504688136976, "loss": 3.5516, "step": 40880 }, { "epoch": 2.777891017801332, "grad_norm": 1.6751817464828491, "learning_rate": 0.0006529080038048648, "loss": 3.2493, "step": 40885 }, { "epoch": 2.7782307378719935, "grad_norm": 1.5672595500946045, "learning_rate": 0.0006528655387960321, "loss": 3.6228, "step": 40890 }, { "epoch": 2.778570457942655, "grad_norm": 1.1840927600860596, "learning_rate": 0.0006528230737871994, "loss": 3.6753, "step": 40895 }, { "epoch": 2.778910178013317, "grad_norm": 1.3593019247055054, "learning_rate": 0.0006527806087783666, "loss": 3.1274, "step": 40900 }, { "epoch": 2.779249898083979, "grad_norm": 1.3193100690841675, "learning_rate": 0.0006527381437695339, "loss": 3.6424, "step": 40905 }, { "epoch": 2.7795896181546405, "grad_norm": 1.0598087310791016, "learning_rate": 0.0006526956787607012, "loss": 3.5279, "step": 40910 }, { "epoch": 2.779929338225302, "grad_norm": 1.832696557044983, "learning_rate": 0.0006526532137518685, "loss": 3.533, "step": 40915 }, { "epoch": 2.780269058295964, "grad_norm": 1.4428324699401855, "learning_rate": 0.0006526107487430358, "loss": 3.1377, "step": 40920 }, { "epoch": 2.780608778366626, "grad_norm": 1.3776966333389282, "learning_rate": 0.0006525682837342031, "loss": 3.6467, "step": 40925 }, { "epoch": 2.7809484984372874, "grad_norm": 1.2215338945388794, "learning_rate": 0.0006525258187253703, "loss": 3.663, "step": 40930 }, { "epoch": 2.7812882185079495, "grad_norm": 1.2966285943984985, "learning_rate": 0.0006524833537165375, "loss": 3.6201, "step": 40935 }, { "epoch": 2.781627938578611, "grad_norm": 1.043588638305664, "learning_rate": 0.0006524408887077049, "loss": 3.6153, 
"step": 40940 }, { "epoch": 2.7819676586492728, "grad_norm": 1.4493777751922607, "learning_rate": 0.0006523984236988721, "loss": 3.2416, "step": 40945 }, { "epoch": 2.782307378719935, "grad_norm": 1.9931730031967163, "learning_rate": 0.0006523559586900394, "loss": 3.7147, "step": 40950 }, { "epoch": 2.7826470987905965, "grad_norm": 1.18448007106781, "learning_rate": 0.0006523134936812068, "loss": 3.5285, "step": 40955 }, { "epoch": 2.782986818861258, "grad_norm": 1.2001686096191406, "learning_rate": 0.000652271028672374, "loss": 3.5315, "step": 40960 }, { "epoch": 2.78332653893192, "grad_norm": 1.4512097835540771, "learning_rate": 0.0006522285636635412, "loss": 3.4683, "step": 40965 }, { "epoch": 2.783666259002582, "grad_norm": 0.9563106298446655, "learning_rate": 0.0006521860986547086, "loss": 3.3954, "step": 40970 }, { "epoch": 2.7840059790732434, "grad_norm": 1.192248821258545, "learning_rate": 0.0006521436336458758, "loss": 3.5637, "step": 40975 }, { "epoch": 2.7843456991439055, "grad_norm": 1.3404330015182495, "learning_rate": 0.000652101168637043, "loss": 3.2605, "step": 40980 }, { "epoch": 2.784685419214567, "grad_norm": 1.1449562311172485, "learning_rate": 0.0006520587036282104, "loss": 3.6095, "step": 40985 }, { "epoch": 2.7850251392852288, "grad_norm": 1.3902069330215454, "learning_rate": 0.0006520162386193777, "loss": 3.4851, "step": 40990 }, { "epoch": 2.785364859355891, "grad_norm": 1.3975579738616943, "learning_rate": 0.0006519737736105449, "loss": 3.7251, "step": 40995 }, { "epoch": 2.7857045794265525, "grad_norm": 1.6288989782333374, "learning_rate": 0.0006519313086017122, "loss": 3.3402, "step": 41000 }, { "epoch": 2.786044299497214, "grad_norm": 1.1631875038146973, "learning_rate": 0.0006518888435928795, "loss": 3.8808, "step": 41005 }, { "epoch": 2.786384019567876, "grad_norm": 1.2766172885894775, "learning_rate": 0.0006518463785840467, "loss": 3.6581, "step": 41010 }, { "epoch": 2.786723739638538, "grad_norm": 1.1326518058776855, 
"learning_rate": 0.000651803913575214, "loss": 3.7033, "step": 41015 }, { "epoch": 2.7870634597091994, "grad_norm": 1.2942001819610596, "learning_rate": 0.0006517614485663814, "loss": 3.4867, "step": 41020 }, { "epoch": 2.7874031797798615, "grad_norm": 1.2123888731002808, "learning_rate": 0.0006517189835575486, "loss": 3.6749, "step": 41025 }, { "epoch": 2.787742899850523, "grad_norm": 1.6320186853408813, "learning_rate": 0.0006516765185487159, "loss": 3.2579, "step": 41030 }, { "epoch": 2.788082619921185, "grad_norm": 1.3646836280822754, "learning_rate": 0.0006516340535398831, "loss": 3.7434, "step": 41035 }, { "epoch": 2.788422339991847, "grad_norm": 1.3440982103347778, "learning_rate": 0.0006515915885310504, "loss": 3.5763, "step": 41040 }, { "epoch": 2.7887620600625085, "grad_norm": 1.235756516456604, "learning_rate": 0.0006515491235222177, "loss": 3.0897, "step": 41045 }, { "epoch": 2.78910178013317, "grad_norm": 1.338692307472229, "learning_rate": 0.0006515066585133849, "loss": 3.2615, "step": 41050 }, { "epoch": 2.789441500203832, "grad_norm": 1.2799146175384521, "learning_rate": 0.0006514641935045523, "loss": 3.6741, "step": 41055 }, { "epoch": 2.789781220274494, "grad_norm": 1.1418100595474243, "learning_rate": 0.0006514217284957196, "loss": 3.6889, "step": 41060 }, { "epoch": 2.7901209403451555, "grad_norm": 1.1010605096817017, "learning_rate": 0.0006513792634868868, "loss": 3.5128, "step": 41065 }, { "epoch": 2.7904606604158175, "grad_norm": 1.144650936126709, "learning_rate": 0.000651336798478054, "loss": 3.506, "step": 41070 }, { "epoch": 2.790800380486479, "grad_norm": 1.514557957649231, "learning_rate": 0.0006512943334692214, "loss": 3.4191, "step": 41075 }, { "epoch": 2.791140100557141, "grad_norm": 1.3270083665847778, "learning_rate": 0.0006512518684603886, "loss": 3.5148, "step": 41080 }, { "epoch": 2.791479820627803, "grad_norm": 1.9860293865203857, "learning_rate": 0.0006512094034515558, "loss": 3.4618, "step": 41085 }, { "epoch": 
2.7918195406984645, "grad_norm": 1.1208226680755615, "learning_rate": 0.0006511669384427233, "loss": 3.482, "step": 41090 }, { "epoch": 2.792159260769126, "grad_norm": 1.3676133155822754, "learning_rate": 0.0006511244734338905, "loss": 3.6493, "step": 41095 }, { "epoch": 2.792498980839788, "grad_norm": 1.6766915321350098, "learning_rate": 0.0006510820084250577, "loss": 3.3552, "step": 41100 }, { "epoch": 2.79283870091045, "grad_norm": 1.158030390739441, "learning_rate": 0.0006510395434162251, "loss": 3.5883, "step": 41105 }, { "epoch": 2.7931784209811115, "grad_norm": 1.1948574781417847, "learning_rate": 0.0006509970784073923, "loss": 3.5168, "step": 41110 }, { "epoch": 2.7935181410517735, "grad_norm": 1.1855534315109253, "learning_rate": 0.0006509546133985595, "loss": 3.7195, "step": 41115 }, { "epoch": 2.793857861122435, "grad_norm": 1.1843522787094116, "learning_rate": 0.0006509121483897268, "loss": 3.4707, "step": 41120 }, { "epoch": 2.794197581193097, "grad_norm": 1.0915719270706177, "learning_rate": 0.0006508696833808942, "loss": 3.5677, "step": 41125 }, { "epoch": 2.794537301263759, "grad_norm": 1.2522072792053223, "learning_rate": 0.0006508272183720614, "loss": 3.1848, "step": 41130 }, { "epoch": 2.7948770213344205, "grad_norm": 1.5780901908874512, "learning_rate": 0.0006507847533632287, "loss": 3.4428, "step": 41135 }, { "epoch": 2.795216741405082, "grad_norm": 1.3404488563537598, "learning_rate": 0.000650742288354396, "loss": 3.3305, "step": 41140 }, { "epoch": 2.795556461475744, "grad_norm": 1.8638509511947632, "learning_rate": 0.0006506998233455633, "loss": 3.2778, "step": 41145 }, { "epoch": 2.795896181546406, "grad_norm": 1.240408182144165, "learning_rate": 0.0006506573583367305, "loss": 3.4985, "step": 41150 }, { "epoch": 2.7962359016170675, "grad_norm": 1.7249555587768555, "learning_rate": 0.0006506148933278978, "loss": 3.3754, "step": 41155 }, { "epoch": 2.7965756216877296, "grad_norm": 1.2122136354446411, "learning_rate": 0.0006505724283190652, 
"loss": 3.4315, "step": 41160 }, { "epoch": 2.796915341758391, "grad_norm": 2.248418092727661, "learning_rate": 0.0006505299633102324, "loss": 3.5705, "step": 41165 }, { "epoch": 2.797255061829053, "grad_norm": 1.2031059265136719, "learning_rate": 0.0006504874983013996, "loss": 3.5452, "step": 41170 }, { "epoch": 2.797594781899715, "grad_norm": 1.1319642066955566, "learning_rate": 0.000650445033292567, "loss": 3.4392, "step": 41175 }, { "epoch": 2.7979345019703765, "grad_norm": 1.5157344341278076, "learning_rate": 0.0006504025682837342, "loss": 3.6947, "step": 41180 }, { "epoch": 2.798274222041038, "grad_norm": 1.2821751832962036, "learning_rate": 0.0006503601032749014, "loss": 3.3982, "step": 41185 }, { "epoch": 2.7986139421117002, "grad_norm": 1.0660369396209717, "learning_rate": 0.0006503176382660688, "loss": 3.5796, "step": 41190 }, { "epoch": 2.798953662182362, "grad_norm": 1.17893648147583, "learning_rate": 0.0006502751732572361, "loss": 3.7667, "step": 41195 }, { "epoch": 2.7992933822530235, "grad_norm": 1.3248491287231445, "learning_rate": 0.0006502327082484033, "loss": 3.4712, "step": 41200 }, { "epoch": 2.799633102323685, "grad_norm": 1.1585966348648071, "learning_rate": 0.0006501902432395707, "loss": 3.5667, "step": 41205 }, { "epoch": 2.799972822394347, "grad_norm": 1.229562759399414, "learning_rate": 0.0006501477782307379, "loss": 3.6093, "step": 41210 }, { "epoch": 2.800312542465009, "grad_norm": 1.6896390914916992, "learning_rate": 0.0006501053132219051, "loss": 3.3384, "step": 41215 }, { "epoch": 2.8006522625356705, "grad_norm": 2.75342059135437, "learning_rate": 0.0006500628482130724, "loss": 3.3501, "step": 41220 }, { "epoch": 2.8009919826063325, "grad_norm": 1.1807262897491455, "learning_rate": 0.0006500203832042397, "loss": 3.4754, "step": 41225 }, { "epoch": 2.801331702676994, "grad_norm": 1.5117628574371338, "learning_rate": 0.000649977918195407, "loss": 3.477, "step": 41230 }, { "epoch": 2.801671422747656, "grad_norm": 1.3199800252914429, 
"learning_rate": 0.0006499354531865743, "loss": 3.4574, "step": 41235 }, { "epoch": 2.802011142818318, "grad_norm": 1.5656083822250366, "learning_rate": 0.0006498929881777416, "loss": 3.4616, "step": 41240 }, { "epoch": 2.8023508628889795, "grad_norm": 2.4468255043029785, "learning_rate": 0.0006498505231689088, "loss": 3.4039, "step": 41245 }, { "epoch": 2.802690582959641, "grad_norm": 1.2356762886047363, "learning_rate": 0.0006498080581600761, "loss": 3.4098, "step": 41250 }, { "epoch": 2.8030303030303028, "grad_norm": 1.2070261240005493, "learning_rate": 0.0006497655931512434, "loss": 3.4846, "step": 41255 }, { "epoch": 2.803370023100965, "grad_norm": 1.338067650794983, "learning_rate": 0.0006497231281424106, "loss": 3.8946, "step": 41260 }, { "epoch": 2.8037097431716265, "grad_norm": 1.300917148590088, "learning_rate": 0.000649680663133578, "loss": 3.5086, "step": 41265 }, { "epoch": 2.804049463242288, "grad_norm": 1.160622477531433, "learning_rate": 0.0006496381981247453, "loss": 3.3344, "step": 41270 }, { "epoch": 2.80438918331295, "grad_norm": 1.2466540336608887, "learning_rate": 0.0006495957331159125, "loss": 3.5124, "step": 41275 }, { "epoch": 2.804728903383612, "grad_norm": 2.191422700881958, "learning_rate": 0.0006495532681070798, "loss": 3.5463, "step": 41280 }, { "epoch": 2.8050686234542734, "grad_norm": 1.4172961711883545, "learning_rate": 0.000649510803098247, "loss": 3.4606, "step": 41285 }, { "epoch": 2.8054083435249355, "grad_norm": 1.563240885734558, "learning_rate": 0.0006494683380894143, "loss": 3.6712, "step": 41290 }, { "epoch": 2.805748063595597, "grad_norm": 1.6048262119293213, "learning_rate": 0.0006494258730805817, "loss": 3.1648, "step": 41295 }, { "epoch": 2.8060877836662588, "grad_norm": 1.1455016136169434, "learning_rate": 0.0006493834080717489, "loss": 3.5926, "step": 41300 }, { "epoch": 2.806427503736921, "grad_norm": 1.1204266548156738, "learning_rate": 0.0006493409430629162, "loss": 3.6304, "step": 41305 }, { "epoch": 
2.8067672238075825, "grad_norm": 1.1848649978637695, "learning_rate": 0.0006492984780540835, "loss": 3.3861, "step": 41310 }, { "epoch": 2.807106943878244, "grad_norm": 2.6947054862976074, "learning_rate": 0.0006492560130452507, "loss": 3.4374, "step": 41315 }, { "epoch": 2.807446663948906, "grad_norm": 1.4816253185272217, "learning_rate": 0.0006492135480364179, "loss": 3.401, "step": 41320 }, { "epoch": 2.807786384019568, "grad_norm": 1.156017541885376, "learning_rate": 0.0006491710830275853, "loss": 3.603, "step": 41325 }, { "epoch": 2.8081261040902294, "grad_norm": 1.2140495777130127, "learning_rate": 0.0006491286180187526, "loss": 3.3782, "step": 41330 }, { "epoch": 2.8084658241608915, "grad_norm": 1.4910732507705688, "learning_rate": 0.0006490861530099198, "loss": 3.5114, "step": 41335 }, { "epoch": 2.808805544231553, "grad_norm": 1.2306660413742065, "learning_rate": 0.0006490436880010872, "loss": 3.5542, "step": 41340 }, { "epoch": 2.809145264302215, "grad_norm": 1.1195498704910278, "learning_rate": 0.0006490012229922544, "loss": 3.5939, "step": 41345 }, { "epoch": 2.809484984372877, "grad_norm": 1.5117372274398804, "learning_rate": 0.0006489587579834216, "loss": 3.3492, "step": 41350 }, { "epoch": 2.8098247044435385, "grad_norm": 1.7290802001953125, "learning_rate": 0.000648916292974589, "loss": 3.6392, "step": 41355 }, { "epoch": 2.8101644245142, "grad_norm": 1.5030463933944702, "learning_rate": 0.0006488738279657562, "loss": 3.4854, "step": 41360 }, { "epoch": 2.810504144584862, "grad_norm": 2.3643546104431152, "learning_rate": 0.0006488313629569235, "loss": 3.5133, "step": 41365 }, { "epoch": 2.810843864655524, "grad_norm": 1.4966785907745361, "learning_rate": 0.0006487888979480909, "loss": 3.3929, "step": 41370 }, { "epoch": 2.8111835847261855, "grad_norm": 1.5071364641189575, "learning_rate": 0.0006487464329392581, "loss": 3.4664, "step": 41375 }, { "epoch": 2.8115233047968475, "grad_norm": 1.1453667879104614, "learning_rate": 0.0006487039679304253, 
"loss": 3.6309, "step": 41380 }, { "epoch": 2.811863024867509, "grad_norm": 1.3361210823059082, "learning_rate": 0.0006486615029215926, "loss": 3.6899, "step": 41385 }, { "epoch": 2.812202744938171, "grad_norm": 1.11631178855896, "learning_rate": 0.0006486190379127599, "loss": 3.6487, "step": 41390 }, { "epoch": 2.812542465008833, "grad_norm": 1.1800073385238647, "learning_rate": 0.0006485765729039271, "loss": 3.6392, "step": 41395 }, { "epoch": 2.8128821850794945, "grad_norm": 1.1269842386245728, "learning_rate": 0.0006485341078950945, "loss": 3.6298, "step": 41400 }, { "epoch": 2.813221905150156, "grad_norm": 1.2096495628356934, "learning_rate": 0.0006484916428862618, "loss": 3.5048, "step": 41405 }, { "epoch": 2.813561625220818, "grad_norm": 1.118817925453186, "learning_rate": 0.000648449177877429, "loss": 3.6202, "step": 41410 }, { "epoch": 2.81390134529148, "grad_norm": 1.427557110786438, "learning_rate": 0.0006484067128685963, "loss": 3.6552, "step": 41415 }, { "epoch": 2.8142410653621415, "grad_norm": 1.8156085014343262, "learning_rate": 0.0006483642478597635, "loss": 3.7212, "step": 41420 }, { "epoch": 2.8145807854328035, "grad_norm": 1.2555748224258423, "learning_rate": 0.0006483217828509308, "loss": 3.4876, "step": 41425 }, { "epoch": 2.814920505503465, "grad_norm": 1.4415884017944336, "learning_rate": 0.0006482793178420981, "loss": 3.4561, "step": 41430 }, { "epoch": 2.815260225574127, "grad_norm": 1.2571607828140259, "learning_rate": 0.0006482368528332654, "loss": 3.4358, "step": 41435 }, { "epoch": 2.815599945644789, "grad_norm": 1.5676084756851196, "learning_rate": 0.0006481943878244327, "loss": 3.4937, "step": 41440 }, { "epoch": 2.8159396657154505, "grad_norm": 1.7819955348968506, "learning_rate": 0.0006481519228156, "loss": 3.5852, "step": 41445 }, { "epoch": 2.816279385786112, "grad_norm": 1.3002924919128418, "learning_rate": 0.0006481094578067672, "loss": 3.8293, "step": 41450 }, { "epoch": 2.816619105856774, "grad_norm": 1.3944910764694214, 
"learning_rate": 0.0006480669927979345, "loss": 3.5476, "step": 41455 }, { "epoch": 2.816958825927436, "grad_norm": 1.232418179512024, "learning_rate": 0.0006480245277891018, "loss": 3.7639, "step": 41460 }, { "epoch": 2.8172985459980975, "grad_norm": 1.3825358152389526, "learning_rate": 0.000647982062780269, "loss": 3.4801, "step": 41465 }, { "epoch": 2.8176382660687596, "grad_norm": 1.2336061000823975, "learning_rate": 0.0006479395977714363, "loss": 3.4848, "step": 41470 }, { "epoch": 2.817977986139421, "grad_norm": 0.9937402606010437, "learning_rate": 0.0006478971327626037, "loss": 3.7324, "step": 41475 }, { "epoch": 2.818317706210083, "grad_norm": 1.1673803329467773, "learning_rate": 0.0006478546677537709, "loss": 3.6144, "step": 41480 }, { "epoch": 2.818657426280745, "grad_norm": 1.4785079956054688, "learning_rate": 0.0006478122027449382, "loss": 3.6913, "step": 41485 }, { "epoch": 2.8189971463514065, "grad_norm": 1.3883084058761597, "learning_rate": 0.0006477697377361055, "loss": 3.514, "step": 41490 }, { "epoch": 2.819336866422068, "grad_norm": 1.1978588104248047, "learning_rate": 0.0006477272727272727, "loss": 3.6786, "step": 41495 }, { "epoch": 2.8196765864927302, "grad_norm": 1.3232163190841675, "learning_rate": 0.00064768480771844, "loss": 3.4692, "step": 41500 }, { "epoch": 2.820016306563392, "grad_norm": 1.2521170377731323, "learning_rate": 0.0006476423427096074, "loss": 3.6725, "step": 41505 }, { "epoch": 2.8203560266340535, "grad_norm": 1.2714592218399048, "learning_rate": 0.0006475998777007746, "loss": 3.5839, "step": 41510 }, { "epoch": 2.8206957467047156, "grad_norm": 1.4000681638717651, "learning_rate": 0.0006475574126919419, "loss": 3.5624, "step": 41515 }, { "epoch": 2.821035466775377, "grad_norm": 1.4634582996368408, "learning_rate": 0.0006475149476831091, "loss": 3.6453, "step": 41520 }, { "epoch": 2.821375186846039, "grad_norm": 1.1321817636489868, "learning_rate": 0.0006474724826742764, "loss": 3.5056, "step": 41525 }, { "epoch": 
2.821714906916701, "grad_norm": 1.0792146921157837, "learning_rate": 0.0006474300176654437, "loss": 3.6239, "step": 41530 }, { "epoch": 2.8220546269873625, "grad_norm": 1.0308797359466553, "learning_rate": 0.0006473875526566109, "loss": 3.7256, "step": 41535 }, { "epoch": 2.822394347058024, "grad_norm": 1.2691985368728638, "learning_rate": 0.0006473450876477783, "loss": 3.4158, "step": 41540 }, { "epoch": 2.822734067128686, "grad_norm": 0.965430498123169, "learning_rate": 0.0006473026226389456, "loss": 3.3511, "step": 41545 }, { "epoch": 2.823073787199348, "grad_norm": 1.3905439376831055, "learning_rate": 0.0006472601576301128, "loss": 3.5065, "step": 41550 }, { "epoch": 2.8234135072700095, "grad_norm": 1.4400497674942017, "learning_rate": 0.00064721769262128, "loss": 3.5768, "step": 41555 }, { "epoch": 2.823753227340671, "grad_norm": 1.1196482181549072, "learning_rate": 0.0006471752276124474, "loss": 3.6057, "step": 41560 }, { "epoch": 2.824092947411333, "grad_norm": 1.259393572807312, "learning_rate": 0.0006471327626036146, "loss": 3.9108, "step": 41565 }, { "epoch": 2.824432667481995, "grad_norm": 2.1324987411499023, "learning_rate": 0.0006470902975947818, "loss": 3.3796, "step": 41570 }, { "epoch": 2.8247723875526565, "grad_norm": 1.8277239799499512, "learning_rate": 0.0006470478325859493, "loss": 3.5781, "step": 41575 }, { "epoch": 2.8251121076233185, "grad_norm": 1.1269066333770752, "learning_rate": 0.0006470053675771165, "loss": 3.5869, "step": 41580 }, { "epoch": 2.82545182769398, "grad_norm": 1.4920934438705444, "learning_rate": 0.0006469629025682837, "loss": 3.4482, "step": 41585 }, { "epoch": 2.825791547764642, "grad_norm": 1.5496854782104492, "learning_rate": 0.0006469204375594511, "loss": 3.6908, "step": 41590 }, { "epoch": 2.8261312678353034, "grad_norm": 1.3686226606369019, "learning_rate": 0.0006468779725506183, "loss": 3.4323, "step": 41595 }, { "epoch": 2.8264709879059655, "grad_norm": 1.2116516828536987, "learning_rate": 0.0006468355075417855, 
"loss": 3.7281, "step": 41600 }, { "epoch": 2.826810707976627, "grad_norm": 1.1143282651901245, "learning_rate": 0.0006467930425329529, "loss": 3.5487, "step": 41605 }, { "epoch": 2.8271504280472888, "grad_norm": 1.107736587524414, "learning_rate": 0.0006467505775241202, "loss": 3.4864, "step": 41610 }, { "epoch": 2.827490148117951, "grad_norm": 1.2043551206588745, "learning_rate": 0.0006467081125152874, "loss": 3.5406, "step": 41615 }, { "epoch": 2.8278298681886125, "grad_norm": 1.1508320569992065, "learning_rate": 0.0006466656475064547, "loss": 3.5096, "step": 41620 }, { "epoch": 2.828169588259274, "grad_norm": 1.4090098142623901, "learning_rate": 0.000646623182497622, "loss": 3.3193, "step": 41625 }, { "epoch": 2.828509308329936, "grad_norm": 1.1693936586380005, "learning_rate": 0.0006465807174887892, "loss": 3.7127, "step": 41630 }, { "epoch": 2.828849028400598, "grad_norm": 0.9782364368438721, "learning_rate": 0.0006465382524799565, "loss": 3.3963, "step": 41635 }, { "epoch": 2.8291887484712595, "grad_norm": 1.0785913467407227, "learning_rate": 0.0006464957874711238, "loss": 3.526, "step": 41640 }, { "epoch": 2.8295284685419215, "grad_norm": 1.3398175239562988, "learning_rate": 0.0006464533224622911, "loss": 3.5786, "step": 41645 }, { "epoch": 2.829868188612583, "grad_norm": 1.3278295993804932, "learning_rate": 0.0006464108574534584, "loss": 3.5779, "step": 41650 }, { "epoch": 2.830207908683245, "grad_norm": 1.315398097038269, "learning_rate": 0.0006463683924446257, "loss": 3.6011, "step": 41655 }, { "epoch": 2.830547628753907, "grad_norm": 3.464562177658081, "learning_rate": 0.0006463259274357929, "loss": 3.4548, "step": 41660 }, { "epoch": 2.8308873488245685, "grad_norm": 1.2886323928833008, "learning_rate": 0.0006462834624269602, "loss": 3.3109, "step": 41665 }, { "epoch": 2.83122706889523, "grad_norm": 1.5742709636688232, "learning_rate": 0.0006462409974181274, "loss": 3.7388, "step": 41670 }, { "epoch": 2.831566788965892, "grad_norm": 1.3715834617614746, 
"learning_rate": 0.0006461985324092947, "loss": 3.4787, "step": 41675 }, { "epoch": 2.831906509036554, "grad_norm": 1.3953057527542114, "learning_rate": 0.0006461560674004621, "loss": 3.6346, "step": 41680 }, { "epoch": 2.8322462291072155, "grad_norm": 1.7126717567443848, "learning_rate": 0.0006461136023916293, "loss": 3.5868, "step": 41685 }, { "epoch": 2.8325859491778775, "grad_norm": 1.2944397926330566, "learning_rate": 0.0006460711373827966, "loss": 3.6557, "step": 41690 }, { "epoch": 2.832925669248539, "grad_norm": 1.2741248607635498, "learning_rate": 0.0006460286723739639, "loss": 3.6804, "step": 41695 }, { "epoch": 2.833265389319201, "grad_norm": 2.799363374710083, "learning_rate": 0.0006459862073651311, "loss": 3.5219, "step": 41700 }, { "epoch": 2.833605109389863, "grad_norm": 1.16214919090271, "learning_rate": 0.0006459437423562983, "loss": 3.6256, "step": 41705 }, { "epoch": 2.8339448294605245, "grad_norm": 1.3863625526428223, "learning_rate": 0.0006459012773474657, "loss": 3.7131, "step": 41710 }, { "epoch": 2.834284549531186, "grad_norm": 1.4142109155654907, "learning_rate": 0.000645858812338633, "loss": 3.452, "step": 41715 }, { "epoch": 2.834624269601848, "grad_norm": 1.0129988193511963, "learning_rate": 0.0006458163473298002, "loss": 3.4869, "step": 41720 }, { "epoch": 2.83496398967251, "grad_norm": 1.0705904960632324, "learning_rate": 0.0006457738823209676, "loss": 3.6683, "step": 41725 }, { "epoch": 2.8353037097431715, "grad_norm": 1.2597919702529907, "learning_rate": 0.0006457314173121348, "loss": 3.7282, "step": 41730 }, { "epoch": 2.8356434298138335, "grad_norm": 1.6072068214416504, "learning_rate": 0.000645688952303302, "loss": 3.3699, "step": 41735 }, { "epoch": 2.835983149884495, "grad_norm": 1.1774343252182007, "learning_rate": 0.0006456464872944694, "loss": 3.5999, "step": 41740 }, { "epoch": 2.836322869955157, "grad_norm": 1.6215561628341675, "learning_rate": 0.0006456040222856366, "loss": 3.7303, "step": 41745 }, { "epoch": 
2.836662590025819, "grad_norm": 1.1163173913955688, "learning_rate": 0.0006455615572768039, "loss": 3.5436, "step": 41750 }, { "epoch": 2.8370023100964805, "grad_norm": 1.1322407722473145, "learning_rate": 0.0006455190922679713, "loss": 3.394, "step": 41755 }, { "epoch": 2.837342030167142, "grad_norm": 1.7745270729064941, "learning_rate": 0.0006454766272591385, "loss": 3.8228, "step": 41760 }, { "epoch": 2.8376817502378042, "grad_norm": 1.340967059135437, "learning_rate": 0.0006454341622503057, "loss": 3.472, "step": 41765 }, { "epoch": 2.838021470308466, "grad_norm": 1.4526152610778809, "learning_rate": 0.000645391697241473, "loss": 3.6443, "step": 41770 }, { "epoch": 2.8383611903791275, "grad_norm": 1.200362205505371, "learning_rate": 0.0006453492322326403, "loss": 3.6319, "step": 41775 }, { "epoch": 2.8387009104497896, "grad_norm": 1.902900218963623, "learning_rate": 0.0006453067672238075, "loss": 3.6319, "step": 41780 }, { "epoch": 2.839040630520451, "grad_norm": 2.233109951019287, "learning_rate": 0.0006452643022149749, "loss": 3.649, "step": 41785 }, { "epoch": 2.839380350591113, "grad_norm": 1.129194736480713, "learning_rate": 0.0006452218372061422, "loss": 3.508, "step": 41790 }, { "epoch": 2.839720070661775, "grad_norm": 1.3206101655960083, "learning_rate": 0.0006451793721973094, "loss": 3.6132, "step": 41795 }, { "epoch": 2.8400597907324365, "grad_norm": 1.3709841966629028, "learning_rate": 0.0006451369071884767, "loss": 3.3154, "step": 41800 }, { "epoch": 2.840399510803098, "grad_norm": 1.3875755071640015, "learning_rate": 0.000645094442179644, "loss": 3.8187, "step": 41805 }, { "epoch": 2.8407392308737602, "grad_norm": 1.5083668231964111, "learning_rate": 0.0006450519771708112, "loss": 3.7407, "step": 41810 }, { "epoch": 2.841078950944422, "grad_norm": 1.0586695671081543, "learning_rate": 0.0006450095121619785, "loss": 3.8131, "step": 41815 }, { "epoch": 2.8414186710150835, "grad_norm": 1.6087539196014404, "learning_rate": 0.0006449670471531458, "loss": 
3.2328, "step": 41820 }, { "epoch": 2.8417583910857456, "grad_norm": 1.3139195442199707, "learning_rate": 0.0006449245821443132, "loss": 3.4822, "step": 41825 }, { "epoch": 2.842098111156407, "grad_norm": 1.3868352174758911, "learning_rate": 0.0006448821171354804, "loss": 3.3088, "step": 41830 }, { "epoch": 2.842437831227069, "grad_norm": 1.1824203729629517, "learning_rate": 0.0006448396521266476, "loss": 3.4637, "step": 41835 }, { "epoch": 2.842777551297731, "grad_norm": 1.1879023313522339, "learning_rate": 0.000644797187117815, "loss": 3.6386, "step": 41840 }, { "epoch": 2.8431172713683925, "grad_norm": 1.4471176862716675, "learning_rate": 0.0006447547221089822, "loss": 3.5397, "step": 41845 }, { "epoch": 2.843456991439054, "grad_norm": 3.634300708770752, "learning_rate": 0.0006447122571001494, "loss": 3.4198, "step": 41850 }, { "epoch": 2.8437967115097162, "grad_norm": 1.226699709892273, "learning_rate": 0.0006446697920913169, "loss": 3.5819, "step": 41855 }, { "epoch": 2.844136431580378, "grad_norm": 1.2398626804351807, "learning_rate": 0.0006446273270824841, "loss": 3.1057, "step": 41860 }, { "epoch": 2.8444761516510395, "grad_norm": 1.2723075151443481, "learning_rate": 0.0006445848620736513, "loss": 3.3305, "step": 41865 }, { "epoch": 2.8448158717217016, "grad_norm": 1.607788324356079, "learning_rate": 0.0006445423970648186, "loss": 3.4743, "step": 41870 }, { "epoch": 2.845155591792363, "grad_norm": 1.386177659034729, "learning_rate": 0.0006444999320559859, "loss": 3.3436, "step": 41875 }, { "epoch": 2.845495311863025, "grad_norm": 1.3042763471603394, "learning_rate": 0.0006444574670471531, "loss": 3.3603, "step": 41880 }, { "epoch": 2.8458350319336865, "grad_norm": 1.2125928401947021, "learning_rate": 0.0006444150020383205, "loss": 3.4596, "step": 41885 }, { "epoch": 2.8461747520043486, "grad_norm": 1.396418571472168, "learning_rate": 0.0006443725370294878, "loss": 3.704, "step": 41890 }, { "epoch": 2.84651447207501, "grad_norm": 1.9163765907287598, 
"learning_rate": 0.000644330072020655, "loss": 3.6963, "step": 41895 }, { "epoch": 2.846854192145672, "grad_norm": 1.4214088916778564, "learning_rate": 0.0006442876070118223, "loss": 3.5293, "step": 41900 }, { "epoch": 2.847193912216334, "grad_norm": 1.1017696857452393, "learning_rate": 0.0006442451420029895, "loss": 3.4899, "step": 41905 }, { "epoch": 2.8475336322869955, "grad_norm": 1.3072160482406616, "learning_rate": 0.0006442026769941568, "loss": 3.4705, "step": 41910 }, { "epoch": 2.847873352357657, "grad_norm": 1.3729263544082642, "learning_rate": 0.0006441602119853241, "loss": 3.6819, "step": 41915 }, { "epoch": 2.8482130724283192, "grad_norm": 1.2714629173278809, "learning_rate": 0.0006441177469764914, "loss": 3.8974, "step": 41920 }, { "epoch": 2.848552792498981, "grad_norm": 1.24061119556427, "learning_rate": 0.0006440752819676587, "loss": 3.5242, "step": 41925 }, { "epoch": 2.8488925125696425, "grad_norm": 1.573043942451477, "learning_rate": 0.000644032816958826, "loss": 3.5965, "step": 41930 }, { "epoch": 2.849232232640304, "grad_norm": 1.3978174924850464, "learning_rate": 0.0006439903519499932, "loss": 3.4591, "step": 41935 }, { "epoch": 2.849571952710966, "grad_norm": 1.480808973312378, "learning_rate": 0.0006439478869411605, "loss": 3.4554, "step": 41940 }, { "epoch": 2.849911672781628, "grad_norm": 1.2465324401855469, "learning_rate": 0.0006439054219323278, "loss": 3.4606, "step": 41945 }, { "epoch": 2.8502513928522895, "grad_norm": 1.8333491086959839, "learning_rate": 0.000643862956923495, "loss": 3.4321, "step": 41950 }, { "epoch": 2.8505911129229515, "grad_norm": 1.1146098375320435, "learning_rate": 0.0006438204919146624, "loss": 3.5423, "step": 41955 }, { "epoch": 2.850930832993613, "grad_norm": 1.4676907062530518, "learning_rate": 0.0006437780269058297, "loss": 3.6794, "step": 41960 }, { "epoch": 2.851270553064275, "grad_norm": 1.386872410774231, "learning_rate": 0.0006437355618969969, "loss": 3.478, "step": 41965 }, { "epoch": 
2.851610273134937, "grad_norm": 0.9704950451850891, "learning_rate": 0.0006436930968881641, "loss": 3.6666, "step": 41970 }, { "epoch": 2.8519499932055985, "grad_norm": 1.2809343338012695, "learning_rate": 0.0006436506318793315, "loss": 3.6291, "step": 41975 }, { "epoch": 2.85228971327626, "grad_norm": 1.0690516233444214, "learning_rate": 0.0006436081668704987, "loss": 3.217, "step": 41980 }, { "epoch": 2.852629433346922, "grad_norm": 1.5613218545913696, "learning_rate": 0.0006435657018616659, "loss": 3.6882, "step": 41985 }, { "epoch": 2.852969153417584, "grad_norm": 1.8479543924331665, "learning_rate": 0.0006435232368528334, "loss": 3.4185, "step": 41990 }, { "epoch": 2.8533088734882455, "grad_norm": 1.4126445055007935, "learning_rate": 0.0006434807718440006, "loss": 3.6226, "step": 41995 }, { "epoch": 2.8536485935589075, "grad_norm": 1.0001646280288696, "learning_rate": 0.0006434383068351678, "loss": 3.7539, "step": 42000 }, { "epoch": 2.853988313629569, "grad_norm": 0.9831754565238953, "learning_rate": 0.0006433958418263352, "loss": 3.5996, "step": 42005 }, { "epoch": 2.854328033700231, "grad_norm": 1.5051827430725098, "learning_rate": 0.0006433533768175024, "loss": 3.1669, "step": 42010 }, { "epoch": 2.854667753770893, "grad_norm": 1.344814419746399, "learning_rate": 0.0006433109118086696, "loss": 3.5273, "step": 42015 }, { "epoch": 2.8550074738415545, "grad_norm": 7.2944416999816895, "learning_rate": 0.0006432684467998369, "loss": 3.6669, "step": 42020 }, { "epoch": 2.855347193912216, "grad_norm": 1.3712999820709229, "learning_rate": 0.0006432259817910043, "loss": 3.7841, "step": 42025 }, { "epoch": 2.855686913982878, "grad_norm": 1.2418346405029297, "learning_rate": 0.0006431835167821715, "loss": 3.3107, "step": 42030 }, { "epoch": 2.85602663405354, "grad_norm": 1.3189661502838135, "learning_rate": 0.0006431410517733388, "loss": 3.2279, "step": 42035 }, { "epoch": 2.8563663541242015, "grad_norm": 1.2747724056243896, "learning_rate": 0.0006430985867645061, 
"loss": 3.5454, "step": 42040 }, { "epoch": 2.8567060741948636, "grad_norm": 1.3387978076934814, "learning_rate": 0.0006430561217556733, "loss": 3.7171, "step": 42045 }, { "epoch": 2.857045794265525, "grad_norm": 1.1831352710723877, "learning_rate": 0.0006430136567468406, "loss": 3.4439, "step": 42050 }, { "epoch": 2.857385514336187, "grad_norm": 1.3132967948913574, "learning_rate": 0.0006429711917380078, "loss": 3.5848, "step": 42055 }, { "epoch": 2.857725234406849, "grad_norm": 1.2201640605926514, "learning_rate": 0.0006429287267291752, "loss": 3.5041, "step": 42060 }, { "epoch": 2.8580649544775105, "grad_norm": 1.5986663103103638, "learning_rate": 0.0006428862617203425, "loss": 3.6253, "step": 42065 }, { "epoch": 2.858404674548172, "grad_norm": 1.5643627643585205, "learning_rate": 0.0006428437967115097, "loss": 3.5344, "step": 42070 }, { "epoch": 2.8587443946188342, "grad_norm": 2.1212947368621826, "learning_rate": 0.000642801331702677, "loss": 3.5025, "step": 42075 }, { "epoch": 2.859084114689496, "grad_norm": 1.48861825466156, "learning_rate": 0.0006427588666938443, "loss": 3.5319, "step": 42080 }, { "epoch": 2.8594238347601575, "grad_norm": 1.679157018661499, "learning_rate": 0.0006427164016850115, "loss": 3.3761, "step": 42085 }, { "epoch": 2.8597635548308196, "grad_norm": 1.1249089241027832, "learning_rate": 0.0006426739366761787, "loss": 3.5869, "step": 42090 }, { "epoch": 2.860103274901481, "grad_norm": 1.3629299402236938, "learning_rate": 0.0006426314716673462, "loss": 3.4845, "step": 42095 }, { "epoch": 2.860442994972143, "grad_norm": 1.3830231428146362, "learning_rate": 0.0006425890066585134, "loss": 3.5621, "step": 42100 }, { "epoch": 2.860782715042805, "grad_norm": 1.1218652725219727, "learning_rate": 0.0006425465416496806, "loss": 3.2323, "step": 42105 }, { "epoch": 2.8611224351134665, "grad_norm": 1.26720130443573, "learning_rate": 0.000642504076640848, "loss": 3.7909, "step": 42110 }, { "epoch": 2.861462155184128, "grad_norm": 1.4170265197753906, 
"learning_rate": 0.0006424616116320152, "loss": 3.5134, "step": 42115 }, { "epoch": 2.8618018752547902, "grad_norm": 6.874675273895264, "learning_rate": 0.0006424191466231824, "loss": 3.6232, "step": 42120 }, { "epoch": 2.862141595325452, "grad_norm": 1.2316075563430786, "learning_rate": 0.0006423766816143498, "loss": 3.5788, "step": 42125 }, { "epoch": 2.8624813153961135, "grad_norm": 1.3027806282043457, "learning_rate": 0.0006423342166055171, "loss": 3.5955, "step": 42130 }, { "epoch": 2.8628210354667756, "grad_norm": 1.542406678199768, "learning_rate": 0.0006422917515966843, "loss": 3.5777, "step": 42135 }, { "epoch": 2.863160755537437, "grad_norm": 1.5318810939788818, "learning_rate": 0.0006422492865878517, "loss": 3.1752, "step": 42140 }, { "epoch": 2.863500475608099, "grad_norm": 1.7692326307296753, "learning_rate": 0.0006422068215790189, "loss": 3.5167, "step": 42145 }, { "epoch": 2.863840195678761, "grad_norm": 1.7187966108322144, "learning_rate": 0.0006421643565701861, "loss": 3.437, "step": 42150 }, { "epoch": 2.8641799157494225, "grad_norm": 1.319646954536438, "learning_rate": 0.0006421218915613534, "loss": 3.6253, "step": 42155 }, { "epoch": 2.864519635820084, "grad_norm": 1.20946204662323, "learning_rate": 0.0006420794265525207, "loss": 3.4456, "step": 42160 }, { "epoch": 2.8648593558907463, "grad_norm": 1.2019014358520508, "learning_rate": 0.0006420369615436881, "loss": 3.2922, "step": 42165 }, { "epoch": 2.865199075961408, "grad_norm": 1.4609977006912231, "learning_rate": 0.0006419944965348553, "loss": 3.5204, "step": 42170 }, { "epoch": 2.8655387960320695, "grad_norm": 1.1038025617599487, "learning_rate": 0.0006419520315260226, "loss": 3.5056, "step": 42175 }, { "epoch": 2.8658785161027316, "grad_norm": 1.2815457582473755, "learning_rate": 0.0006419095665171899, "loss": 3.6593, "step": 42180 }, { "epoch": 2.866218236173393, "grad_norm": 1.2408849000930786, "learning_rate": 0.0006418671015083571, "loss": 3.2442, "step": 42185 }, { "epoch": 
2.866557956244055, "grad_norm": 1.5215480327606201, "learning_rate": 0.0006418246364995244, "loss": 3.5364, "step": 42190 }, { "epoch": 2.866897676314717, "grad_norm": 1.4937330484390259, "learning_rate": 0.0006417821714906917, "loss": 3.5567, "step": 42195 }, { "epoch": 2.8672373963853786, "grad_norm": 1.6224828958511353, "learning_rate": 0.000641739706481859, "loss": 3.5105, "step": 42200 }, { "epoch": 2.86757711645604, "grad_norm": 1.4888546466827393, "learning_rate": 0.0006416972414730262, "loss": 3.4647, "step": 42205 }, { "epoch": 2.8679168365267023, "grad_norm": 1.4204341173171997, "learning_rate": 0.0006416547764641936, "loss": 3.5464, "step": 42210 }, { "epoch": 2.868256556597364, "grad_norm": 1.498420000076294, "learning_rate": 0.0006416123114553608, "loss": 3.4627, "step": 42215 }, { "epoch": 2.8685962766680255, "grad_norm": 1.4209169149398804, "learning_rate": 0.000641569846446528, "loss": 3.3055, "step": 42220 }, { "epoch": 2.868935996738687, "grad_norm": 1.3610239028930664, "learning_rate": 0.0006415273814376954, "loss": 3.6093, "step": 42225 }, { "epoch": 2.8692757168093492, "grad_norm": 1.65567946434021, "learning_rate": 0.0006414849164288626, "loss": 3.406, "step": 42230 }, { "epoch": 2.869615436880011, "grad_norm": 1.1972793340682983, "learning_rate": 0.0006414424514200299, "loss": 3.5309, "step": 42235 }, { "epoch": 2.8699551569506725, "grad_norm": 1.3480504751205444, "learning_rate": 0.0006413999864111973, "loss": 3.6188, "step": 42240 }, { "epoch": 2.8702948770213346, "grad_norm": 1.6629385948181152, "learning_rate": 0.0006413575214023645, "loss": 3.6519, "step": 42245 }, { "epoch": 2.870634597091996, "grad_norm": 1.2976957559585571, "learning_rate": 0.0006413150563935317, "loss": 3.4522, "step": 42250 }, { "epoch": 2.870974317162658, "grad_norm": 1.4366239309310913, "learning_rate": 0.000641272591384699, "loss": 3.6369, "step": 42255 }, { "epoch": 2.87131403723332, "grad_norm": 1.1843347549438477, "learning_rate": 0.0006412301263758663, 
"loss": 3.4665, "step": 42260 }, { "epoch": 2.8716537573039815, "grad_norm": 1.2660995721817017, "learning_rate": 0.0006411876613670335, "loss": 3.4597, "step": 42265 }, { "epoch": 2.871993477374643, "grad_norm": 1.648606300354004, "learning_rate": 0.0006411451963582009, "loss": 3.4019, "step": 42270 }, { "epoch": 2.872333197445305, "grad_norm": 1.3289839029312134, "learning_rate": 0.0006411027313493682, "loss": 3.6518, "step": 42275 }, { "epoch": 2.872672917515967, "grad_norm": 1.1309610605239868, "learning_rate": 0.0006410602663405354, "loss": 3.5784, "step": 42280 }, { "epoch": 2.8730126375866285, "grad_norm": 1.6523432731628418, "learning_rate": 0.0006410178013317027, "loss": 3.449, "step": 42285 }, { "epoch": 2.87335235765729, "grad_norm": 1.2916451692581177, "learning_rate": 0.00064097533632287, "loss": 3.4462, "step": 42290 }, { "epoch": 2.873692077727952, "grad_norm": 1.2760522365570068, "learning_rate": 0.0006409328713140372, "loss": 3.5622, "step": 42295 }, { "epoch": 2.874031797798614, "grad_norm": 1.2350510358810425, "learning_rate": 0.0006408904063052045, "loss": 3.6896, "step": 42300 }, { "epoch": 2.8743715178692755, "grad_norm": 1.4094699621200562, "learning_rate": 0.0006408479412963718, "loss": 3.5263, "step": 42305 }, { "epoch": 2.8747112379399375, "grad_norm": 1.4192026853561401, "learning_rate": 0.0006408054762875391, "loss": 3.5077, "step": 42310 }, { "epoch": 2.875050958010599, "grad_norm": 1.3551206588745117, "learning_rate": 0.0006407630112787064, "loss": 3.566, "step": 42315 }, { "epoch": 2.875390678081261, "grad_norm": 1.5490059852600098, "learning_rate": 0.0006407205462698736, "loss": 3.5603, "step": 42320 }, { "epoch": 2.875730398151923, "grad_norm": 1.1185131072998047, "learning_rate": 0.0006406780812610409, "loss": 3.4551, "step": 42325 }, { "epoch": 2.8760701182225845, "grad_norm": 1.2545192241668701, "learning_rate": 0.0006406356162522082, "loss": 3.5169, "step": 42330 }, { "epoch": 2.876409838293246, "grad_norm": 1.5073297023773193, 
"learning_rate": 0.0006405931512433754, "loss": 3.3708, "step": 42335 }, { "epoch": 2.876749558363908, "grad_norm": 1.1429128646850586, "learning_rate": 0.0006405506862345428, "loss": 3.5848, "step": 42340 }, { "epoch": 2.87708927843457, "grad_norm": 1.719088077545166, "learning_rate": 0.0006405082212257101, "loss": 3.327, "step": 42345 }, { "epoch": 2.8774289985052315, "grad_norm": 1.4150733947753906, "learning_rate": 0.0006404657562168773, "loss": 3.7275, "step": 42350 }, { "epoch": 2.8777687185758936, "grad_norm": 1.2134373188018799, "learning_rate": 0.0006404232912080445, "loss": 3.5154, "step": 42355 }, { "epoch": 2.878108438646555, "grad_norm": 1.111391544342041, "learning_rate": 0.0006403808261992119, "loss": 3.5923, "step": 42360 }, { "epoch": 2.878448158717217, "grad_norm": 1.1118199825286865, "learning_rate": 0.0006403383611903791, "loss": 3.4843, "step": 42365 }, { "epoch": 2.878787878787879, "grad_norm": 1.5240578651428223, "learning_rate": 0.0006402958961815463, "loss": 3.6012, "step": 42370 }, { "epoch": 2.8791275988585405, "grad_norm": 1.2773354053497314, "learning_rate": 0.0006402534311727138, "loss": 3.8245, "step": 42375 }, { "epoch": 2.879467318929202, "grad_norm": 1.2220485210418701, "learning_rate": 0.000640210966163881, "loss": 3.5722, "step": 42380 }, { "epoch": 2.8798070389998642, "grad_norm": 1.030526041984558, "learning_rate": 0.0006401685011550482, "loss": 3.3564, "step": 42385 }, { "epoch": 2.880146759070526, "grad_norm": 1.0690104961395264, "learning_rate": 0.0006401260361462156, "loss": 3.4969, "step": 42390 }, { "epoch": 2.8804864791411875, "grad_norm": 1.5318810939788818, "learning_rate": 0.0006400835711373828, "loss": 3.5688, "step": 42395 }, { "epoch": 2.8808261992118496, "grad_norm": 1.5390149354934692, "learning_rate": 0.00064004110612855, "loss": 3.7346, "step": 42400 }, { "epoch": 2.881165919282511, "grad_norm": 1.3070091009140015, "learning_rate": 0.0006399986411197174, "loss": 3.3894, "step": 42405 }, { "epoch": 
2.881505639353173, "grad_norm": 1.245890736579895, "learning_rate": 0.0006399561761108847, "loss": 3.5822, "step": 42410 }, { "epoch": 2.881845359423835, "grad_norm": 1.4617892503738403, "learning_rate": 0.0006399137111020519, "loss": 3.5793, "step": 42415 }, { "epoch": 2.8821850794944965, "grad_norm": 1.572035789489746, "learning_rate": 0.0006398712460932192, "loss": 3.8211, "step": 42420 }, { "epoch": 2.882524799565158, "grad_norm": 1.4186859130859375, "learning_rate": 0.0006398287810843865, "loss": 3.2743, "step": 42425 }, { "epoch": 2.8828645196358202, "grad_norm": 1.6587047576904297, "learning_rate": 0.0006397863160755537, "loss": 3.6516, "step": 42430 }, { "epoch": 2.883204239706482, "grad_norm": 1.1434472799301147, "learning_rate": 0.000639743851066721, "loss": 3.5779, "step": 42435 }, { "epoch": 2.8835439597771435, "grad_norm": 1.374503254890442, "learning_rate": 0.0006397013860578884, "loss": 3.4379, "step": 42440 }, { "epoch": 2.8838836798478056, "grad_norm": 1.2302442789077759, "learning_rate": 0.0006396589210490556, "loss": 3.891, "step": 42445 }, { "epoch": 2.884223399918467, "grad_norm": 1.570159912109375, "learning_rate": 0.0006396164560402229, "loss": 3.3482, "step": 42450 }, { "epoch": 2.884563119989129, "grad_norm": 1.2916616201400757, "learning_rate": 0.0006395739910313901, "loss": 3.558, "step": 42455 }, { "epoch": 2.884902840059791, "grad_norm": 1.1444939374923706, "learning_rate": 0.0006395315260225574, "loss": 3.7005, "step": 42460 }, { "epoch": 2.8852425601304525, "grad_norm": 1.0562695264816284, "learning_rate": 0.0006394890610137247, "loss": 3.5736, "step": 42465 }, { "epoch": 2.885582280201114, "grad_norm": 1.3545535802841187, "learning_rate": 0.0006394465960048919, "loss": 3.4839, "step": 42470 }, { "epoch": 2.8859220002717763, "grad_norm": 1.6357098817825317, "learning_rate": 0.0006394041309960593, "loss": 3.7045, "step": 42475 }, { "epoch": 2.886261720342438, "grad_norm": 1.212999701499939, "learning_rate": 0.0006393616659872266, 
"loss": 3.7217, "step": 42480 }, { "epoch": 2.8866014404130995, "grad_norm": 1.2148823738098145, "learning_rate": 0.0006393192009783938, "loss": 3.5507, "step": 42485 }, { "epoch": 2.8869411604837616, "grad_norm": 1.2529736757278442, "learning_rate": 0.000639276735969561, "loss": 3.5219, "step": 42490 }, { "epoch": 2.8872808805544232, "grad_norm": 1.2440544366836548, "learning_rate": 0.0006392342709607284, "loss": 3.6603, "step": 42495 }, { "epoch": 2.887620600625085, "grad_norm": 1.467263102531433, "learning_rate": 0.0006391918059518956, "loss": 3.4477, "step": 42500 }, { "epoch": 2.887960320695747, "grad_norm": 1.1656944751739502, "learning_rate": 0.0006391493409430629, "loss": 3.5354, "step": 42505 }, { "epoch": 2.8883000407664086, "grad_norm": 1.8366087675094604, "learning_rate": 0.0006391068759342303, "loss": 3.4677, "step": 42510 }, { "epoch": 2.88863976083707, "grad_norm": 1.4273821115493774, "learning_rate": 0.0006390644109253975, "loss": 3.2275, "step": 42515 }, { "epoch": 2.8889794809077323, "grad_norm": 1.4660345315933228, "learning_rate": 0.0006390219459165648, "loss": 3.6717, "step": 42520 }, { "epoch": 2.889319200978394, "grad_norm": 1.7326816320419312, "learning_rate": 0.0006389794809077321, "loss": 3.4576, "step": 42525 }, { "epoch": 2.8896589210490555, "grad_norm": 1.3651496171951294, "learning_rate": 0.0006389370158988993, "loss": 3.5902, "step": 42530 }, { "epoch": 2.8899986411197176, "grad_norm": 1.2552549839019775, "learning_rate": 0.0006388945508900666, "loss": 3.2276, "step": 42535 }, { "epoch": 2.8903383611903792, "grad_norm": 1.5088146924972534, "learning_rate": 0.0006388520858812338, "loss": 3.6806, "step": 42540 }, { "epoch": 2.890678081261041, "grad_norm": 1.3112224340438843, "learning_rate": 0.0006388096208724012, "loss": 3.3878, "step": 42545 }, { "epoch": 2.891017801331703, "grad_norm": 1.0755221843719482, "learning_rate": 0.0006387671558635685, "loss": 3.3948, "step": 42550 }, { "epoch": 2.8913575214023646, "grad_norm": 
1.3440998792648315, "learning_rate": 0.0006387246908547357, "loss": 3.4268, "step": 42555 }, { "epoch": 2.891697241473026, "grad_norm": 1.257868766784668, "learning_rate": 0.000638682225845903, "loss": 3.6705, "step": 42560 }, { "epoch": 2.8920369615436883, "grad_norm": 1.4976718425750732, "learning_rate": 0.0006386397608370703, "loss": 3.6649, "step": 42565 }, { "epoch": 2.89237668161435, "grad_norm": 1.1405612230300903, "learning_rate": 0.0006385972958282375, "loss": 3.571, "step": 42570 }, { "epoch": 2.8927164016850115, "grad_norm": 1.1420698165893555, "learning_rate": 0.0006385548308194048, "loss": 3.5235, "step": 42575 }, { "epoch": 2.893056121755673, "grad_norm": 1.2150942087173462, "learning_rate": 0.0006385123658105722, "loss": 3.4145, "step": 42580 }, { "epoch": 2.8933958418263352, "grad_norm": 1.1393877267837524, "learning_rate": 0.0006384699008017394, "loss": 3.547, "step": 42585 }, { "epoch": 2.893735561896997, "grad_norm": 1.4931371212005615, "learning_rate": 0.0006384274357929066, "loss": 3.4714, "step": 42590 }, { "epoch": 2.8940752819676585, "grad_norm": 1.4190737009048462, "learning_rate": 0.000638384970784074, "loss": 3.6591, "step": 42595 }, { "epoch": 2.8944150020383206, "grad_norm": 1.6424216032028198, "learning_rate": 0.0006383425057752412, "loss": 3.5956, "step": 42600 }, { "epoch": 2.894754722108982, "grad_norm": 1.2010462284088135, "learning_rate": 0.0006383000407664084, "loss": 3.2472, "step": 42605 }, { "epoch": 2.895094442179644, "grad_norm": 1.409401535987854, "learning_rate": 0.0006382575757575758, "loss": 3.4705, "step": 42610 }, { "epoch": 2.8954341622503055, "grad_norm": 1.2451269626617432, "learning_rate": 0.0006382151107487431, "loss": 3.3484, "step": 42615 }, { "epoch": 2.8957738823209676, "grad_norm": 1.4466736316680908, "learning_rate": 0.0006381726457399103, "loss": 3.5574, "step": 42620 }, { "epoch": 2.896113602391629, "grad_norm": 3.6115469932556152, "learning_rate": 0.0006381301807310777, "loss": 3.4223, "step": 42625 }, { 
"epoch": 2.896453322462291, "grad_norm": 1.7482253313064575, "learning_rate": 0.0006380877157222449, "loss": 3.5157, "step": 42630 }, { "epoch": 2.896793042532953, "grad_norm": 1.3416839838027954, "learning_rate": 0.0006380452507134121, "loss": 3.6026, "step": 42635 }, { "epoch": 2.8971327626036145, "grad_norm": 1.4102038145065308, "learning_rate": 0.0006380027857045794, "loss": 3.4795, "step": 42640 }, { "epoch": 2.897472482674276, "grad_norm": 1.3841062784194946, "learning_rate": 0.0006379603206957467, "loss": 3.8655, "step": 42645 }, { "epoch": 2.8978122027449382, "grad_norm": 0.84596186876297, "learning_rate": 0.000637917855686914, "loss": 3.4486, "step": 42650 }, { "epoch": 2.8981519228156, "grad_norm": 1.2210054397583008, "learning_rate": 0.0006378753906780813, "loss": 3.4098, "step": 42655 }, { "epoch": 2.8984916428862615, "grad_norm": 1.4189064502716064, "learning_rate": 0.0006378329256692486, "loss": 3.5337, "step": 42660 }, { "epoch": 2.8988313629569236, "grad_norm": 1.1732168197631836, "learning_rate": 0.0006377904606604158, "loss": 3.6398, "step": 42665 }, { "epoch": 2.899171083027585, "grad_norm": 1.3393714427947998, "learning_rate": 0.0006377479956515831, "loss": 3.3925, "step": 42670 }, { "epoch": 2.899510803098247, "grad_norm": 1.405655026435852, "learning_rate": 0.0006377055306427504, "loss": 3.5878, "step": 42675 }, { "epoch": 2.899850523168909, "grad_norm": 1.3699077367782593, "learning_rate": 0.0006376630656339176, "loss": 3.447, "step": 42680 }, { "epoch": 2.9001902432395705, "grad_norm": 1.5830485820770264, "learning_rate": 0.000637620600625085, "loss": 3.5716, "step": 42685 }, { "epoch": 2.900529963310232, "grad_norm": 1.543452262878418, "learning_rate": 0.0006375781356162523, "loss": 3.4758, "step": 42690 }, { "epoch": 2.9008696833808942, "grad_norm": 1.175765037536621, "learning_rate": 0.0006375356706074195, "loss": 3.5612, "step": 42695 }, { "epoch": 2.901209403451556, "grad_norm": 1.074460506439209, "learning_rate": 0.0006374932055985868, 
"loss": 3.527, "step": 42700 }, { "epoch": 2.9015491235222175, "grad_norm": 1.437981367111206, "learning_rate": 0.000637450740589754, "loss": 3.7455, "step": 42705 }, { "epoch": 2.9018888435928796, "grad_norm": 1.3990809917449951, "learning_rate": 0.0006374082755809213, "loss": 3.6214, "step": 42710 }, { "epoch": 2.902228563663541, "grad_norm": 1.2969629764556885, "learning_rate": 0.0006373658105720886, "loss": 3.4676, "step": 42715 }, { "epoch": 2.902568283734203, "grad_norm": 1.1276081800460815, "learning_rate": 0.0006373233455632559, "loss": 3.4493, "step": 42720 }, { "epoch": 2.902908003804865, "grad_norm": 1.3689334392547607, "learning_rate": 0.0006372808805544232, "loss": 3.6373, "step": 42725 }, { "epoch": 2.9032477238755265, "grad_norm": 1.877745270729065, "learning_rate": 0.0006372384155455905, "loss": 3.5856, "step": 42730 }, { "epoch": 2.903587443946188, "grad_norm": 0.9934160709381104, "learning_rate": 0.0006371959505367577, "loss": 3.5927, "step": 42735 }, { "epoch": 2.9039271640168502, "grad_norm": 1.5436670780181885, "learning_rate": 0.0006371534855279249, "loss": 3.5416, "step": 42740 }, { "epoch": 2.904266884087512, "grad_norm": 1.3431291580200195, "learning_rate": 0.0006371110205190923, "loss": 3.3846, "step": 42745 }, { "epoch": 2.9046066041581735, "grad_norm": 1.5728691816329956, "learning_rate": 0.0006370685555102595, "loss": 3.4476, "step": 42750 }, { "epoch": 2.9049463242288356, "grad_norm": 1.2557244300842285, "learning_rate": 0.0006370260905014268, "loss": 3.4974, "step": 42755 }, { "epoch": 2.905286044299497, "grad_norm": 1.3156253099441528, "learning_rate": 0.0006369836254925942, "loss": 3.5538, "step": 42760 }, { "epoch": 2.905625764370159, "grad_norm": 1.2981493473052979, "learning_rate": 0.0006369411604837614, "loss": 3.4463, "step": 42765 }, { "epoch": 2.905965484440821, "grad_norm": 1.3717045783996582, "learning_rate": 0.0006368986954749286, "loss": 3.5422, "step": 42770 }, { "epoch": 2.9063052045114826, "grad_norm": 
1.0539668798446655, "learning_rate": 0.000636856230466096, "loss": 3.7529, "step": 42775 }, { "epoch": 2.906644924582144, "grad_norm": 1.3704496622085571, "learning_rate": 0.0006368137654572632, "loss": 3.5693, "step": 42780 }, { "epoch": 2.9069846446528063, "grad_norm": 1.5620452165603638, "learning_rate": 0.0006367713004484304, "loss": 3.2649, "step": 42785 }, { "epoch": 2.907324364723468, "grad_norm": 1.2983030080795288, "learning_rate": 0.0006367288354395979, "loss": 3.5568, "step": 42790 }, { "epoch": 2.9076640847941295, "grad_norm": 1.2597336769104004, "learning_rate": 0.0006366863704307651, "loss": 3.3508, "step": 42795 }, { "epoch": 2.9080038048647916, "grad_norm": 2.4568779468536377, "learning_rate": 0.0006366439054219323, "loss": 3.6333, "step": 42800 }, { "epoch": 2.9083435249354532, "grad_norm": 1.9975894689559937, "learning_rate": 0.0006366014404130996, "loss": 3.4307, "step": 42805 }, { "epoch": 2.908683245006115, "grad_norm": 1.0480239391326904, "learning_rate": 0.0006365589754042669, "loss": 3.7466, "step": 42810 }, { "epoch": 2.909022965076777, "grad_norm": 1.4603917598724365, "learning_rate": 0.0006365165103954341, "loss": 3.6106, "step": 42815 }, { "epoch": 2.9093626851474386, "grad_norm": 0.9930907487869263, "learning_rate": 0.0006364740453866014, "loss": 3.4305, "step": 42820 }, { "epoch": 2.9097024052181, "grad_norm": 1.1173278093338013, "learning_rate": 0.0006364315803777688, "loss": 3.5091, "step": 42825 }, { "epoch": 2.9100421252887623, "grad_norm": 1.0534183979034424, "learning_rate": 0.000636389115368936, "loss": 3.3692, "step": 42830 }, { "epoch": 2.910381845359424, "grad_norm": 1.334654688835144, "learning_rate": 0.0006363466503601033, "loss": 3.7217, "step": 42835 }, { "epoch": 2.9107215654300855, "grad_norm": 1.1752150058746338, "learning_rate": 0.0006363041853512705, "loss": 3.6366, "step": 42840 }, { "epoch": 2.9110612855007476, "grad_norm": 1.1828851699829102, "learning_rate": 0.0006362617203424379, "loss": 3.5083, "step": 42845 }, 
{ "epoch": 2.9114010055714092, "grad_norm": 1.34893000125885, "learning_rate": 0.0006362192553336051, "loss": 3.4671, "step": 42850 }, { "epoch": 2.911740725642071, "grad_norm": 1.6009347438812256, "learning_rate": 0.0006361767903247723, "loss": 3.4772, "step": 42855 }, { "epoch": 2.912080445712733, "grad_norm": 3.3987200260162354, "learning_rate": 0.0006361343253159398, "loss": 3.5774, "step": 42860 }, { "epoch": 2.9124201657833946, "grad_norm": 1.5075416564941406, "learning_rate": 0.000636091860307107, "loss": 3.5945, "step": 42865 }, { "epoch": 2.912759885854056, "grad_norm": 1.2610745429992676, "learning_rate": 0.0006360493952982742, "loss": 3.655, "step": 42870 }, { "epoch": 2.9130996059247183, "grad_norm": 1.2957229614257812, "learning_rate": 0.0006360069302894416, "loss": 3.5459, "step": 42875 }, { "epoch": 2.91343932599538, "grad_norm": 1.0434277057647705, "learning_rate": 0.0006359644652806088, "loss": 3.4988, "step": 42880 }, { "epoch": 2.9137790460660415, "grad_norm": 1.1003338098526, "learning_rate": 0.000635922000271776, "loss": 3.8286, "step": 42885 }, { "epoch": 2.9141187661367036, "grad_norm": 1.320120096206665, "learning_rate": 0.0006358795352629433, "loss": 3.4578, "step": 42890 }, { "epoch": 2.9144584862073653, "grad_norm": 1.3045713901519775, "learning_rate": 0.0006358370702541107, "loss": 3.5939, "step": 42895 }, { "epoch": 2.914798206278027, "grad_norm": 1.2940237522125244, "learning_rate": 0.0006357946052452779, "loss": 3.304, "step": 42900 }, { "epoch": 2.915137926348689, "grad_norm": 1.0343537330627441, "learning_rate": 0.0006357521402364452, "loss": 3.6269, "step": 42905 }, { "epoch": 2.9154776464193506, "grad_norm": 1.303252100944519, "learning_rate": 0.0006357096752276125, "loss": 3.4258, "step": 42910 }, { "epoch": 2.915817366490012, "grad_norm": 1.0357720851898193, "learning_rate": 0.0006356672102187797, "loss": 3.5644, "step": 42915 }, { "epoch": 2.916157086560674, "grad_norm": 1.859075665473938, "learning_rate": 0.000635624745209947, 
"loss": 3.551, "step": 42920 }, { "epoch": 2.916496806631336, "grad_norm": 1.2930209636688232, "learning_rate": 0.0006355822802011143, "loss": 3.7057, "step": 42925 }, { "epoch": 2.9168365267019976, "grad_norm": 1.476000189781189, "learning_rate": 0.0006355398151922816, "loss": 3.4282, "step": 42930 }, { "epoch": 2.917176246772659, "grad_norm": 1.412238359451294, "learning_rate": 0.0006354973501834489, "loss": 3.6304, "step": 42935 }, { "epoch": 2.9175159668433213, "grad_norm": 1.060352087020874, "learning_rate": 0.0006354548851746161, "loss": 3.7426, "step": 42940 }, { "epoch": 2.917855686913983, "grad_norm": 1.1098310947418213, "learning_rate": 0.0006354124201657834, "loss": 3.7706, "step": 42945 }, { "epoch": 2.9181954069846445, "grad_norm": 1.1490663290023804, "learning_rate": 0.0006353699551569507, "loss": 3.4365, "step": 42950 }, { "epoch": 2.918535127055306, "grad_norm": 1.7138173580169678, "learning_rate": 0.0006353274901481179, "loss": 3.384, "step": 42955 }, { "epoch": 2.9188748471259682, "grad_norm": 1.3115936517715454, "learning_rate": 0.0006352850251392852, "loss": 3.4713, "step": 42960 }, { "epoch": 2.91921456719663, "grad_norm": 1.0490459203720093, "learning_rate": 0.0006352425601304526, "loss": 3.6262, "step": 42965 }, { "epoch": 2.9195542872672915, "grad_norm": 1.4181268215179443, "learning_rate": 0.0006352000951216198, "loss": 3.6721, "step": 42970 }, { "epoch": 2.9198940073379536, "grad_norm": 0.9841074347496033, "learning_rate": 0.000635157630112787, "loss": 3.4078, "step": 42975 }, { "epoch": 2.920233727408615, "grad_norm": 1.1546909809112549, "learning_rate": 0.0006351151651039544, "loss": 3.7536, "step": 42980 }, { "epoch": 2.920573447479277, "grad_norm": 1.6858421564102173, "learning_rate": 0.0006350727000951216, "loss": 3.3658, "step": 42985 }, { "epoch": 2.920913167549939, "grad_norm": 1.7596908807754517, "learning_rate": 0.0006350302350862888, "loss": 3.516, "step": 42990 }, { "epoch": 2.9212528876206005, "grad_norm": 1.0506376028060913, 
"learning_rate": 0.0006349877700774563, "loss": 3.502, "step": 42995 }, { "epoch": 2.921592607691262, "grad_norm": 3.5714709758758545, "learning_rate": 0.0006349453050686235, "loss": 3.5305, "step": 43000 }, { "epoch": 2.9219323277619242, "grad_norm": 1.1660065650939941, "learning_rate": 0.0006349028400597907, "loss": 3.7422, "step": 43005 }, { "epoch": 2.922272047832586, "grad_norm": 1.1042402982711792, "learning_rate": 0.0006348603750509581, "loss": 3.5062, "step": 43010 }, { "epoch": 2.9226117679032475, "grad_norm": 1.4178889989852905, "learning_rate": 0.0006348179100421253, "loss": 3.2011, "step": 43015 }, { "epoch": 2.9229514879739096, "grad_norm": 1.495905876159668, "learning_rate": 0.0006347754450332925, "loss": 3.673, "step": 43020 }, { "epoch": 2.923291208044571, "grad_norm": 1.2650092840194702, "learning_rate": 0.0006347329800244599, "loss": 3.7014, "step": 43025 }, { "epoch": 2.923630928115233, "grad_norm": 1.454555630683899, "learning_rate": 0.0006346905150156272, "loss": 3.55, "step": 43030 }, { "epoch": 2.923970648185895, "grad_norm": 0.9633775353431702, "learning_rate": 0.0006346480500067944, "loss": 3.5616, "step": 43035 }, { "epoch": 2.9243103682565565, "grad_norm": 1.2115331888198853, "learning_rate": 0.0006346055849979617, "loss": 3.5774, "step": 43040 }, { "epoch": 2.924650088327218, "grad_norm": 1.1337119340896606, "learning_rate": 0.000634563119989129, "loss": 3.6256, "step": 43045 }, { "epoch": 2.9249898083978803, "grad_norm": 1.148568034172058, "learning_rate": 0.0006345206549802962, "loss": 3.7608, "step": 43050 }, { "epoch": 2.925329528468542, "grad_norm": 1.3281934261322021, "learning_rate": 0.0006344781899714635, "loss": 3.4234, "step": 43055 }, { "epoch": 2.9256692485392035, "grad_norm": 1.361668348312378, "learning_rate": 0.0006344357249626308, "loss": 3.4729, "step": 43060 }, { "epoch": 2.9260089686098656, "grad_norm": 1.1720653772354126, "learning_rate": 0.0006343932599537981, "loss": 3.6521, "step": 43065 }, { "epoch": 
2.926348688680527, "grad_norm": 1.1872349977493286, "learning_rate": 0.0006343507949449654, "loss": 3.293, "step": 43070 }, { "epoch": 2.926688408751189, "grad_norm": 1.5723365545272827, "learning_rate": 0.0006343083299361327, "loss": 3.5544, "step": 43075 }, { "epoch": 2.927028128821851, "grad_norm": 3.796354055404663, "learning_rate": 0.0006342658649272999, "loss": 3.6563, "step": 43080 }, { "epoch": 2.9273678488925126, "grad_norm": 1.8223849534988403, "learning_rate": 0.0006342233999184672, "loss": 3.5964, "step": 43085 }, { "epoch": 2.927707568963174, "grad_norm": 1.1284785270690918, "learning_rate": 0.0006341809349096344, "loss": 3.305, "step": 43090 }, { "epoch": 2.9280472890338363, "grad_norm": 1.1434601545333862, "learning_rate": 0.0006341384699008017, "loss": 3.3275, "step": 43095 }, { "epoch": 2.928387009104498, "grad_norm": 1.3339396715164185, "learning_rate": 0.0006340960048919691, "loss": 3.4218, "step": 43100 }, { "epoch": 2.9287267291751595, "grad_norm": 1.56771981716156, "learning_rate": 0.0006340535398831363, "loss": 3.4485, "step": 43105 }, { "epoch": 2.9290664492458216, "grad_norm": 1.2084870338439941, "learning_rate": 0.0006340110748743036, "loss": 3.4389, "step": 43110 }, { "epoch": 2.9294061693164832, "grad_norm": 1.1758391857147217, "learning_rate": 0.0006339686098654709, "loss": 3.6413, "step": 43115 }, { "epoch": 2.929745889387145, "grad_norm": 1.4316216707229614, "learning_rate": 0.0006339261448566381, "loss": 3.2506, "step": 43120 }, { "epoch": 2.930085609457807, "grad_norm": 1.0605891942977905, "learning_rate": 0.0006338836798478053, "loss": 3.6662, "step": 43125 }, { "epoch": 2.9304253295284686, "grad_norm": 1.3502293825149536, "learning_rate": 0.0006338412148389727, "loss": 3.5711, "step": 43130 }, { "epoch": 2.93076504959913, "grad_norm": 1.2859015464782715, "learning_rate": 0.00063379874983014, "loss": 3.1511, "step": 43135 }, { "epoch": 2.9311047696697923, "grad_norm": 1.465813398361206, "learning_rate": 0.0006337562848213072, 
"loss": 3.4644, "step": 43140 }, { "epoch": 2.931444489740454, "grad_norm": 1.7667980194091797, "learning_rate": 0.0006337138198124746, "loss": 3.7587, "step": 43145 }, { "epoch": 2.9317842098111155, "grad_norm": 1.2257100343704224, "learning_rate": 0.0006336713548036418, "loss": 3.8929, "step": 43150 }, { "epoch": 2.9321239298817776, "grad_norm": 1.1707199811935425, "learning_rate": 0.000633628889794809, "loss": 3.5855, "step": 43155 }, { "epoch": 2.9324636499524392, "grad_norm": 1.1696478128433228, "learning_rate": 0.0006335864247859764, "loss": 3.5363, "step": 43160 }, { "epoch": 2.932803370023101, "grad_norm": 1.0243908166885376, "learning_rate": 0.0006335439597771436, "loss": 3.6023, "step": 43165 }, { "epoch": 2.933143090093763, "grad_norm": 1.822114109992981, "learning_rate": 0.0006335014947683109, "loss": 3.2335, "step": 43170 }, { "epoch": 2.9334828101644246, "grad_norm": 1.5140540599822998, "learning_rate": 0.0006334590297594783, "loss": 3.4545, "step": 43175 }, { "epoch": 2.933822530235086, "grad_norm": 1.526536226272583, "learning_rate": 0.0006334165647506455, "loss": 3.6643, "step": 43180 }, { "epoch": 2.9341622503057483, "grad_norm": 0.9479661583900452, "learning_rate": 0.0006333740997418128, "loss": 3.4274, "step": 43185 }, { "epoch": 2.93450197037641, "grad_norm": 1.2516785860061646, "learning_rate": 0.00063333163473298, "loss": 3.5739, "step": 43190 }, { "epoch": 2.9348416904470715, "grad_norm": 1.8157711029052734, "learning_rate": 0.0006332891697241473, "loss": 3.5455, "step": 43195 }, { "epoch": 2.9351814105177336, "grad_norm": 1.5581843852996826, "learning_rate": 0.0006332467047153146, "loss": 3.5793, "step": 43200 }, { "epoch": 2.9355211305883953, "grad_norm": 2.849411725997925, "learning_rate": 0.0006332042397064819, "loss": 3.6522, "step": 43205 }, { "epoch": 2.935860850659057, "grad_norm": 1.062954306602478, "learning_rate": 0.0006331617746976492, "loss": 3.6245, "step": 43210 }, { "epoch": 2.936200570729719, "grad_norm": 1.1842594146728516, 
"learning_rate": 0.0006331193096888165, "loss": 3.5387, "step": 43215 }, { "epoch": 2.9365402908003806, "grad_norm": 1.408599853515625, "learning_rate": 0.0006330768446799837, "loss": 3.4884, "step": 43220 }, { "epoch": 2.9368800108710422, "grad_norm": 1.3358200788497925, "learning_rate": 0.000633034379671151, "loss": 3.4891, "step": 43225 }, { "epoch": 2.9372197309417043, "grad_norm": 1.4055432081222534, "learning_rate": 0.0006329919146623183, "loss": 3.5401, "step": 43230 }, { "epoch": 2.937559451012366, "grad_norm": 1.3422985076904297, "learning_rate": 0.0006329494496534855, "loss": 3.3596, "step": 43235 }, { "epoch": 2.9378991710830276, "grad_norm": 1.4558990001678467, "learning_rate": 0.0006329069846446528, "loss": 3.5339, "step": 43240 }, { "epoch": 2.9382388911536896, "grad_norm": 1.3887803554534912, "learning_rate": 0.0006328645196358202, "loss": 3.3492, "step": 43245 }, { "epoch": 2.9385786112243513, "grad_norm": 1.214064121246338, "learning_rate": 0.0006328220546269874, "loss": 3.4825, "step": 43250 }, { "epoch": 2.938918331295013, "grad_norm": 1.4184765815734863, "learning_rate": 0.0006327795896181546, "loss": 3.6709, "step": 43255 }, { "epoch": 2.9392580513656745, "grad_norm": 1.431591510772705, "learning_rate": 0.000632737124609322, "loss": 3.4831, "step": 43260 }, { "epoch": 2.9395977714363366, "grad_norm": 1.5056920051574707, "learning_rate": 0.0006326946596004892, "loss": 3.3708, "step": 43265 }, { "epoch": 2.9399374915069982, "grad_norm": 1.4572066068649292, "learning_rate": 0.0006326521945916564, "loss": 3.5639, "step": 43270 }, { "epoch": 2.94027721157766, "grad_norm": 1.0909233093261719, "learning_rate": 0.0006326097295828239, "loss": 3.406, "step": 43275 }, { "epoch": 2.940616931648322, "grad_norm": 1.5936126708984375, "learning_rate": 0.0006325672645739911, "loss": 3.5234, "step": 43280 }, { "epoch": 2.9409566517189836, "grad_norm": 1.4077632427215576, "learning_rate": 0.0006325247995651583, "loss": 3.6819, "step": 43285 }, { "epoch": 
2.941296371789645, "grad_norm": 1.0380257368087769, "learning_rate": 0.0006324823345563256, "loss": 3.557, "step": 43290 }, { "epoch": 2.941636091860307, "grad_norm": 1.3328310251235962, "learning_rate": 0.0006324398695474929, "loss": 3.6065, "step": 43295 }, { "epoch": 2.941975811930969, "grad_norm": 1.6472783088684082, "learning_rate": 0.0006323974045386601, "loss": 3.6647, "step": 43300 }, { "epoch": 2.9423155320016305, "grad_norm": 1.1918997764587402, "learning_rate": 0.0006323549395298274, "loss": 3.3118, "step": 43305 }, { "epoch": 2.942655252072292, "grad_norm": 1.2324390411376953, "learning_rate": 0.0006323124745209948, "loss": 3.5942, "step": 43310 }, { "epoch": 2.9429949721429542, "grad_norm": 9.842827796936035, "learning_rate": 0.000632270009512162, "loss": 3.303, "step": 43315 }, { "epoch": 2.943334692213616, "grad_norm": 1.8581806421279907, "learning_rate": 0.0006322275445033293, "loss": 3.7588, "step": 43320 }, { "epoch": 2.9436744122842775, "grad_norm": 1.9386318922042847, "learning_rate": 0.0006321850794944965, "loss": 3.3249, "step": 43325 }, { "epoch": 2.9440141323549396, "grad_norm": 1.2238622903823853, "learning_rate": 0.0006321426144856638, "loss": 3.3727, "step": 43330 }, { "epoch": 2.944353852425601, "grad_norm": 1.169459342956543, "learning_rate": 0.0006321001494768311, "loss": 3.5149, "step": 43335 }, { "epoch": 2.944693572496263, "grad_norm": 1.004610300064087, "learning_rate": 0.0006320576844679983, "loss": 3.8538, "step": 43340 }, { "epoch": 2.945033292566925, "grad_norm": 1.5609526634216309, "learning_rate": 0.0006320152194591657, "loss": 3.6066, "step": 43345 }, { "epoch": 2.9453730126375866, "grad_norm": 1.2862722873687744, "learning_rate": 0.000631972754450333, "loss": 3.5186, "step": 43350 }, { "epoch": 2.945712732708248, "grad_norm": 1.2860233783721924, "learning_rate": 0.0006319302894415002, "loss": 3.653, "step": 43355 }, { "epoch": 2.9460524527789103, "grad_norm": 1.1591213941574097, "learning_rate": 0.0006318878244326675, 
"loss": 3.4767, "step": 43360 }, { "epoch": 2.946392172849572, "grad_norm": 1.1799938678741455, "learning_rate": 0.0006318453594238348, "loss": 3.3958, "step": 43365 }, { "epoch": 2.9467318929202335, "grad_norm": 1.6881994009017944, "learning_rate": 0.000631802894415002, "loss": 3.6653, "step": 43370 }, { "epoch": 2.9470716129908956, "grad_norm": 1.1838480234146118, "learning_rate": 0.0006317604294061692, "loss": 3.8195, "step": 43375 }, { "epoch": 2.9474113330615572, "grad_norm": 1.7543773651123047, "learning_rate": 0.0006317179643973367, "loss": 3.3772, "step": 43380 }, { "epoch": 2.947751053132219, "grad_norm": 1.1079869270324707, "learning_rate": 0.0006316754993885039, "loss": 3.3983, "step": 43385 }, { "epoch": 2.948090773202881, "grad_norm": 1.2579823732376099, "learning_rate": 0.0006316330343796711, "loss": 3.6201, "step": 43390 }, { "epoch": 2.9484304932735426, "grad_norm": 1.1725804805755615, "learning_rate": 0.0006315905693708385, "loss": 3.5017, "step": 43395 }, { "epoch": 2.948770213344204, "grad_norm": 1.2831846475601196, "learning_rate": 0.0006315481043620057, "loss": 3.5302, "step": 43400 }, { "epoch": 2.9491099334148663, "grad_norm": 1.2953609228134155, "learning_rate": 0.0006315056393531729, "loss": 3.3238, "step": 43405 }, { "epoch": 2.949449653485528, "grad_norm": 1.6532299518585205, "learning_rate": 0.0006314631743443403, "loss": 3.2479, "step": 43410 }, { "epoch": 2.9497893735561895, "grad_norm": 1.3329956531524658, "learning_rate": 0.0006314207093355076, "loss": 3.5361, "step": 43415 }, { "epoch": 2.9501290936268516, "grad_norm": 1.324249267578125, "learning_rate": 0.0006313782443266748, "loss": 3.4109, "step": 43420 }, { "epoch": 2.9504688136975132, "grad_norm": 1.227259635925293, "learning_rate": 0.0006313357793178422, "loss": 3.7368, "step": 43425 }, { "epoch": 2.950808533768175, "grad_norm": 1.5803236961364746, "learning_rate": 0.0006312933143090094, "loss": 3.8303, "step": 43430 }, { "epoch": 2.951148253838837, "grad_norm": 
1.7586536407470703, "learning_rate": 0.0006312508493001766, "loss": 3.7395, "step": 43435 }, { "epoch": 2.9514879739094986, "grad_norm": 1.3255313634872437, "learning_rate": 0.0006312083842913439, "loss": 3.6018, "step": 43440 }, { "epoch": 2.95182769398016, "grad_norm": 1.3014750480651855, "learning_rate": 0.0006311659192825112, "loss": 3.7103, "step": 43445 }, { "epoch": 2.9521674140508223, "grad_norm": 1.483386516571045, "learning_rate": 0.0006311234542736785, "loss": 3.4242, "step": 43450 }, { "epoch": 2.952507134121484, "grad_norm": 1.487197756767273, "learning_rate": 0.0006310809892648458, "loss": 3.3935, "step": 43455 }, { "epoch": 2.9528468541921455, "grad_norm": 1.3551249504089355, "learning_rate": 0.0006310385242560131, "loss": 3.5329, "step": 43460 }, { "epoch": 2.9531865742628076, "grad_norm": 1.1746541261672974, "learning_rate": 0.0006309960592471803, "loss": 3.4752, "step": 43465 }, { "epoch": 2.9535262943334692, "grad_norm": 1.6631654500961304, "learning_rate": 0.0006309535942383476, "loss": 3.7497, "step": 43470 }, { "epoch": 2.953866014404131, "grad_norm": 1.2804960012435913, "learning_rate": 0.0006309111292295148, "loss": 3.3929, "step": 43475 }, { "epoch": 2.954205734474793, "grad_norm": 1.4597326517105103, "learning_rate": 0.0006308686642206821, "loss": 3.6397, "step": 43480 }, { "epoch": 2.9545454545454546, "grad_norm": 0.9962115287780762, "learning_rate": 0.0006308261992118495, "loss": 3.6016, "step": 43485 }, { "epoch": 2.954885174616116, "grad_norm": 1.1596949100494385, "learning_rate": 0.0006307837342030167, "loss": 3.7424, "step": 43490 }, { "epoch": 2.9552248946867783, "grad_norm": 1.105789065361023, "learning_rate": 0.000630741269194184, "loss": 3.3326, "step": 43495 }, { "epoch": 2.95556461475744, "grad_norm": 1.4601114988327026, "learning_rate": 0.0006306988041853513, "loss": 3.4151, "step": 43500 }, { "epoch": 2.9559043348281016, "grad_norm": 1.3463457822799683, "learning_rate": 0.0006306563391765185, "loss": 3.1908, "step": 43505 }, 
{ "epoch": 2.9562440548987636, "grad_norm": 1.357289433479309, "learning_rate": 0.0006306138741676857, "loss": 3.6042, "step": 43510 }, { "epoch": 2.9565837749694253, "grad_norm": 1.414056658744812, "learning_rate": 0.0006305714091588531, "loss": 3.2212, "step": 43515 }, { "epoch": 2.956923495040087, "grad_norm": 1.0055276155471802, "learning_rate": 0.0006305289441500204, "loss": 3.388, "step": 43520 }, { "epoch": 2.957263215110749, "grad_norm": 1.2972866296768188, "learning_rate": 0.0006304864791411878, "loss": 3.5068, "step": 43525 }, { "epoch": 2.9576029351814106, "grad_norm": 3.1484110355377197, "learning_rate": 0.000630444014132355, "loss": 3.6943, "step": 43530 }, { "epoch": 2.9579426552520722, "grad_norm": 1.4392677545547485, "learning_rate": 0.0006304015491235222, "loss": 3.3771, "step": 43535 }, { "epoch": 2.9582823753227343, "grad_norm": 1.359270453453064, "learning_rate": 0.0006303590841146895, "loss": 3.3403, "step": 43540 }, { "epoch": 2.958622095393396, "grad_norm": 1.313547968864441, "learning_rate": 0.0006303166191058568, "loss": 3.5861, "step": 43545 }, { "epoch": 2.9589618154640576, "grad_norm": 1.1788197755813599, "learning_rate": 0.000630274154097024, "loss": 3.7185, "step": 43550 }, { "epoch": 2.9593015355347196, "grad_norm": 1.4058529138565063, "learning_rate": 0.0006302316890881914, "loss": 3.1589, "step": 43555 }, { "epoch": 2.9596412556053813, "grad_norm": 1.3150372505187988, "learning_rate": 0.0006301892240793587, "loss": 3.4976, "step": 43560 }, { "epoch": 2.959980975676043, "grad_norm": 1.1106046438217163, "learning_rate": 0.0006301467590705259, "loss": 3.2765, "step": 43565 }, { "epoch": 2.960320695746705, "grad_norm": 1.840296745300293, "learning_rate": 0.0006301042940616932, "loss": 3.399, "step": 43570 }, { "epoch": 2.9606604158173666, "grad_norm": 1.4883382320404053, "learning_rate": 0.0006300618290528604, "loss": 3.6775, "step": 43575 }, { "epoch": 2.9610001358880282, "grad_norm": 1.302650809288025, "learning_rate": 
0.0006300193640440277, "loss": 3.6014, "step": 43580 }, { "epoch": 2.9613398559586903, "grad_norm": 1.3608354330062866, "learning_rate": 0.0006299768990351951, "loss": 3.5314, "step": 43585 }, { "epoch": 2.961679576029352, "grad_norm": 1.0111430883407593, "learning_rate": 0.0006299344340263623, "loss": 3.7729, "step": 43590 }, { "epoch": 2.9620192961000136, "grad_norm": 1.2464485168457031, "learning_rate": 0.0006298919690175296, "loss": 3.5021, "step": 43595 }, { "epoch": 2.962359016170675, "grad_norm": 1.1947020292282104, "learning_rate": 0.0006298495040086969, "loss": 3.6917, "step": 43600 }, { "epoch": 2.9626987362413373, "grad_norm": 1.0785329341888428, "learning_rate": 0.0006298070389998641, "loss": 3.6634, "step": 43605 }, { "epoch": 2.963038456311999, "grad_norm": 1.332304835319519, "learning_rate": 0.0006297645739910314, "loss": 3.5923, "step": 43610 }, { "epoch": 2.9633781763826605, "grad_norm": 1.2138060331344604, "learning_rate": 0.0006297221089821987, "loss": 3.2654, "step": 43615 }, { "epoch": 2.9637178964533226, "grad_norm": 1.259627342224121, "learning_rate": 0.000629679643973366, "loss": 3.6847, "step": 43620 }, { "epoch": 2.9640576165239843, "grad_norm": 1.2644089460372925, "learning_rate": 0.0006296371789645332, "loss": 3.6285, "step": 43625 }, { "epoch": 2.964397336594646, "grad_norm": 10.903395652770996, "learning_rate": 0.0006295947139557006, "loss": 3.3401, "step": 43630 }, { "epoch": 2.9647370566653075, "grad_norm": 1.3934565782546997, "learning_rate": 0.0006295522489468678, "loss": 3.5075, "step": 43635 }, { "epoch": 2.9650767767359696, "grad_norm": 1.7802801132202148, "learning_rate": 0.000629509783938035, "loss": 3.5955, "step": 43640 }, { "epoch": 2.965416496806631, "grad_norm": 1.0301975011825562, "learning_rate": 0.0006294673189292024, "loss": 3.6394, "step": 43645 }, { "epoch": 2.965756216877293, "grad_norm": 1.5499943494796753, "learning_rate": 0.0006294248539203696, "loss": 3.5576, "step": 43650 }, { "epoch": 2.966095936947955, 
"grad_norm": 1.1359871625900269, "learning_rate": 0.0006293823889115369, "loss": 3.6342, "step": 43655 }, { "epoch": 2.9664356570186166, "grad_norm": 1.2802854776382446, "learning_rate": 0.0006293399239027043, "loss": 3.609, "step": 43660 }, { "epoch": 2.966775377089278, "grad_norm": 1.2139159440994263, "learning_rate": 0.0006292974588938715, "loss": 3.5278, "step": 43665 }, { "epoch": 2.9671150971599403, "grad_norm": 1.1546683311462402, "learning_rate": 0.0006292549938850387, "loss": 3.747, "step": 43670 }, { "epoch": 2.967454817230602, "grad_norm": 1.0564160346984863, "learning_rate": 0.000629212528876206, "loss": 3.5018, "step": 43675 }, { "epoch": 2.9677945373012635, "grad_norm": 1.4797147512435913, "learning_rate": 0.0006291700638673733, "loss": 3.5587, "step": 43680 }, { "epoch": 2.9681342573719256, "grad_norm": 3.104491710662842, "learning_rate": 0.0006291275988585405, "loss": 3.5942, "step": 43685 }, { "epoch": 2.9684739774425872, "grad_norm": 1.2052149772644043, "learning_rate": 0.0006290851338497079, "loss": 3.3701, "step": 43690 }, { "epoch": 2.968813697513249, "grad_norm": 1.6869055032730103, "learning_rate": 0.0006290426688408752, "loss": 3.7212, "step": 43695 }, { "epoch": 2.969153417583911, "grad_norm": 1.4181268215179443, "learning_rate": 0.0006290002038320424, "loss": 3.5182, "step": 43700 }, { "epoch": 2.9694931376545726, "grad_norm": 1.457741379737854, "learning_rate": 0.0006289577388232097, "loss": 3.3856, "step": 43705 }, { "epoch": 2.969832857725234, "grad_norm": 1.169553518295288, "learning_rate": 0.000628915273814377, "loss": 3.4756, "step": 43710 }, { "epoch": 2.9701725777958963, "grad_norm": 1.3488088846206665, "learning_rate": 0.0006288728088055442, "loss": 3.3779, "step": 43715 }, { "epoch": 2.970512297866558, "grad_norm": 1.122656226158142, "learning_rate": 0.0006288303437967115, "loss": 3.3758, "step": 43720 }, { "epoch": 2.9708520179372195, "grad_norm": 1.673352599143982, "learning_rate": 0.0006287878787878788, "loss": 3.4607, "step": 
43725 }, { "epoch": 2.9711917380078816, "grad_norm": 1.7858870029449463, "learning_rate": 0.0006287454137790461, "loss": 3.6219, "step": 43730 }, { "epoch": 2.9715314580785432, "grad_norm": 1.8816994428634644, "learning_rate": 0.0006287029487702134, "loss": 3.4883, "step": 43735 }, { "epoch": 2.971871178149205, "grad_norm": 1.3712729215621948, "learning_rate": 0.0006286604837613806, "loss": 3.4375, "step": 43740 }, { "epoch": 2.972210898219867, "grad_norm": 1.2095478773117065, "learning_rate": 0.0006286180187525479, "loss": 3.7003, "step": 43745 }, { "epoch": 2.9725506182905286, "grad_norm": 1.6439698934555054, "learning_rate": 0.0006285755537437152, "loss": 3.6142, "step": 43750 }, { "epoch": 2.97289033836119, "grad_norm": 1.4280691146850586, "learning_rate": 0.0006285330887348824, "loss": 3.4673, "step": 43755 }, { "epoch": 2.9732300584318523, "grad_norm": 1.4040493965148926, "learning_rate": 0.0006284906237260498, "loss": 3.7707, "step": 43760 }, { "epoch": 2.973569778502514, "grad_norm": 1.5884507894515991, "learning_rate": 0.0006284481587172171, "loss": 3.3909, "step": 43765 }, { "epoch": 2.9739094985731755, "grad_norm": 1.0370662212371826, "learning_rate": 0.0006284056937083843, "loss": 3.6109, "step": 43770 }, { "epoch": 2.9742492186438376, "grad_norm": 1.6966699361801147, "learning_rate": 0.0006283632286995515, "loss": 3.3178, "step": 43775 }, { "epoch": 2.9745889387144993, "grad_norm": 2.1876182556152344, "learning_rate": 0.0006283207636907189, "loss": 3.7279, "step": 43780 }, { "epoch": 2.974928658785161, "grad_norm": 1.3151191473007202, "learning_rate": 0.0006282782986818861, "loss": 3.536, "step": 43785 }, { "epoch": 2.975268378855823, "grad_norm": 1.2128208875656128, "learning_rate": 0.0006282358336730533, "loss": 3.614, "step": 43790 }, { "epoch": 2.9756080989264846, "grad_norm": 1.647749662399292, "learning_rate": 0.0006281933686642208, "loss": 3.6413, "step": 43795 }, { "epoch": 2.975947818997146, "grad_norm": 1.3252525329589844, "learning_rate": 
0.000628150903655388, "loss": 3.4918, "step": 43800 }, { "epoch": 2.9762875390678083, "grad_norm": 1.1700718402862549, "learning_rate": 0.0006281084386465552, "loss": 3.5621, "step": 43805 }, { "epoch": 2.97662725913847, "grad_norm": 1.4733301401138306, "learning_rate": 0.0006280659736377226, "loss": 3.4267, "step": 43810 }, { "epoch": 2.9769669792091316, "grad_norm": 1.205227017402649, "learning_rate": 0.0006280235086288898, "loss": 3.6097, "step": 43815 }, { "epoch": 2.9773066992797936, "grad_norm": 1.4004751443862915, "learning_rate": 0.000627981043620057, "loss": 3.841, "step": 43820 }, { "epoch": 2.9776464193504553, "grad_norm": 1.0065537691116333, "learning_rate": 0.0006279385786112243, "loss": 3.4634, "step": 43825 }, { "epoch": 2.977986139421117, "grad_norm": 1.331727385520935, "learning_rate": 0.0006278961136023917, "loss": 3.4407, "step": 43830 }, { "epoch": 2.978325859491779, "grad_norm": 1.2924623489379883, "learning_rate": 0.0006278536485935589, "loss": 3.0846, "step": 43835 }, { "epoch": 2.9786655795624406, "grad_norm": 1.1248539686203003, "learning_rate": 0.0006278111835847262, "loss": 3.5854, "step": 43840 }, { "epoch": 2.9790052996331022, "grad_norm": 1.1042985916137695, "learning_rate": 0.0006277687185758935, "loss": 3.6257, "step": 43845 }, { "epoch": 2.9793450197037643, "grad_norm": 1.3393385410308838, "learning_rate": 0.0006277262535670607, "loss": 3.6071, "step": 43850 }, { "epoch": 2.979684739774426, "grad_norm": 1.1059069633483887, "learning_rate": 0.000627683788558228, "loss": 3.4081, "step": 43855 }, { "epoch": 2.9800244598450876, "grad_norm": 1.4303562641143799, "learning_rate": 0.0006276413235493952, "loss": 3.5118, "step": 43860 }, { "epoch": 2.9803641799157496, "grad_norm": 1.1775882244110107, "learning_rate": 0.0006275988585405627, "loss": 3.424, "step": 43865 }, { "epoch": 2.9807038999864113, "grad_norm": 1.1792223453521729, "learning_rate": 0.0006275563935317299, "loss": 3.5177, "step": 43870 }, { "epoch": 2.981043620057073, 
"grad_norm": 1.5245370864868164, "learning_rate": 0.0006275139285228971, "loss": 3.4343, "step": 43875 }, { "epoch": 2.981383340127735, "grad_norm": 1.2685906887054443, "learning_rate": 0.0006274714635140645, "loss": 3.6931, "step": 43880 }, { "epoch": 2.9817230601983966, "grad_norm": 1.5319782495498657, "learning_rate": 0.0006274289985052317, "loss": 3.3388, "step": 43885 }, { "epoch": 2.9820627802690582, "grad_norm": 1.5063025951385498, "learning_rate": 0.0006273865334963989, "loss": 3.3018, "step": 43890 }, { "epoch": 2.9824025003397203, "grad_norm": 1.3351949453353882, "learning_rate": 0.0006273440684875663, "loss": 3.9241, "step": 43895 }, { "epoch": 2.982742220410382, "grad_norm": 2.4220526218414307, "learning_rate": 0.0006273016034787336, "loss": 3.577, "step": 43900 }, { "epoch": 2.9830819404810436, "grad_norm": 1.7103294134140015, "learning_rate": 0.0006272591384699008, "loss": 3.5306, "step": 43905 }, { "epoch": 2.9834216605517057, "grad_norm": 1.819038987159729, "learning_rate": 0.0006272166734610682, "loss": 3.5453, "step": 43910 }, { "epoch": 2.9837613806223673, "grad_norm": 1.0445129871368408, "learning_rate": 0.0006271742084522354, "loss": 3.6141, "step": 43915 }, { "epoch": 2.984101100693029, "grad_norm": 1.331730842590332, "learning_rate": 0.0006271317434434026, "loss": 3.5266, "step": 43920 }, { "epoch": 2.984440820763691, "grad_norm": 1.1720972061157227, "learning_rate": 0.0006270892784345699, "loss": 3.557, "step": 43925 }, { "epoch": 2.9847805408343526, "grad_norm": 1.3980058431625366, "learning_rate": 0.0006270468134257372, "loss": 3.3928, "step": 43930 }, { "epoch": 2.9851202609050143, "grad_norm": 1.8413645029067993, "learning_rate": 0.0006270043484169045, "loss": 3.146, "step": 43935 }, { "epoch": 2.985459980975676, "grad_norm": 1.2991853952407837, "learning_rate": 0.0006269618834080718, "loss": 3.405, "step": 43940 }, { "epoch": 2.985799701046338, "grad_norm": 1.1860425472259521, "learning_rate": 0.0006269194183992391, "loss": 3.4659, 
"step": 43945 }, { "epoch": 2.9861394211169996, "grad_norm": 1.286164402961731, "learning_rate": 0.0006268769533904063, "loss": 3.5581, "step": 43950 }, { "epoch": 2.9864791411876612, "grad_norm": 1.4843242168426514, "learning_rate": 0.0006268344883815736, "loss": 3.5796, "step": 43955 }, { "epoch": 2.9868188612583233, "grad_norm": 1.646600365638733, "learning_rate": 0.0006267920233727408, "loss": 3.7153, "step": 43960 }, { "epoch": 2.987158581328985, "grad_norm": 1.3399006128311157, "learning_rate": 0.0006267495583639081, "loss": 3.5524, "step": 43965 }, { "epoch": 2.9874983013996466, "grad_norm": 1.3863720893859863, "learning_rate": 0.0006267070933550755, "loss": 3.736, "step": 43970 }, { "epoch": 2.987838021470308, "grad_norm": 1.0256742238998413, "learning_rate": 0.0006266646283462427, "loss": 3.4161, "step": 43975 }, { "epoch": 2.9881777415409703, "grad_norm": 1.2139216661453247, "learning_rate": 0.00062662216333741, "loss": 3.485, "step": 43980 }, { "epoch": 2.988517461611632, "grad_norm": 1.347464680671692, "learning_rate": 0.0006265796983285773, "loss": 3.3578, "step": 43985 }, { "epoch": 2.9888571816822935, "grad_norm": 1.8289918899536133, "learning_rate": 0.0006265372333197445, "loss": 3.7097, "step": 43990 }, { "epoch": 2.9891969017529556, "grad_norm": 1.5542476177215576, "learning_rate": 0.0006264947683109118, "loss": 3.7571, "step": 43995 }, { "epoch": 2.9895366218236172, "grad_norm": 1.270017147064209, "learning_rate": 0.0006264523033020791, "loss": 3.4635, "step": 44000 }, { "epoch": 2.989876341894279, "grad_norm": 1.6869947910308838, "learning_rate": 0.0006264098382932464, "loss": 3.5131, "step": 44005 }, { "epoch": 2.990216061964941, "grad_norm": 1.9443761110305786, "learning_rate": 0.0006263673732844136, "loss": 3.3245, "step": 44010 }, { "epoch": 2.9905557820356026, "grad_norm": 1.4191405773162842, "learning_rate": 0.000626324908275581, "loss": 3.679, "step": 44015 }, { "epoch": 2.990895502106264, "grad_norm": 1.4588662385940552, "learning_rate": 
0.0006262824432667482, "loss": 3.5853, "step": 44020 }, { "epoch": 2.9912352221769263, "grad_norm": 1.263277292251587, "learning_rate": 0.0006262399782579154, "loss": 3.5087, "step": 44025 }, { "epoch": 2.991574942247588, "grad_norm": 1.2868274450302124, "learning_rate": 0.0006261975132490828, "loss": 4.0615, "step": 44030 }, { "epoch": 2.9919146623182495, "grad_norm": 4.71685266494751, "learning_rate": 0.00062615504824025, "loss": 3.45, "step": 44035 }, { "epoch": 2.9922543823889116, "grad_norm": 1.2072181701660156, "learning_rate": 0.0006261125832314173, "loss": 3.4831, "step": 44040 }, { "epoch": 2.9925941024595732, "grad_norm": 1.1255221366882324, "learning_rate": 0.0006260701182225847, "loss": 3.6581, "step": 44045 }, { "epoch": 2.992933822530235, "grad_norm": 1.2335566282272339, "learning_rate": 0.0006260276532137519, "loss": 3.6603, "step": 44050 }, { "epoch": 2.993273542600897, "grad_norm": 1.2857866287231445, "learning_rate": 0.0006259851882049191, "loss": 3.7371, "step": 44055 }, { "epoch": 2.9936132626715586, "grad_norm": 2.144801139831543, "learning_rate": 0.0006259427231960864, "loss": 3.6181, "step": 44060 }, { "epoch": 2.99395298274222, "grad_norm": 1.3334639072418213, "learning_rate": 0.0006259002581872537, "loss": 3.5105, "step": 44065 }, { "epoch": 2.9942927028128823, "grad_norm": 1.394665241241455, "learning_rate": 0.0006258577931784209, "loss": 3.6195, "step": 44070 }, { "epoch": 2.994632422883544, "grad_norm": 1.420682668685913, "learning_rate": 0.0006258153281695883, "loss": 3.4545, "step": 44075 }, { "epoch": 2.9949721429542056, "grad_norm": 1.8606219291687012, "learning_rate": 0.0006257728631607556, "loss": 3.3399, "step": 44080 }, { "epoch": 2.9953118630248676, "grad_norm": 1.4524481296539307, "learning_rate": 0.0006257303981519228, "loss": 3.5397, "step": 44085 }, { "epoch": 2.9956515830955293, "grad_norm": 1.4197267293930054, "learning_rate": 0.0006256879331430901, "loss": 3.401, "step": 44090 }, { "epoch": 2.995991303166191, "grad_norm": 
1.2034244537353516, "learning_rate": 0.0006256454681342574, "loss": 3.7216, "step": 44095 }, { "epoch": 2.996331023236853, "grad_norm": 1.4778412580490112, "learning_rate": 0.0006256030031254246, "loss": 3.652, "step": 44100 }, { "epoch": 2.9966707433075146, "grad_norm": 1.2287814617156982, "learning_rate": 0.000625560538116592, "loss": 3.6871, "step": 44105 }, { "epoch": 2.9970104633781762, "grad_norm": 4.713544845581055, "learning_rate": 0.0006255180731077593, "loss": 3.4894, "step": 44110 }, { "epoch": 2.9973501834488383, "grad_norm": 1.5898104906082153, "learning_rate": 0.0006254756080989265, "loss": 3.3809, "step": 44115 }, { "epoch": 2.9976899035195, "grad_norm": 1.2314107418060303, "learning_rate": 0.0006254331430900938, "loss": 3.66, "step": 44120 }, { "epoch": 2.9980296235901616, "grad_norm": 1.1976113319396973, "learning_rate": 0.000625390678081261, "loss": 3.7057, "step": 44125 }, { "epoch": 2.9983693436608236, "grad_norm": 1.6527233123779297, "learning_rate": 0.0006253482130724283, "loss": 3.6884, "step": 44130 }, { "epoch": 2.9987090637314853, "grad_norm": 1.1780169010162354, "learning_rate": 0.0006253057480635956, "loss": 3.6014, "step": 44135 }, { "epoch": 2.999048783802147, "grad_norm": 1.222949743270874, "learning_rate": 0.0006252632830547629, "loss": 3.2699, "step": 44140 }, { "epoch": 2.999388503872809, "grad_norm": 1.178155779838562, "learning_rate": 0.0006252208180459302, "loss": 3.5005, "step": 44145 }, { "epoch": 2.9997282239434706, "grad_norm": 1.1492253541946411, "learning_rate": 0.0006251783530370975, "loss": 3.5077, "step": 44150 }, { "epoch": 3.0, "eval_bertscore": { "f1": 0.840332531659845, "precision": 0.8442042163724018, "recall": 0.8372985872375328 }, "eval_bleu_4": 0.016016247645606194, "eval_exact_match": 0.00038763446070355656, "eval_loss": 3.4547934532165527, "eval_meteor": 0.09051802920735663, "eval_rouge": { "rouge1": 0.12487636632511093, "rouge2": 0.015611168301871352, "rougeL": 0.10630893889768564, "rougeLsum": 
0.10631469198625434 }, "eval_runtime": 1920.208, "eval_samples_per_second": 5.374, "eval_steps_per_second": 0.672, "step": 44154 }, { "epoch": 3.0000679440141322, "grad_norm": 1.2058123350143433, "learning_rate": 0.0006251358880282647, "loss": 3.3763, "step": 44155 }, { "epoch": 3.0004076640847943, "grad_norm": 1.261699914932251, "learning_rate": 0.0006250934230194319, "loss": 3.5694, "step": 44160 }, { "epoch": 3.000747384155456, "grad_norm": 1.5556201934814453, "learning_rate": 0.0006250509580105993, "loss": 3.4752, "step": 44165 }, { "epoch": 3.0010871042261176, "grad_norm": 1.3033230304718018, "learning_rate": 0.0006250084930017665, "loss": 3.2857, "step": 44170 }, { "epoch": 3.0014268242967796, "grad_norm": 1.6718885898590088, "learning_rate": 0.0006249660279929338, "loss": 3.4962, "step": 44175 }, { "epoch": 3.0017665443674413, "grad_norm": 1.269756555557251, "learning_rate": 0.0006249235629841012, "loss": 3.4491, "step": 44180 }, { "epoch": 3.002106264438103, "grad_norm": 1.4461380243301392, "learning_rate": 0.0006248810979752684, "loss": 3.2644, "step": 44185 }, { "epoch": 3.002445984508765, "grad_norm": 1.0632834434509277, "learning_rate": 0.0006248386329664356, "loss": 3.6181, "step": 44190 }, { "epoch": 3.0027857045794266, "grad_norm": 1.4248203039169312, "learning_rate": 0.000624796167957603, "loss": 3.23, "step": 44195 }, { "epoch": 3.0031254246500882, "grad_norm": 1.2203080654144287, "learning_rate": 0.0006247537029487702, "loss": 3.3653, "step": 44200 }, { "epoch": 3.0034651447207503, "grad_norm": 1.4314488172531128, "learning_rate": 0.0006247112379399374, "loss": 3.5012, "step": 44205 }, { "epoch": 3.003804864791412, "grad_norm": 1.2170368432998657, "learning_rate": 0.0006246687729311049, "loss": 3.4718, "step": 44210 }, { "epoch": 3.0041445848620736, "grad_norm": 1.167516827583313, "learning_rate": 0.0006246263079222721, "loss": 3.439, "step": 44215 }, { "epoch": 3.004484304932735, "grad_norm": 1.775165319442749, "learning_rate": 
0.0006245838429134394, "loss": 3.4284, "step": 44220 }, { "epoch": 3.0048240250033973, "grad_norm": 1.3820164203643799, "learning_rate": 0.0006245413779046066, "loss": 3.4817, "step": 44225 }, { "epoch": 3.005163745074059, "grad_norm": 1.345770001411438, "learning_rate": 0.0006244989128957739, "loss": 3.328, "step": 44230 }, { "epoch": 3.0055034651447206, "grad_norm": 1.513146996498108, "learning_rate": 0.0006244564478869412, "loss": 3.2414, "step": 44235 }, { "epoch": 3.0058431852153826, "grad_norm": 1.1929785013198853, "learning_rate": 0.0006244139828781084, "loss": 3.4869, "step": 44240 }, { "epoch": 3.0061829052860443, "grad_norm": 1.5841622352600098, "learning_rate": 0.0006243715178692758, "loss": 3.5707, "step": 44245 }, { "epoch": 3.006522625356706, "grad_norm": 1.400761604309082, "learning_rate": 0.0006243290528604431, "loss": 3.4739, "step": 44250 }, { "epoch": 3.006862345427368, "grad_norm": 1.4409781694412231, "learning_rate": 0.0006242865878516103, "loss": 3.3768, "step": 44255 }, { "epoch": 3.0072020654980296, "grad_norm": 1.4502137899398804, "learning_rate": 0.0006242441228427775, "loss": 3.8375, "step": 44260 }, { "epoch": 3.0075417855686912, "grad_norm": 1.2450839281082153, "learning_rate": 0.0006242016578339449, "loss": 3.6005, "step": 44265 }, { "epoch": 3.0078815056393533, "grad_norm": 1.5111688375473022, "learning_rate": 0.0006241591928251121, "loss": 3.0899, "step": 44270 }, { "epoch": 3.008221225710015, "grad_norm": 1.5227469205856323, "learning_rate": 0.0006241167278162793, "loss": 3.3497, "step": 44275 }, { "epoch": 3.0085609457806766, "grad_norm": 1.1473456621170044, "learning_rate": 0.0006240742628074468, "loss": 3.6175, "step": 44280 }, { "epoch": 3.0089006658513386, "grad_norm": 1.9473215341567993, "learning_rate": 0.000624031797798614, "loss": 3.3822, "step": 44285 }, { "epoch": 3.0092403859220003, "grad_norm": 1.6065378189086914, "learning_rate": 0.0006239893327897812, "loss": 3.4291, "step": 44290 }, { "epoch": 3.009580105992662, 
"grad_norm": 1.1331645250320435, "learning_rate": 0.0006239468677809486, "loss": 3.5174, "step": 44295 }, { "epoch": 3.009919826063324, "grad_norm": 1.19963538646698, "learning_rate": 0.0006239044027721158, "loss": 3.4312, "step": 44300 }, { "epoch": 3.0102595461339856, "grad_norm": 1.2947001457214355, "learning_rate": 0.000623861937763283, "loss": 3.3663, "step": 44305 }, { "epoch": 3.0105992662046472, "grad_norm": 1.326711654663086, "learning_rate": 0.0006238194727544503, "loss": 3.6114, "step": 44310 }, { "epoch": 3.0109389862753093, "grad_norm": 1.7081252336502075, "learning_rate": 0.0006237770077456177, "loss": 3.5171, "step": 44315 }, { "epoch": 3.011278706345971, "grad_norm": 1.2199621200561523, "learning_rate": 0.0006237345427367849, "loss": 3.3664, "step": 44320 }, { "epoch": 3.0116184264166326, "grad_norm": 1.4498560428619385, "learning_rate": 0.0006236920777279522, "loss": 3.4479, "step": 44325 }, { "epoch": 3.0119581464872947, "grad_norm": 1.3020660877227783, "learning_rate": 0.0006236496127191195, "loss": 3.7818, "step": 44330 }, { "epoch": 3.0122978665579563, "grad_norm": 1.4778538942337036, "learning_rate": 0.0006236071477102867, "loss": 3.4673, "step": 44335 }, { "epoch": 3.012637586628618, "grad_norm": 1.218457579612732, "learning_rate": 0.000623564682701454, "loss": 3.3381, "step": 44340 }, { "epoch": 3.01297730669928, "grad_norm": 1.4885897636413574, "learning_rate": 0.0006235222176926213, "loss": 3.6266, "step": 44345 }, { "epoch": 3.0133170267699416, "grad_norm": 1.3185468912124634, "learning_rate": 0.0006234797526837886, "loss": 3.4465, "step": 44350 }, { "epoch": 3.0136567468406033, "grad_norm": 1.3616403341293335, "learning_rate": 0.0006234372876749559, "loss": 3.6099, "step": 44355 }, { "epoch": 3.0139964669112653, "grad_norm": 1.118623971939087, "learning_rate": 0.0006233948226661231, "loss": 3.5178, "step": 44360 }, { "epoch": 3.014336186981927, "grad_norm": 1.447962760925293, "learning_rate": 0.0006233523576572904, "loss": 3.3964, 
"step": 44365 }, { "epoch": 3.0146759070525886, "grad_norm": 1.2556034326553345, "learning_rate": 0.0006233098926484577, "loss": 3.4244, "step": 44370 }, { "epoch": 3.01501562712325, "grad_norm": 1.2173449993133545, "learning_rate": 0.0006232674276396249, "loss": 3.5676, "step": 44375 }, { "epoch": 3.0153553471939123, "grad_norm": 1.473557949066162, "learning_rate": 0.0006232249626307922, "loss": 3.2652, "step": 44380 }, { "epoch": 3.015695067264574, "grad_norm": 1.439597725868225, "learning_rate": 0.0006231824976219596, "loss": 3.5272, "step": 44385 }, { "epoch": 3.0160347873352356, "grad_norm": 1.7108899354934692, "learning_rate": 0.0006231400326131268, "loss": 3.576, "step": 44390 }, { "epoch": 3.0163745074058976, "grad_norm": 1.1242796182632446, "learning_rate": 0.000623097567604294, "loss": 3.3897, "step": 44395 }, { "epoch": 3.0167142274765593, "grad_norm": 1.4040253162384033, "learning_rate": 0.0006230551025954614, "loss": 3.6633, "step": 44400 }, { "epoch": 3.017053947547221, "grad_norm": 1.3262425661087036, "learning_rate": 0.0006230126375866286, "loss": 3.3127, "step": 44405 }, { "epoch": 3.017393667617883, "grad_norm": 18.18290901184082, "learning_rate": 0.0006229701725777958, "loss": 3.3503, "step": 44410 }, { "epoch": 3.0177333876885446, "grad_norm": 1.301285982131958, "learning_rate": 0.0006229277075689632, "loss": 3.6839, "step": 44415 }, { "epoch": 3.0180731077592062, "grad_norm": 1.462579607963562, "learning_rate": 0.0006228852425601305, "loss": 3.6339, "step": 44420 }, { "epoch": 3.0184128278298683, "grad_norm": 1.575748324394226, "learning_rate": 0.0006228427775512977, "loss": 3.7572, "step": 44425 }, { "epoch": 3.01875254790053, "grad_norm": 1.0478808879852295, "learning_rate": 0.0006228003125424651, "loss": 3.549, "step": 44430 }, { "epoch": 3.0190922679711916, "grad_norm": 1.1307073831558228, "learning_rate": 0.0006227578475336323, "loss": 3.2255, "step": 44435 }, { "epoch": 3.0194319880418536, "grad_norm": 1.2233967781066895, "learning_rate": 
0.0006227153825247995, "loss": 3.8324, "step": 44440 }, { "epoch": 3.0197717081125153, "grad_norm": 1.4545047283172607, "learning_rate": 0.0006226729175159669, "loss": 3.4816, "step": 44445 }, { "epoch": 3.020111428183177, "grad_norm": 1.4084489345550537, "learning_rate": 0.0006226304525071341, "loss": 3.4409, "step": 44450 }, { "epoch": 3.020451148253839, "grad_norm": 1.805510401725769, "learning_rate": 0.0006225879874983014, "loss": 3.295, "step": 44455 }, { "epoch": 3.0207908683245006, "grad_norm": 1.305690884590149, "learning_rate": 0.0006225455224894687, "loss": 3.4321, "step": 44460 }, { "epoch": 3.0211305883951622, "grad_norm": 1.5010725259780884, "learning_rate": 0.000622503057480636, "loss": 3.5343, "step": 44465 }, { "epoch": 3.0214703084658243, "grad_norm": 1.6529858112335205, "learning_rate": 0.0006224605924718032, "loss": 3.3234, "step": 44470 }, { "epoch": 3.021810028536486, "grad_norm": 1.444009780883789, "learning_rate": 0.0006224181274629705, "loss": 3.4597, "step": 44475 }, { "epoch": 3.0221497486071476, "grad_norm": 1.4754987955093384, "learning_rate": 0.0006223756624541378, "loss": 3.6055, "step": 44480 }, { "epoch": 3.0224894686778097, "grad_norm": 1.4873011112213135, "learning_rate": 0.000622333197445305, "loss": 3.6965, "step": 44485 }, { "epoch": 3.0228291887484713, "grad_norm": 1.1761136054992676, "learning_rate": 0.0006222907324364724, "loss": 3.1818, "step": 44490 }, { "epoch": 3.023168908819133, "grad_norm": 1.376204252243042, "learning_rate": 0.0006222482674276397, "loss": 3.4226, "step": 44495 }, { "epoch": 3.023508628889795, "grad_norm": 1.2669364213943481, "learning_rate": 0.0006222058024188069, "loss": 3.4943, "step": 44500 }, { "epoch": 3.0238483489604566, "grad_norm": 1.4467658996582031, "learning_rate": 0.0006221633374099742, "loss": 3.3093, "step": 44505 }, { "epoch": 3.0241880690311183, "grad_norm": 1.371985673904419, "learning_rate": 0.0006221208724011414, "loss": 3.4744, "step": 44510 }, { "epoch": 3.0245277891017803, 
"grad_norm": 1.3573631048202515, "learning_rate": 0.0006220784073923087, "loss": 3.4705, "step": 44515 }, { "epoch": 3.024867509172442, "grad_norm": 1.4009640216827393, "learning_rate": 0.000622035942383476, "loss": 3.388, "step": 44520 }, { "epoch": 3.0252072292431036, "grad_norm": 1.3431370258331299, "learning_rate": 0.0006219934773746433, "loss": 3.3269, "step": 44525 }, { "epoch": 3.0255469493137657, "grad_norm": 1.3516242504119873, "learning_rate": 0.0006219510123658106, "loss": 3.5687, "step": 44530 }, { "epoch": 3.0258866693844273, "grad_norm": 1.3445366621017456, "learning_rate": 0.0006219085473569779, "loss": 3.4092, "step": 44535 }, { "epoch": 3.026226389455089, "grad_norm": 1.3591253757476807, "learning_rate": 0.0006218660823481451, "loss": 3.9813, "step": 44540 }, { "epoch": 3.026566109525751, "grad_norm": 1.4651918411254883, "learning_rate": 0.0006218236173393123, "loss": 3.1576, "step": 44545 }, { "epoch": 3.0269058295964126, "grad_norm": 1.6742714643478394, "learning_rate": 0.0006217811523304797, "loss": 3.4697, "step": 44550 }, { "epoch": 3.0272455496670743, "grad_norm": 1.0798819065093994, "learning_rate": 0.0006217386873216469, "loss": 3.5305, "step": 44555 }, { "epoch": 3.027585269737736, "grad_norm": 1.3809890747070312, "learning_rate": 0.0006216962223128143, "loss": 3.4286, "step": 44560 }, { "epoch": 3.027924989808398, "grad_norm": 1.2108731269836426, "learning_rate": 0.0006216537573039816, "loss": 3.5758, "step": 44565 }, { "epoch": 3.0282647098790596, "grad_norm": 1.0634121894836426, "learning_rate": 0.0006216112922951488, "loss": 3.5023, "step": 44570 }, { "epoch": 3.0286044299497212, "grad_norm": 1.3761703968048096, "learning_rate": 0.0006215688272863161, "loss": 3.5121, "step": 44575 }, { "epoch": 3.0289441500203833, "grad_norm": 1.6447434425354004, "learning_rate": 0.0006215263622774834, "loss": 3.2944, "step": 44580 }, { "epoch": 3.029283870091045, "grad_norm": 1.661073923110962, "learning_rate": 0.0006214838972686506, "loss": 3.4939, 
"step": 44585 }, { "epoch": 3.0296235901617066, "grad_norm": 1.3047720193862915, "learning_rate": 0.0006214414322598179, "loss": 3.3003, "step": 44590 }, { "epoch": 3.0299633102323686, "grad_norm": 1.6170653104782104, "learning_rate": 0.0006213989672509853, "loss": 3.4397, "step": 44595 }, { "epoch": 3.0303030303030303, "grad_norm": 1.2093039751052856, "learning_rate": 0.0006213565022421525, "loss": 3.4628, "step": 44600 }, { "epoch": 3.030642750373692, "grad_norm": 1.602662205696106, "learning_rate": 0.0006213140372333198, "loss": 3.4632, "step": 44605 }, { "epoch": 3.030982470444354, "grad_norm": 1.150122880935669, "learning_rate": 0.000621271572224487, "loss": 3.3064, "step": 44610 }, { "epoch": 3.0313221905150156, "grad_norm": 1.1579607725143433, "learning_rate": 0.0006212291072156543, "loss": 3.3027, "step": 44615 }, { "epoch": 3.0316619105856772, "grad_norm": 1.2906478643417358, "learning_rate": 0.0006211866422068216, "loss": 3.3922, "step": 44620 }, { "epoch": 3.0320016306563393, "grad_norm": 1.7038288116455078, "learning_rate": 0.0006211441771979888, "loss": 3.4845, "step": 44625 }, { "epoch": 3.032341350727001, "grad_norm": 1.3003995418548584, "learning_rate": 0.0006211017121891562, "loss": 3.6008, "step": 44630 }, { "epoch": 3.0326810707976626, "grad_norm": 1.4613896608352661, "learning_rate": 0.0006210592471803235, "loss": 3.4253, "step": 44635 }, { "epoch": 3.0330207908683247, "grad_norm": 1.1258265972137451, "learning_rate": 0.0006210167821714907, "loss": 3.2587, "step": 44640 }, { "epoch": 3.0333605109389863, "grad_norm": 2.1701231002807617, "learning_rate": 0.000620974317162658, "loss": 3.6638, "step": 44645 }, { "epoch": 3.033700231009648, "grad_norm": 1.1753935813903809, "learning_rate": 0.0006209318521538253, "loss": 3.1177, "step": 44650 }, { "epoch": 3.03403995108031, "grad_norm": 1.3011246919631958, "learning_rate": 0.0006208893871449925, "loss": 3.5104, "step": 44655 }, { "epoch": 3.0343796711509716, "grad_norm": 1.3396861553192139, 
"learning_rate": 0.0006208469221361597, "loss": 3.4552, "step": 44660 }, { "epoch": 3.0347193912216333, "grad_norm": 1.2917641401290894, "learning_rate": 0.0006208044571273272, "loss": 3.666, "step": 44665 }, { "epoch": 3.0350591112922953, "grad_norm": 1.659544825553894, "learning_rate": 0.0006207619921184944, "loss": 3.1988, "step": 44670 }, { "epoch": 3.035398831362957, "grad_norm": 1.4820982217788696, "learning_rate": 0.0006207195271096616, "loss": 3.2718, "step": 44675 }, { "epoch": 3.0357385514336186, "grad_norm": 1.6744621992111206, "learning_rate": 0.000620677062100829, "loss": 3.5343, "step": 44680 }, { "epoch": 3.0360782715042807, "grad_norm": 1.2694183588027954, "learning_rate": 0.0006206345970919962, "loss": 3.5817, "step": 44685 }, { "epoch": 3.0364179915749423, "grad_norm": 1.3228780031204224, "learning_rate": 0.0006205921320831634, "loss": 3.4112, "step": 44690 }, { "epoch": 3.036757711645604, "grad_norm": 2.024089813232422, "learning_rate": 0.0006205496670743309, "loss": 3.3044, "step": 44695 }, { "epoch": 3.037097431716266, "grad_norm": 1.1980454921722412, "learning_rate": 0.0006205072020654981, "loss": 3.5662, "step": 44700 }, { "epoch": 3.0374371517869276, "grad_norm": 1.6060166358947754, "learning_rate": 0.0006204647370566653, "loss": 3.3369, "step": 44705 }, { "epoch": 3.0377768718575893, "grad_norm": 1.1763144731521606, "learning_rate": 0.0006204222720478326, "loss": 3.6545, "step": 44710 }, { "epoch": 3.038116591928251, "grad_norm": 1.5745357275009155, "learning_rate": 0.0006203798070389999, "loss": 3.1827, "step": 44715 }, { "epoch": 3.038456311998913, "grad_norm": 1.3420497179031372, "learning_rate": 0.0006203373420301671, "loss": 3.3196, "step": 44720 }, { "epoch": 3.0387960320695746, "grad_norm": 1.0916392803192139, "learning_rate": 0.0006202948770213344, "loss": 3.35, "step": 44725 }, { "epoch": 3.0391357521402362, "grad_norm": 4.461605548858643, "learning_rate": 0.0006202524120125018, "loss": 3.4936, "step": 44730 }, { "epoch": 
3.0394754722108983, "grad_norm": 1.2954003810882568, "learning_rate": 0.000620209947003669, "loss": 3.555, "step": 44735 }, { "epoch": 3.03981519228156, "grad_norm": 1.3217689990997314, "learning_rate": 0.0006201674819948363, "loss": 3.1287, "step": 44740 }, { "epoch": 3.0401549123522216, "grad_norm": 1.517815351486206, "learning_rate": 0.0006201250169860035, "loss": 3.6271, "step": 44745 }, { "epoch": 3.0404946324228836, "grad_norm": 1.1476929187774658, "learning_rate": 0.0006200825519771708, "loss": 3.3738, "step": 44750 }, { "epoch": 3.0408343524935453, "grad_norm": 1.990746021270752, "learning_rate": 0.0006200400869683381, "loss": 3.0818, "step": 44755 }, { "epoch": 3.041174072564207, "grad_norm": 1.7585660219192505, "learning_rate": 0.0006199976219595053, "loss": 3.3469, "step": 44760 }, { "epoch": 3.041513792634869, "grad_norm": 1.2871880531311035, "learning_rate": 0.0006199551569506727, "loss": 3.2942, "step": 44765 }, { "epoch": 3.0418535127055306, "grad_norm": 1.3736836910247803, "learning_rate": 0.00061991269194184, "loss": 3.1131, "step": 44770 }, { "epoch": 3.0421932327761922, "grad_norm": 1.7511037588119507, "learning_rate": 0.0006198702269330072, "loss": 3.5562, "step": 44775 }, { "epoch": 3.0425329528468543, "grad_norm": 1.5662825107574463, "learning_rate": 0.0006198277619241745, "loss": 3.4433, "step": 44780 }, { "epoch": 3.042872672917516, "grad_norm": 1.3925658464431763, "learning_rate": 0.0006197852969153418, "loss": 3.2981, "step": 44785 }, { "epoch": 3.0432123929881776, "grad_norm": 1.350950837135315, "learning_rate": 0.000619742831906509, "loss": 3.2134, "step": 44790 }, { "epoch": 3.0435521130588397, "grad_norm": 1.3809870481491089, "learning_rate": 0.0006197003668976762, "loss": 3.4585, "step": 44795 }, { "epoch": 3.0438918331295013, "grad_norm": 1.1825377941131592, "learning_rate": 0.0006196579018888437, "loss": 3.4841, "step": 44800 }, { "epoch": 3.044231553200163, "grad_norm": 2.3669726848602295, "learning_rate": 0.0006196154368800109, 
"loss": 3.4104, "step": 44805 }, { "epoch": 3.044571273270825, "grad_norm": 1.1408592462539673, "learning_rate": 0.0006195729718711781, "loss": 3.2291, "step": 44810 }, { "epoch": 3.0449109933414866, "grad_norm": 1.2527505159378052, "learning_rate": 0.0006195305068623455, "loss": 3.6527, "step": 44815 }, { "epoch": 3.0452507134121483, "grad_norm": Infinity, "learning_rate": 0.0006194965348552792, "loss": 3.509, "step": 44820 }, { "epoch": 3.0455904334828103, "grad_norm": 1.1515945196151733, "learning_rate": 0.0006194540698464466, "loss": 3.4679, "step": 44825 }, { "epoch": 3.045930153553472, "grad_norm": 1.4360603094100952, "learning_rate": 0.0006194116048376139, "loss": 3.4468, "step": 44830 }, { "epoch": 3.0462698736241336, "grad_norm": 1.5507398843765259, "learning_rate": 0.0006193691398287811, "loss": 3.1503, "step": 44835 }, { "epoch": 3.0466095936947957, "grad_norm": 1.363986611366272, "learning_rate": 0.0006193266748199484, "loss": 3.3914, "step": 44840 }, { "epoch": 3.0469493137654573, "grad_norm": 1.389897346496582, "learning_rate": 0.0006192842098111156, "loss": 3.299, "step": 44845 }, { "epoch": 3.047289033836119, "grad_norm": 1.3803433179855347, "learning_rate": 0.0006192417448022829, "loss": 3.6435, "step": 44850 }, { "epoch": 3.047628753906781, "grad_norm": 1.1388641595840454, "learning_rate": 0.0006191992797934502, "loss": 3.51, "step": 44855 }, { "epoch": 3.0479684739774426, "grad_norm": 1.4259675741195679, "learning_rate": 0.0006191568147846175, "loss": 3.3115, "step": 44860 }, { "epoch": 3.0483081940481043, "grad_norm": 1.6661288738250732, "learning_rate": 0.0006191143497757848, "loss": 3.4477, "step": 44865 }, { "epoch": 3.0486479141187663, "grad_norm": 1.2833762168884277, "learning_rate": 0.0006190718847669521, "loss": 3.3554, "step": 44870 }, { "epoch": 3.048987634189428, "grad_norm": 1.626135230064392, "learning_rate": 0.0006190294197581193, "loss": 3.4341, "step": 44875 }, { "epoch": 3.0493273542600896, "grad_norm": 1.443228840827942, 
"learning_rate": 0.0006189869547492865, "loss": 3.2777, "step": 44880 }, { "epoch": 3.0496670743307517, "grad_norm": 1.5300182104110718, "learning_rate": 0.0006189444897404539, "loss": 3.4916, "step": 44885 }, { "epoch": 3.0500067944014133, "grad_norm": 1.4208024740219116, "learning_rate": 0.0006189020247316211, "loss": 3.5963, "step": 44890 }, { "epoch": 3.050346514472075, "grad_norm": 1.5126193761825562, "learning_rate": 0.0006188595597227884, "loss": 3.4588, "step": 44895 }, { "epoch": 3.0506862345427366, "grad_norm": 1.3685513734817505, "learning_rate": 0.0006188170947139558, "loss": 3.4766, "step": 44900 }, { "epoch": 3.0510259546133986, "grad_norm": 1.0642260313034058, "learning_rate": 0.000618774629705123, "loss": 3.2384, "step": 44905 }, { "epoch": 3.0513656746840603, "grad_norm": 1.6630722284317017, "learning_rate": 0.0006187321646962902, "loss": 3.4026, "step": 44910 }, { "epoch": 3.051705394754722, "grad_norm": 1.0305455923080444, "learning_rate": 0.0006186896996874576, "loss": 3.5885, "step": 44915 }, { "epoch": 3.052045114825384, "grad_norm": 1.169061541557312, "learning_rate": 0.0006186472346786248, "loss": 3.5331, "step": 44920 }, { "epoch": 3.0523848348960456, "grad_norm": 1.3675216436386108, "learning_rate": 0.000618604769669792, "loss": 3.4353, "step": 44925 }, { "epoch": 3.0527245549667072, "grad_norm": 1.506380319595337, "learning_rate": 0.0006185623046609595, "loss": 3.561, "step": 44930 }, { "epoch": 3.0530642750373693, "grad_norm": 1.5541292428970337, "learning_rate": 0.0006185198396521267, "loss": 3.2687, "step": 44935 }, { "epoch": 3.053403995108031, "grad_norm": 1.345220923423767, "learning_rate": 0.0006184773746432939, "loss": 3.381, "step": 44940 }, { "epoch": 3.0537437151786926, "grad_norm": 1.1807076930999756, "learning_rate": 0.0006184349096344612, "loss": 3.5094, "step": 44945 }, { "epoch": 3.0540834352493547, "grad_norm": 1.2249805927276611, "learning_rate": 0.0006183924446256285, "loss": 3.4845, "step": 44950 }, { "epoch": 
3.0544231553200163, "grad_norm": 1.1686375141143799, "learning_rate": 0.0006183499796167957, "loss": 3.3283, "step": 44955 }, { "epoch": 3.054762875390678, "grad_norm": 0.9257850050926208, "learning_rate": 0.0006183075146079631, "loss": 3.5408, "step": 44960 }, { "epoch": 3.05510259546134, "grad_norm": 1.455501675605774, "learning_rate": 0.0006182650495991304, "loss": 3.4065, "step": 44965 }, { "epoch": 3.0554423155320016, "grad_norm": 1.4760726690292358, "learning_rate": 0.0006182225845902976, "loss": 3.3422, "step": 44970 }, { "epoch": 3.0557820356026633, "grad_norm": 1.7240872383117676, "learning_rate": 0.0006181801195814649, "loss": 3.4723, "step": 44975 }, { "epoch": 3.0561217556733253, "grad_norm": 1.1859674453735352, "learning_rate": 0.0006181376545726321, "loss": 3.4903, "step": 44980 }, { "epoch": 3.056461475743987, "grad_norm": 1.2433308362960815, "learning_rate": 0.0006180951895637994, "loss": 3.5054, "step": 44985 }, { "epoch": 3.0568011958146486, "grad_norm": 1.2473646402359009, "learning_rate": 0.0006180527245549667, "loss": 3.5265, "step": 44990 }, { "epoch": 3.0571409158853107, "grad_norm": 1.1910622119903564, "learning_rate": 0.000618010259546134, "loss": 3.1388, "step": 44995 }, { "epoch": 3.0574806359559723, "grad_norm": 1.2896859645843506, "learning_rate": 0.0006179677945373013, "loss": 3.4837, "step": 45000 }, { "epoch": 3.057820356026634, "grad_norm": 1.362323522567749, "learning_rate": 0.0006179253295284686, "loss": 3.3192, "step": 45005 }, { "epoch": 3.058160076097296, "grad_norm": 1.186782717704773, "learning_rate": 0.0006178828645196358, "loss": 3.1704, "step": 45010 }, { "epoch": 3.0584997961679576, "grad_norm": 1.2972956895828247, "learning_rate": 0.0006178403995108031, "loss": 3.5406, "step": 45015 }, { "epoch": 3.0588395162386193, "grad_norm": 1.321549654006958, "learning_rate": 0.0006177979345019704, "loss": 3.1476, "step": 45020 }, { "epoch": 3.0591792363092813, "grad_norm": 1.2462749481201172, "learning_rate": 0.0006177554694931376, 
"loss": 3.5466, "step": 45025 }, { "epoch": 3.059518956379943, "grad_norm": 1.183896541595459, "learning_rate": 0.000617713004484305, "loss": 3.5881, "step": 45030 }, { "epoch": 3.0598586764506046, "grad_norm": 1.3776506185531616, "learning_rate": 0.0006176705394754723, "loss": 3.3316, "step": 45035 }, { "epoch": 3.0601983965212667, "grad_norm": 1.23482346534729, "learning_rate": 0.0006176280744666395, "loss": 3.5975, "step": 45040 }, { "epoch": 3.0605381165919283, "grad_norm": 1.1401804685592651, "learning_rate": 0.0006175856094578067, "loss": 3.2688, "step": 45045 }, { "epoch": 3.06087783666259, "grad_norm": 1.6329920291900635, "learning_rate": 0.0006175431444489741, "loss": 3.3748, "step": 45050 }, { "epoch": 3.0612175567332516, "grad_norm": 1.3991878032684326, "learning_rate": 0.0006175006794401413, "loss": 3.4625, "step": 45055 }, { "epoch": 3.0615572768039137, "grad_norm": 1.1378618478775024, "learning_rate": 0.0006174582144313085, "loss": 3.4732, "step": 45060 }, { "epoch": 3.0618969968745753, "grad_norm": 1.5817246437072754, "learning_rate": 0.000617415749422476, "loss": 3.4794, "step": 45065 }, { "epoch": 3.062236716945237, "grad_norm": 6.9073100090026855, "learning_rate": 0.0006173732844136432, "loss": 3.3742, "step": 45070 }, { "epoch": 3.062576437015899, "grad_norm": 1.1409225463867188, "learning_rate": 0.0006173308194048104, "loss": 3.8202, "step": 45075 }, { "epoch": 3.0629161570865606, "grad_norm": 1.133592128753662, "learning_rate": 0.0006172883543959777, "loss": 3.5192, "step": 45080 }, { "epoch": 3.0632558771572223, "grad_norm": 1.1184605360031128, "learning_rate": 0.000617245889387145, "loss": 3.6114, "step": 45085 }, { "epoch": 3.0635955972278843, "grad_norm": 1.224578619003296, "learning_rate": 0.0006172034243783122, "loss": 3.3757, "step": 45090 }, { "epoch": 3.063935317298546, "grad_norm": 1.199341058731079, "learning_rate": 0.0006171609593694795, "loss": 3.42, "step": 45095 }, { "epoch": 3.0642750373692076, "grad_norm": 1.4838910102844238, 
"learning_rate": 0.0006171184943606469, "loss": 3.4339, "step": 45100 }, { "epoch": 3.0646147574398697, "grad_norm": 1.8039865493774414, "learning_rate": 0.0006170760293518142, "loss": 3.5582, "step": 45105 }, { "epoch": 3.0649544775105313, "grad_norm": 1.1029267311096191, "learning_rate": 0.0006170335643429814, "loss": 3.6395, "step": 45110 }, { "epoch": 3.065294197581193, "grad_norm": 2.002575159072876, "learning_rate": 0.0006169910993341487, "loss": 3.4154, "step": 45115 }, { "epoch": 3.065633917651855, "grad_norm": 1.3593268394470215, "learning_rate": 0.000616948634325316, "loss": 3.5207, "step": 45120 }, { "epoch": 3.0659736377225166, "grad_norm": 1.3157044649124146, "learning_rate": 0.0006169061693164832, "loss": 3.3857, "step": 45125 }, { "epoch": 3.0663133577931783, "grad_norm": 5.708446025848389, "learning_rate": 0.0006168637043076504, "loss": 3.4637, "step": 45130 }, { "epoch": 3.0666530778638403, "grad_norm": 1.3464572429656982, "learning_rate": 0.0006168212392988179, "loss": 3.4328, "step": 45135 }, { "epoch": 3.066992797934502, "grad_norm": 1.9640638828277588, "learning_rate": 0.0006167787742899851, "loss": 3.28, "step": 45140 }, { "epoch": 3.0673325180051636, "grad_norm": 1.3849457502365112, "learning_rate": 0.0006167363092811523, "loss": 3.2684, "step": 45145 }, { "epoch": 3.0676722380758257, "grad_norm": 1.1698646545410156, "learning_rate": 0.0006166938442723197, "loss": 3.4907, "step": 45150 }, { "epoch": 3.0680119581464873, "grad_norm": 1.471271276473999, "learning_rate": 0.0006166513792634869, "loss": 3.4441, "step": 45155 }, { "epoch": 3.068351678217149, "grad_norm": 1.141658067703247, "learning_rate": 0.0006166089142546541, "loss": 3.3546, "step": 45160 }, { "epoch": 3.068691398287811, "grad_norm": 1.2728583812713623, "learning_rate": 0.0006165664492458215, "loss": 3.5972, "step": 45165 }, { "epoch": 3.0690311183584726, "grad_norm": 1.3365758657455444, "learning_rate": 0.0006165239842369888, "loss": 3.4943, "step": 45170 }, { "epoch": 
3.0693708384291343, "grad_norm": 1.47832453250885, "learning_rate": 0.000616481519228156, "loss": 3.3709, "step": 45175 }, { "epoch": 3.0697105584997963, "grad_norm": 1.1917002201080322, "learning_rate": 0.0006164390542193234, "loss": 3.4623, "step": 45180 }, { "epoch": 3.070050278570458, "grad_norm": 1.351613163948059, "learning_rate": 0.0006163965892104906, "loss": 3.4256, "step": 45185 }, { "epoch": 3.0703899986411196, "grad_norm": 1.1679068803787231, "learning_rate": 0.0006163541242016578, "loss": 3.5615, "step": 45190 }, { "epoch": 3.0707297187117817, "grad_norm": 1.6093645095825195, "learning_rate": 0.0006163116591928251, "loss": 3.3264, "step": 45195 }, { "epoch": 3.0710694387824433, "grad_norm": 1.2537055015563965, "learning_rate": 0.0006162691941839924, "loss": 3.4234, "step": 45200 }, { "epoch": 3.071409158853105, "grad_norm": 1.5389262437820435, "learning_rate": 0.0006162267291751597, "loss": 3.1077, "step": 45205 }, { "epoch": 3.071748878923767, "grad_norm": 1.5267258882522583, "learning_rate": 0.000616184264166327, "loss": 3.5361, "step": 45210 }, { "epoch": 3.0720885989944287, "grad_norm": 1.4556713104248047, "learning_rate": 0.0006161417991574943, "loss": 3.5177, "step": 45215 }, { "epoch": 3.0724283190650903, "grad_norm": 1.3687971830368042, "learning_rate": 0.0006160993341486615, "loss": 3.4241, "step": 45220 }, { "epoch": 3.0727680391357524, "grad_norm": 1.4150725603103638, "learning_rate": 0.0006160568691398288, "loss": 3.4832, "step": 45225 }, { "epoch": 3.073107759206414, "grad_norm": 1.1161363124847412, "learning_rate": 0.000616014404130996, "loss": 3.3285, "step": 45230 }, { "epoch": 3.0734474792770756, "grad_norm": 1.4770272970199585, "learning_rate": 0.0006159719391221633, "loss": 3.0612, "step": 45235 }, { "epoch": 3.0737871993477373, "grad_norm": 1.2998046875, "learning_rate": 0.0006159294741133307, "loss": 3.504, "step": 45240 }, { "epoch": 3.0741269194183993, "grad_norm": 1.3803868293762207, "learning_rate": 0.0006158870091044979, 
"loss": 3.6739, "step": 45245 }, { "epoch": 3.074466639489061, "grad_norm": 1.6858258247375488, "learning_rate": 0.0006158445440956652, "loss": 3.7922, "step": 45250 }, { "epoch": 3.0748063595597226, "grad_norm": 1.033996343612671, "learning_rate": 0.0006158020790868325, "loss": 3.4325, "step": 45255 }, { "epoch": 3.0751460796303847, "grad_norm": 1.5435645580291748, "learning_rate": 0.0006157596140779997, "loss": 3.2182, "step": 45260 }, { "epoch": 3.0754857997010463, "grad_norm": 1.5276274681091309, "learning_rate": 0.000615717149069167, "loss": 3.3848, "step": 45265 }, { "epoch": 3.075825519771708, "grad_norm": 1.547436237335205, "learning_rate": 0.0006156746840603343, "loss": 3.2089, "step": 45270 }, { "epoch": 3.07616523984237, "grad_norm": 1.301903486251831, "learning_rate": 0.0006156322190515016, "loss": 3.2073, "step": 45275 }, { "epoch": 3.0765049599130316, "grad_norm": 1.2604727745056152, "learning_rate": 0.0006155897540426688, "loss": 3.3405, "step": 45280 }, { "epoch": 3.0768446799836933, "grad_norm": 1.1624531745910645, "learning_rate": 0.0006155472890338362, "loss": 3.6613, "step": 45285 }, { "epoch": 3.0771844000543553, "grad_norm": 1.0770974159240723, "learning_rate": 0.0006155048240250034, "loss": 3.4037, "step": 45290 }, { "epoch": 3.077524120125017, "grad_norm": 1.6038063764572144, "learning_rate": 0.0006154623590161706, "loss": 3.4291, "step": 45295 }, { "epoch": 3.0778638401956786, "grad_norm": 1.3468550443649292, "learning_rate": 0.000615419894007338, "loss": 3.6772, "step": 45300 }, { "epoch": 3.0782035602663407, "grad_norm": 1.0259641408920288, "learning_rate": 0.0006153774289985052, "loss": 3.4695, "step": 45305 }, { "epoch": 3.0785432803370023, "grad_norm": 1.6032816171646118, "learning_rate": 0.0006153349639896725, "loss": 3.4942, "step": 45310 }, { "epoch": 3.078883000407664, "grad_norm": 1.0251809358596802, "learning_rate": 0.0006152924989808399, "loss": 3.5843, "step": 45315 }, { "epoch": 3.079222720478326, "grad_norm": 
1.215618371963501, "learning_rate": 0.0006152500339720071, "loss": 3.3985, "step": 45320 }, { "epoch": 3.0795624405489876, "grad_norm": 1.2591513395309448, "learning_rate": 0.0006152075689631743, "loss": 3.4693, "step": 45325 }, { "epoch": 3.0799021606196493, "grad_norm": 1.265233039855957, "learning_rate": 0.0006151651039543416, "loss": 3.4754, "step": 45330 }, { "epoch": 3.0802418806903114, "grad_norm": 1.4289551973342896, "learning_rate": 0.0006151226389455089, "loss": 3.5026, "step": 45335 }, { "epoch": 3.080581600760973, "grad_norm": 1.2451751232147217, "learning_rate": 0.0006150801739366761, "loss": 3.2809, "step": 45340 }, { "epoch": 3.0809213208316346, "grad_norm": 1.3607521057128906, "learning_rate": 0.0006150377089278435, "loss": 3.6311, "step": 45345 }, { "epoch": 3.0812610409022967, "grad_norm": 1.5903358459472656, "learning_rate": 0.0006149952439190108, "loss": 3.4729, "step": 45350 }, { "epoch": 3.0816007609729583, "grad_norm": 1.6005879640579224, "learning_rate": 0.000614952778910178, "loss": 3.5196, "step": 45355 }, { "epoch": 3.08194048104362, "grad_norm": 1.2986429929733276, "learning_rate": 0.0006149103139013453, "loss": 3.5424, "step": 45360 }, { "epoch": 3.082280201114282, "grad_norm": 1.6538854837417603, "learning_rate": 0.0006148678488925126, "loss": 3.4518, "step": 45365 }, { "epoch": 3.0826199211849437, "grad_norm": 1.4944618940353394, "learning_rate": 0.0006148253838836798, "loss": 3.443, "step": 45370 }, { "epoch": 3.0829596412556053, "grad_norm": 1.361278772354126, "learning_rate": 0.0006147829188748471, "loss": 3.1133, "step": 45375 }, { "epoch": 3.0832993613262674, "grad_norm": 1.2475816011428833, "learning_rate": 0.0006147404538660144, "loss": 3.4301, "step": 45380 }, { "epoch": 3.083639081396929, "grad_norm": 1.2508471012115479, "learning_rate": 0.0006146979888571817, "loss": 3.5739, "step": 45385 }, { "epoch": 3.0839788014675906, "grad_norm": 1.4376658201217651, "learning_rate": 0.000614655523848349, "loss": 3.4837, "step": 45390 }, 
{ "epoch": 3.0843185215382523, "grad_norm": 1.0657851696014404, "learning_rate": 0.0006146130588395162, "loss": 3.5087, "step": 45395 }, { "epoch": 3.0846582416089143, "grad_norm": 1.4242188930511475, "learning_rate": 0.0006145705938306835, "loss": 3.4576, "step": 45400 }, { "epoch": 3.084997961679576, "grad_norm": 2.163621664047241, "learning_rate": 0.0006145281288218508, "loss": 3.3545, "step": 45405 }, { "epoch": 3.0853376817502376, "grad_norm": 1.3732507228851318, "learning_rate": 0.000614485663813018, "loss": 3.2309, "step": 45410 }, { "epoch": 3.0856774018208997, "grad_norm": 1.3513835668563843, "learning_rate": 0.0006144431988041854, "loss": 3.2801, "step": 45415 }, { "epoch": 3.0860171218915613, "grad_norm": 2.351116418838501, "learning_rate": 0.0006144007337953527, "loss": 3.393, "step": 45420 }, { "epoch": 3.086356841962223, "grad_norm": 1.4653000831604004, "learning_rate": 0.0006143582687865199, "loss": 3.5361, "step": 45425 }, { "epoch": 3.086696562032885, "grad_norm": 1.2952382564544678, "learning_rate": 0.0006143158037776871, "loss": 3.5699, "step": 45430 }, { "epoch": 3.0870362821035466, "grad_norm": 1.3594781160354614, "learning_rate": 0.0006142733387688545, "loss": 3.6366, "step": 45435 }, { "epoch": 3.0873760021742083, "grad_norm": 1.122217059135437, "learning_rate": 0.0006142308737600217, "loss": 3.4081, "step": 45440 }, { "epoch": 3.0877157222448703, "grad_norm": 1.4401246309280396, "learning_rate": 0.0006141884087511891, "loss": 3.4351, "step": 45445 }, { "epoch": 3.088055442315532, "grad_norm": 1.4996811151504517, "learning_rate": 0.0006141459437423564, "loss": 3.7892, "step": 45450 }, { "epoch": 3.0883951623861936, "grad_norm": 1.2089322805404663, "learning_rate": 0.0006141034787335236, "loss": 3.473, "step": 45455 }, { "epoch": 3.0887348824568557, "grad_norm": 1.4355798959732056, "learning_rate": 0.0006140610137246909, "loss": 3.5344, "step": 45460 }, { "epoch": 3.0890746025275173, "grad_norm": 1.3717691898345947, "learning_rate": 
0.0006140185487158582, "loss": 3.379, "step": 45465 }, { "epoch": 3.089414322598179, "grad_norm": 1.1708626747131348, "learning_rate": 0.0006139760837070254, "loss": 3.3429, "step": 45470 }, { "epoch": 3.089754042668841, "grad_norm": 1.404199481010437, "learning_rate": 0.0006139336186981927, "loss": 3.2284, "step": 45475 }, { "epoch": 3.0900937627395026, "grad_norm": 1.3202823400497437, "learning_rate": 0.00061389115368936, "loss": 3.2602, "step": 45480 }, { "epoch": 3.0904334828101643, "grad_norm": 2.5240347385406494, "learning_rate": 0.0006138486886805273, "loss": 3.4284, "step": 45485 }, { "epoch": 3.0907732028808264, "grad_norm": 1.1612930297851562, "learning_rate": 0.0006138062236716946, "loss": 3.6039, "step": 45490 }, { "epoch": 3.091112922951488, "grad_norm": 1.5785819292068481, "learning_rate": 0.0006137637586628618, "loss": 3.4387, "step": 45495 }, { "epoch": 3.0914526430221496, "grad_norm": 1.3386114835739136, "learning_rate": 0.0006137212936540291, "loss": 3.4395, "step": 45500 }, { "epoch": 3.0917923630928117, "grad_norm": 1.5601589679718018, "learning_rate": 0.0006136788286451964, "loss": 3.3039, "step": 45505 }, { "epoch": 3.0921320831634733, "grad_norm": 1.4175492525100708, "learning_rate": 0.0006136363636363636, "loss": 3.6544, "step": 45510 }, { "epoch": 3.092471803234135, "grad_norm": 1.2296220064163208, "learning_rate": 0.000613593898627531, "loss": 3.6135, "step": 45515 }, { "epoch": 3.092811523304797, "grad_norm": 0.9545400738716125, "learning_rate": 0.0006135514336186983, "loss": 3.5859, "step": 45520 }, { "epoch": 3.0931512433754587, "grad_norm": 1.4403594732284546, "learning_rate": 0.0006135089686098655, "loss": 3.5041, "step": 45525 }, { "epoch": 3.0934909634461203, "grad_norm": 1.3535234928131104, "learning_rate": 0.0006134665036010327, "loss": 3.602, "step": 45530 }, { "epoch": 3.0938306835167824, "grad_norm": 1.5082499980926514, "learning_rate": 0.0006134240385922001, "loss": 3.4003, "step": 45535 }, { "epoch": 3.094170403587444, 
"grad_norm": 0.9747965931892395, "learning_rate": 0.0006133815735833673, "loss": 3.5264, "step": 45540 }, { "epoch": 3.0945101236581056, "grad_norm": 1.1838974952697754, "learning_rate": 0.0006133391085745345, "loss": 3.6855, "step": 45545 }, { "epoch": 3.0948498437287677, "grad_norm": 1.2940688133239746, "learning_rate": 0.000613296643565702, "loss": 3.4036, "step": 45550 }, { "epoch": 3.0951895637994293, "grad_norm": 1.3213920593261719, "learning_rate": 0.0006132541785568692, "loss": 3.5886, "step": 45555 }, { "epoch": 3.095529283870091, "grad_norm": 1.2163206338882446, "learning_rate": 0.0006132117135480364, "loss": 3.4935, "step": 45560 }, { "epoch": 3.095869003940753, "grad_norm": 3.6152713298797607, "learning_rate": 0.0006131692485392038, "loss": 3.4553, "step": 45565 }, { "epoch": 3.0962087240114147, "grad_norm": NaN, "learning_rate": 0.0006131352765321375, "loss": 3.5422, "step": 45570 }, { "epoch": 3.0965484440820763, "grad_norm": 1.2216639518737793, "learning_rate": 0.0006130928115233048, "loss": 3.4824, "step": 45575 }, { "epoch": 3.096888164152738, "grad_norm": 1.2870502471923828, "learning_rate": 0.0006130503465144721, "loss": 3.2443, "step": 45580 }, { "epoch": 3.0972278842234, "grad_norm": 1.3932127952575684, "learning_rate": 0.0006130078815056394, "loss": 3.6359, "step": 45585 }, { "epoch": 3.0975676042940616, "grad_norm": 1.2936004400253296, "learning_rate": 0.0006129654164968066, "loss": 3.3503, "step": 45590 }, { "epoch": 3.0979073243647233, "grad_norm": 1.0070725679397583, "learning_rate": 0.0006129229514879739, "loss": 3.3634, "step": 45595 }, { "epoch": 3.0982470444353853, "grad_norm": 1.5649296045303345, "learning_rate": 0.0006128804864791412, "loss": 3.3719, "step": 45600 }, { "epoch": 3.098586764506047, "grad_norm": 0.928960382938385, "learning_rate": 0.0006128380214703084, "loss": 3.2957, "step": 45605 }, { "epoch": 3.0989264845767086, "grad_norm": 1.1094951629638672, "learning_rate": 0.0006127955564614758, "loss": 3.2671, "step": 45610 }, 
{ "epoch": 3.0992662046473707, "grad_norm": 1.3753803968429565, "learning_rate": 0.000612753091452643, "loss": 3.2535, "step": 45615 }, { "epoch": 3.0996059247180323, "grad_norm": 1.0846582651138306, "learning_rate": 0.0006127106264438103, "loss": 3.4851, "step": 45620 }, { "epoch": 3.099945644788694, "grad_norm": 1.9133808612823486, "learning_rate": 0.0006126681614349776, "loss": 3.1957, "step": 45625 }, { "epoch": 3.100285364859356, "grad_norm": 1.5107945203781128, "learning_rate": 0.0006126256964261448, "loss": 3.4532, "step": 45630 }, { "epoch": 3.1006250849300176, "grad_norm": 1.4215947389602661, "learning_rate": 0.0006125832314173121, "loss": 3.4718, "step": 45635 }, { "epoch": 3.1009648050006793, "grad_norm": 1.1441162824630737, "learning_rate": 0.0006125407664084794, "loss": 3.1749, "step": 45640 }, { "epoch": 3.1013045250713414, "grad_norm": 1.7101263999938965, "learning_rate": 0.0006124983013996467, "loss": 3.5251, "step": 45645 }, { "epoch": 3.101644245142003, "grad_norm": 1.4296177625656128, "learning_rate": 0.0006124558363908141, "loss": 3.3975, "step": 45650 }, { "epoch": 3.1019839652126646, "grad_norm": 1.2640212774276733, "learning_rate": 0.0006124133713819813, "loss": 3.4497, "step": 45655 }, { "epoch": 3.1023236852833267, "grad_norm": 2.078572988510132, "learning_rate": 0.0006123709063731485, "loss": 3.6756, "step": 45660 }, { "epoch": 3.1026634053539883, "grad_norm": 1.6442937850952148, "learning_rate": 0.0006123284413643158, "loss": 3.2799, "step": 45665 }, { "epoch": 3.10300312542465, "grad_norm": 1.5210949182510376, "learning_rate": 0.0006122859763554831, "loss": 3.3208, "step": 45670 }, { "epoch": 3.103342845495312, "grad_norm": 1.1851388216018677, "learning_rate": 0.0006122435113466503, "loss": 3.097, "step": 45675 }, { "epoch": 3.1036825655659737, "grad_norm": 1.473912000656128, "learning_rate": 0.0006122010463378177, "loss": 3.5946, "step": 45680 }, { "epoch": 3.1040222856366353, "grad_norm": 1.1034220457077026, "learning_rate": 
0.000612158581328985, "loss": 3.6225, "step": 45685 }, { "epoch": 3.1043620057072974, "grad_norm": 1.3642841577529907, "learning_rate": 0.0006121161163201522, "loss": 3.435, "step": 45690 }, { "epoch": 3.104701725777959, "grad_norm": 1.600019097328186, "learning_rate": 0.0006120736513113195, "loss": 3.3912, "step": 45695 }, { "epoch": 3.1050414458486206, "grad_norm": 1.723976731300354, "learning_rate": 0.0006120311863024868, "loss": 3.6889, "step": 45700 }, { "epoch": 3.1053811659192827, "grad_norm": 1.7914884090423584, "learning_rate": 0.000611988721293654, "loss": 3.5266, "step": 45705 }, { "epoch": 3.1057208859899443, "grad_norm": 1.4625910520553589, "learning_rate": 0.0006119462562848214, "loss": 3.6529, "step": 45710 }, { "epoch": 3.106060606060606, "grad_norm": 1.5525407791137695, "learning_rate": 0.0006119037912759886, "loss": 3.3846, "step": 45715 }, { "epoch": 3.106400326131268, "grad_norm": 1.034467339515686, "learning_rate": 0.0006118613262671559, "loss": 3.3821, "step": 45720 }, { "epoch": 3.1067400462019297, "grad_norm": 1.4836831092834473, "learning_rate": 0.0006118188612583232, "loss": 3.3622, "step": 45725 }, { "epoch": 3.1070797662725913, "grad_norm": 1.113092064857483, "learning_rate": 0.0006117763962494904, "loss": 3.3252, "step": 45730 }, { "epoch": 3.107419486343253, "grad_norm": 1.4519814252853394, "learning_rate": 0.0006117339312406577, "loss": 3.4701, "step": 45735 }, { "epoch": 3.107759206413915, "grad_norm": 1.7156579494476318, "learning_rate": 0.000611691466231825, "loss": 3.4515, "step": 45740 }, { "epoch": 3.1080989264845766, "grad_norm": 1.4271669387817383, "learning_rate": 0.0006116490012229923, "loss": 3.5629, "step": 45745 }, { "epoch": 3.1084386465552383, "grad_norm": 1.482161045074463, "learning_rate": 0.0006116065362141596, "loss": 3.5768, "step": 45750 }, { "epoch": 3.1087783666259003, "grad_norm": 1.6209338903427124, "learning_rate": 0.0006115640712053269, "loss": 3.4523, "step": 45755 }, { "epoch": 3.109118086696562, 
"grad_norm": 1.4140269756317139, "learning_rate": 0.0006115216061964941, "loss": 3.5188, "step": 45760 }, { "epoch": 3.1094578067672236, "grad_norm": 1.3062160015106201, "learning_rate": 0.0006114791411876613, "loss": 3.235, "step": 45765 }, { "epoch": 3.1097975268378857, "grad_norm": 1.6874371767044067, "learning_rate": 0.0006114366761788287, "loss": 3.3922, "step": 45770 }, { "epoch": 3.1101372469085473, "grad_norm": 1.3887293338775635, "learning_rate": 0.0006113942111699959, "loss": 3.4785, "step": 45775 }, { "epoch": 3.110476966979209, "grad_norm": 1.0411242246627808, "learning_rate": 0.0006113517461611632, "loss": 3.5546, "step": 45780 }, { "epoch": 3.110816687049871, "grad_norm": 1.2749441862106323, "learning_rate": 0.0006113092811523306, "loss": 3.5811, "step": 45785 }, { "epoch": 3.1111564071205327, "grad_norm": 1.1779125928878784, "learning_rate": 0.0006112668161434978, "loss": 3.7549, "step": 45790 }, { "epoch": 3.1114961271911943, "grad_norm": 1.482248306274414, "learning_rate": 0.000611224351134665, "loss": 3.5727, "step": 45795 }, { "epoch": 3.1118358472618564, "grad_norm": 1.3850595951080322, "learning_rate": 0.0006111818861258324, "loss": 3.714, "step": 45800 }, { "epoch": 3.112175567332518, "grad_norm": 1.4376187324523926, "learning_rate": 0.0006111394211169996, "loss": 3.353, "step": 45805 }, { "epoch": 3.1125152874031796, "grad_norm": 1.1663975715637207, "learning_rate": 0.0006110969561081668, "loss": 3.2775, "step": 45810 }, { "epoch": 3.1128550074738417, "grad_norm": 1.0554778575897217, "learning_rate": 0.0006110544910993342, "loss": 3.3814, "step": 45815 }, { "epoch": 3.1131947275445033, "grad_norm": 1.483296513557434, "learning_rate": 0.0006110120260905015, "loss": 3.6884, "step": 45820 }, { "epoch": 3.113534447615165, "grad_norm": 1.363166093826294, "learning_rate": 0.0006109695610816687, "loss": 3.3023, "step": 45825 }, { "epoch": 3.113874167685827, "grad_norm": 0.9944158792495728, "learning_rate": 0.000610927096072836, "loss": 3.5828, 
"step": 45830 }, { "epoch": 3.1142138877564887, "grad_norm": 1.2578719854354858, "learning_rate": 0.0006108846310640033, "loss": 3.5332, "step": 45835 }, { "epoch": 3.1145536078271503, "grad_norm": 1.382556676864624, "learning_rate": 0.0006108421660551705, "loss": 3.5181, "step": 45840 }, { "epoch": 3.1148933278978124, "grad_norm": 1.227181315422058, "learning_rate": 0.0006107997010463378, "loss": 3.4432, "step": 45845 }, { "epoch": 3.115233047968474, "grad_norm": 1.3463290929794312, "learning_rate": 0.0006107572360375052, "loss": 3.5964, "step": 45850 }, { "epoch": 3.1155727680391356, "grad_norm": 1.5290919542312622, "learning_rate": 0.0006107147710286724, "loss": 3.4499, "step": 45855 }, { "epoch": 3.1159124881097977, "grad_norm": 1.3440643548965454, "learning_rate": 0.0006106723060198397, "loss": 3.3574, "step": 45860 }, { "epoch": 3.1162522081804593, "grad_norm": 1.0983834266662598, "learning_rate": 0.0006106298410110069, "loss": 3.5068, "step": 45865 }, { "epoch": 3.116591928251121, "grad_norm": 1.4200443029403687, "learning_rate": 0.0006105873760021742, "loss": 3.3992, "step": 45870 }, { "epoch": 3.116931648321783, "grad_norm": 1.28275465965271, "learning_rate": 0.0006105449109933415, "loss": 3.5023, "step": 45875 }, { "epoch": 3.1172713683924447, "grad_norm": 1.231931209564209, "learning_rate": 0.0006105024459845087, "loss": 3.6425, "step": 45880 }, { "epoch": 3.1176110884631063, "grad_norm": 1.3005805015563965, "learning_rate": 0.0006104599809756761, "loss": 3.4046, "step": 45885 }, { "epoch": 3.1179508085337684, "grad_norm": 1.652156114578247, "learning_rate": 0.0006104175159668434, "loss": 3.5017, "step": 45890 }, { "epoch": 3.11829052860443, "grad_norm": 1.4338189363479614, "learning_rate": 0.0006103750509580106, "loss": 3.3117, "step": 45895 }, { "epoch": 3.1186302486750916, "grad_norm": 1.7889819145202637, "learning_rate": 0.0006103325859491778, "loss": 3.6527, "step": 45900 }, { "epoch": 3.1189699687457537, "grad_norm": 1.1652958393096924, 
"learning_rate": 0.0006102901209403452, "loss": 3.6604, "step": 45905 }, { "epoch": 3.1193096888164153, "grad_norm": 1.5772041082382202, "learning_rate": 0.0006102476559315124, "loss": 3.5603, "step": 45910 }, { "epoch": 3.119649408887077, "grad_norm": 1.311049222946167, "learning_rate": 0.0006102051909226796, "loss": 3.5225, "step": 45915 }, { "epoch": 3.1199891289577386, "grad_norm": 1.2819911241531372, "learning_rate": 0.0006101627259138471, "loss": 3.3756, "step": 45920 }, { "epoch": 3.1203288490284007, "grad_norm": 1.2945932149887085, "learning_rate": 0.0006101202609050143, "loss": 3.2555, "step": 45925 }, { "epoch": 3.1206685690990623, "grad_norm": 1.8074145317077637, "learning_rate": 0.0006100777958961815, "loss": 3.4745, "step": 45930 }, { "epoch": 3.121008289169724, "grad_norm": 1.374984622001648, "learning_rate": 0.0006100353308873489, "loss": 3.4811, "step": 45935 }, { "epoch": 3.121348009240386, "grad_norm": 1.3289365768432617, "learning_rate": 0.0006099928658785161, "loss": 3.28, "step": 45940 }, { "epoch": 3.1216877293110477, "grad_norm": 1.346815824508667, "learning_rate": 0.0006099504008696833, "loss": 3.538, "step": 45945 }, { "epoch": 3.1220274493817093, "grad_norm": 1.2100954055786133, "learning_rate": 0.0006099079358608506, "loss": 3.5166, "step": 45950 }, { "epoch": 3.1223671694523714, "grad_norm": 1.2741049528121948, "learning_rate": 0.000609865470852018, "loss": 3.2709, "step": 45955 }, { "epoch": 3.122706889523033, "grad_norm": 1.3162730932235718, "learning_rate": 0.0006098230058431852, "loss": 3.4788, "step": 45960 }, { "epoch": 3.1230466095936946, "grad_norm": 1.1028448343276978, "learning_rate": 0.0006097805408343525, "loss": 3.2889, "step": 45965 }, { "epoch": 3.1233863296643567, "grad_norm": 1.864567518234253, "learning_rate": 0.0006097380758255198, "loss": 3.5001, "step": 45970 }, { "epoch": 3.1237260497350183, "grad_norm": 1.6767839193344116, "learning_rate": 0.000609695610816687, "loss": 3.5841, "step": 45975 }, { "epoch": 
3.12406576980568, "grad_norm": 1.2024388313293457, "learning_rate": 0.0006096531458078543, "loss": 3.2839, "step": 45980 }, { "epoch": 3.124405489876342, "grad_norm": 1.143550157546997, "learning_rate": 0.0006096106807990216, "loss": 3.6212, "step": 45985 }, { "epoch": 3.1247452099470037, "grad_norm": 1.0927555561065674, "learning_rate": 0.000609568215790189, "loss": 3.2077, "step": 45990 }, { "epoch": 3.1250849300176653, "grad_norm": 1.1881505250930786, "learning_rate": 0.0006095257507813562, "loss": 3.5222, "step": 45995 }, { "epoch": 3.1254246500883274, "grad_norm": 1.2931349277496338, "learning_rate": 0.0006094832857725234, "loss": 3.6824, "step": 46000 }, { "epoch": 3.125764370158989, "grad_norm": 1.1499040126800537, "learning_rate": 0.0006094408207636908, "loss": 3.3758, "step": 46005 }, { "epoch": 3.1261040902296506, "grad_norm": 1.1697932481765747, "learning_rate": 0.000609398355754858, "loss": 3.546, "step": 46010 }, { "epoch": 3.1264438103003127, "grad_norm": 1.3497369289398193, "learning_rate": 0.0006093558907460252, "loss": 3.3667, "step": 46015 }, { "epoch": 3.1267835303709743, "grad_norm": 1.3964118957519531, "learning_rate": 0.0006093134257371926, "loss": 3.406, "step": 46020 }, { "epoch": 3.127123250441636, "grad_norm": 1.141353726387024, "learning_rate": 0.0006092709607283599, "loss": 3.4859, "step": 46025 }, { "epoch": 3.127462970512298, "grad_norm": 1.1624276638031006, "learning_rate": 0.0006092284957195271, "loss": 3.4523, "step": 46030 }, { "epoch": 3.1278026905829597, "grad_norm": 1.4369248151779175, "learning_rate": 0.0006091860307106945, "loss": 3.2275, "step": 46035 }, { "epoch": 3.1281424106536213, "grad_norm": 1.9019341468811035, "learning_rate": 0.0006091435657018617, "loss": 3.5311, "step": 46040 }, { "epoch": 3.1284821307242834, "grad_norm": 1.3506786823272705, "learning_rate": 0.0006091011006930289, "loss": 3.239, "step": 46045 }, { "epoch": 3.128821850794945, "grad_norm": 1.428390383720398, "learning_rate": 0.0006090586356841962, 
"loss": 3.5009, "step": 46050 }, { "epoch": 3.1291615708656066, "grad_norm": 1.2439333200454712, "learning_rate": 0.0006090161706753635, "loss": 3.3402, "step": 46055 }, { "epoch": 3.1295012909362687, "grad_norm": 1.3020583391189575, "learning_rate": 0.0006089737056665308, "loss": 3.5467, "step": 46060 }, { "epoch": 3.1298410110069304, "grad_norm": 1.3999029397964478, "learning_rate": 0.0006089312406576981, "loss": 3.443, "step": 46065 }, { "epoch": 3.130180731077592, "grad_norm": 1.55173921585083, "learning_rate": 0.0006088887756488654, "loss": 3.6232, "step": 46070 }, { "epoch": 3.1305204511482536, "grad_norm": 1.790220022201538, "learning_rate": 0.0006088463106400326, "loss": 3.3302, "step": 46075 }, { "epoch": 3.1308601712189157, "grad_norm": 1.5738433599472046, "learning_rate": 0.0006088038456311999, "loss": 3.4125, "step": 46080 }, { "epoch": 3.1311998912895773, "grad_norm": 1.2510416507720947, "learning_rate": 0.0006087613806223672, "loss": 3.3248, "step": 46085 }, { "epoch": 3.131539611360239, "grad_norm": 1.2249952554702759, "learning_rate": 0.0006087189156135344, "loss": 3.7081, "step": 46090 }, { "epoch": 3.131879331430901, "grad_norm": 1.1673643589019775, "learning_rate": 0.0006086764506047018, "loss": 3.6657, "step": 46095 }, { "epoch": 3.1322190515015627, "grad_norm": 1.1490727663040161, "learning_rate": 0.000608633985595869, "loss": 3.6746, "step": 46100 }, { "epoch": 3.1325587715722243, "grad_norm": 2.282069206237793, "learning_rate": 0.0006085915205870363, "loss": 3.7449, "step": 46105 }, { "epoch": 3.1328984916428864, "grad_norm": 1.352993130683899, "learning_rate": 0.0006085490555782036, "loss": 3.5259, "step": 46110 }, { "epoch": 3.133238211713548, "grad_norm": 1.5034648180007935, "learning_rate": 0.0006085065905693708, "loss": 3.2931, "step": 46115 }, { "epoch": 3.1335779317842096, "grad_norm": 1.4029514789581299, "learning_rate": 0.0006084641255605381, "loss": 3.4324, "step": 46120 }, { "epoch": 3.1339176518548717, "grad_norm": 
1.3248939514160156, "learning_rate": 0.0006084216605517054, "loss": 3.7048, "step": 46125 }, { "epoch": 3.1342573719255333, "grad_norm": 1.202429175376892, "learning_rate": 0.0006083791955428727, "loss": 3.1997, "step": 46130 }, { "epoch": 3.134597091996195, "grad_norm": 1.6315077543258667, "learning_rate": 0.00060833673053404, "loss": 3.368, "step": 46135 }, { "epoch": 3.134936812066857, "grad_norm": 1.2032949924468994, "learning_rate": 0.0006082942655252073, "loss": 3.2473, "step": 46140 }, { "epoch": 3.1352765321375187, "grad_norm": 1.3955529928207397, "learning_rate": 0.0006082518005163745, "loss": 3.5302, "step": 46145 }, { "epoch": 3.1356162522081803, "grad_norm": 1.408687710762024, "learning_rate": 0.0006082093355075417, "loss": 3.439, "step": 46150 }, { "epoch": 3.1359559722788424, "grad_norm": 1.2002086639404297, "learning_rate": 0.0006081668704987091, "loss": 3.4844, "step": 46155 }, { "epoch": 3.136295692349504, "grad_norm": 1.3604390621185303, "learning_rate": 0.0006081244054898763, "loss": 3.4025, "step": 46160 }, { "epoch": 3.1366354124201656, "grad_norm": 1.487786054611206, "learning_rate": 0.0006080819404810436, "loss": 3.4823, "step": 46165 }, { "epoch": 3.1369751324908277, "grad_norm": 1.1050435304641724, "learning_rate": 0.000608039475472211, "loss": 3.5751, "step": 46170 }, { "epoch": 3.1373148525614893, "grad_norm": 1.4454766511917114, "learning_rate": 0.0006079970104633782, "loss": 3.671, "step": 46175 }, { "epoch": 3.137654572632151, "grad_norm": 1.3341429233551025, "learning_rate": 0.0006079545454545454, "loss": 3.5087, "step": 46180 }, { "epoch": 3.137994292702813, "grad_norm": 1.3358532190322876, "learning_rate": 0.0006079120804457128, "loss": 3.5273, "step": 46185 }, { "epoch": 3.1383340127734747, "grad_norm": 1.4991095066070557, "learning_rate": 0.00060786961543688, "loss": 3.584, "step": 46190 }, { "epoch": 3.1386737328441363, "grad_norm": 1.2180535793304443, "learning_rate": 0.0006078271504280472, "loss": 3.6359, "step": 46195 }, { 
"epoch": 3.1390134529147984, "grad_norm": 1.6806111335754395, "learning_rate": 0.0006077846854192147, "loss": 3.4374, "step": 46200 }, { "epoch": 3.13935317298546, "grad_norm": 1.4255584478378296, "learning_rate": 0.0006077422204103819, "loss": 3.3958, "step": 46205 }, { "epoch": 3.1396928930561216, "grad_norm": 1.7343487739562988, "learning_rate": 0.0006076997554015491, "loss": 3.4638, "step": 46210 }, { "epoch": 3.1400326131267837, "grad_norm": 1.1582876443862915, "learning_rate": 0.0006076572903927164, "loss": 3.8169, "step": 46215 }, { "epoch": 3.1403723331974454, "grad_norm": 1.150223731994629, "learning_rate": 0.0006076148253838837, "loss": 3.4874, "step": 46220 }, { "epoch": 3.140712053268107, "grad_norm": 1.3204333782196045, "learning_rate": 0.0006075723603750509, "loss": 3.3711, "step": 46225 }, { "epoch": 3.141051773338769, "grad_norm": 1.4387731552124023, "learning_rate": 0.0006075298953662182, "loss": 3.3087, "step": 46230 }, { "epoch": 3.1413914934094307, "grad_norm": 1.5855740308761597, "learning_rate": 0.0006074874303573856, "loss": 3.4458, "step": 46235 }, { "epoch": 3.1417312134800923, "grad_norm": 1.1538736820220947, "learning_rate": 0.0006074449653485528, "loss": 3.4297, "step": 46240 }, { "epoch": 3.1420709335507544, "grad_norm": 1.5265257358551025, "learning_rate": 0.0006074025003397201, "loss": 3.3562, "step": 46245 }, { "epoch": 3.142410653621416, "grad_norm": 1.2451738119125366, "learning_rate": 0.0006073600353308873, "loss": 3.4876, "step": 46250 }, { "epoch": 3.1427503736920777, "grad_norm": 2.037403106689453, "learning_rate": 0.0006073175703220546, "loss": 3.4635, "step": 46255 }, { "epoch": 3.1430900937627397, "grad_norm": 1.6353331804275513, "learning_rate": 0.0006072751053132219, "loss": 3.5331, "step": 46260 }, { "epoch": 3.1434298138334014, "grad_norm": 1.377304196357727, "learning_rate": 0.0006072326403043891, "loss": 3.5058, "step": 46265 }, { "epoch": 3.143769533904063, "grad_norm": 1.855214238166809, "learning_rate": 
0.0006071901752955565, "loss": 3.6375, "step": 46270 }, { "epoch": 3.1441092539747246, "grad_norm": 1.4343295097351074, "learning_rate": 0.0006071477102867238, "loss": 3.6819, "step": 46275 }, { "epoch": 3.1444489740453867, "grad_norm": 1.5112332105636597, "learning_rate": 0.000607105245277891, "loss": 3.4781, "step": 46280 }, { "epoch": 3.1447886941160483, "grad_norm": 1.5640445947647095, "learning_rate": 0.0006070627802690583, "loss": 3.4615, "step": 46285 }, { "epoch": 3.14512841418671, "grad_norm": 0.9909858703613281, "learning_rate": 0.0006070203152602256, "loss": 3.6198, "step": 46290 }, { "epoch": 3.145468134257372, "grad_norm": 1.528580665588379, "learning_rate": 0.0006069778502513928, "loss": 3.3851, "step": 46295 }, { "epoch": 3.1458078543280337, "grad_norm": 1.3045730590820312, "learning_rate": 0.00060693538524256, "loss": 3.6879, "step": 46300 }, { "epoch": 3.1461475743986953, "grad_norm": 1.5005170106887817, "learning_rate": 0.0006068929202337275, "loss": 3.6866, "step": 46305 }, { "epoch": 3.1464872944693574, "grad_norm": 1.381426453590393, "learning_rate": 0.0006068504552248947, "loss": 3.3957, "step": 46310 }, { "epoch": 3.146827014540019, "grad_norm": 1.0901875495910645, "learning_rate": 0.0006068079902160619, "loss": 3.3081, "step": 46315 }, { "epoch": 3.1471667346106806, "grad_norm": 1.4439277648925781, "learning_rate": 0.0006067655252072293, "loss": 3.2858, "step": 46320 }, { "epoch": 3.1475064546813427, "grad_norm": 1.7552381753921509, "learning_rate": 0.0006067230601983965, "loss": 3.7809, "step": 46325 }, { "epoch": 3.1478461747520043, "grad_norm": 1.3186126947402954, "learning_rate": 0.0006066805951895638, "loss": 3.585, "step": 46330 }, { "epoch": 3.148185894822666, "grad_norm": 1.6021828651428223, "learning_rate": 0.0006066381301807312, "loss": 3.5199, "step": 46335 }, { "epoch": 3.148525614893328, "grad_norm": 1.6157182455062866, "learning_rate": 0.0006065956651718984, "loss": 3.3374, "step": 46340 }, { "epoch": 3.1488653349639897, 
"grad_norm": 1.2772817611694336, "learning_rate": 0.0006065532001630657, "loss": 3.5347, "step": 46345 }, { "epoch": 3.1492050550346513, "grad_norm": 1.3469756841659546, "learning_rate": 0.0006065107351542329, "loss": 3.4004, "step": 46350 }, { "epoch": 3.1495447751053134, "grad_norm": 1.3257267475128174, "learning_rate": 0.0006064682701454002, "loss": 3.3534, "step": 46355 }, { "epoch": 3.149884495175975, "grad_norm": 1.4468939304351807, "learning_rate": 0.0006064258051365675, "loss": 3.5651, "step": 46360 }, { "epoch": 3.1502242152466366, "grad_norm": 1.2032592296600342, "learning_rate": 0.0006063833401277347, "loss": 3.6324, "step": 46365 }, { "epoch": 3.1505639353172987, "grad_norm": 1.7021185159683228, "learning_rate": 0.0006063408751189021, "loss": 3.3224, "step": 46370 }, { "epoch": 3.1509036553879604, "grad_norm": 1.1181751489639282, "learning_rate": 0.0006062984101100694, "loss": 3.3856, "step": 46375 }, { "epoch": 3.151243375458622, "grad_norm": 1.319309949874878, "learning_rate": 0.0006062559451012366, "loss": 3.4137, "step": 46380 }, { "epoch": 3.151583095529284, "grad_norm": 1.3086835145950317, "learning_rate": 0.0006062134800924039, "loss": 3.5818, "step": 46385 }, { "epoch": 3.1519228155999457, "grad_norm": 1.041145920753479, "learning_rate": 0.0006061710150835712, "loss": 3.4359, "step": 46390 }, { "epoch": 3.1522625356706073, "grad_norm": 1.1497055292129517, "learning_rate": 0.0006061285500747384, "loss": 3.5467, "step": 46395 }, { "epoch": 3.1526022557412694, "grad_norm": 1.3045287132263184, "learning_rate": 0.0006060860850659056, "loss": 3.6891, "step": 46400 }, { "epoch": 3.152941975811931, "grad_norm": 2.060621500015259, "learning_rate": 0.0006060436200570731, "loss": 3.3994, "step": 46405 }, { "epoch": 3.1532816958825927, "grad_norm": 1.6818346977233887, "learning_rate": 0.0006060011550482403, "loss": 3.5222, "step": 46410 }, { "epoch": 3.1536214159532543, "grad_norm": 1.2361401319503784, "learning_rate": 0.0006059586900394075, "loss": 3.2667, 
"step": 46415 }, { "epoch": 3.1539611360239164, "grad_norm": 1.3957360982894897, "learning_rate": 0.0006059162250305749, "loss": 3.4603, "step": 46420 }, { "epoch": 3.154300856094578, "grad_norm": 1.2072943449020386, "learning_rate": 0.0006058737600217421, "loss": 3.6946, "step": 46425 }, { "epoch": 3.1546405761652396, "grad_norm": 1.2120031118392944, "learning_rate": 0.0006058312950129093, "loss": 3.5241, "step": 46430 }, { "epoch": 3.1549802962359017, "grad_norm": 1.4189940690994263, "learning_rate": 0.0006057888300040767, "loss": 3.3981, "step": 46435 }, { "epoch": 3.1553200163065633, "grad_norm": 1.3977395296096802, "learning_rate": 0.000605746364995244, "loss": 3.5407, "step": 46440 }, { "epoch": 3.155659736377225, "grad_norm": 0.956300675868988, "learning_rate": 0.0006057038999864112, "loss": 3.5772, "step": 46445 }, { "epoch": 3.155999456447887, "grad_norm": 1.144478440284729, "learning_rate": 0.0006056614349775785, "loss": 3.5786, "step": 46450 }, { "epoch": 3.1563391765185487, "grad_norm": 1.2731516361236572, "learning_rate": 0.0006056189699687458, "loss": 3.6901, "step": 46455 }, { "epoch": 3.1566788965892103, "grad_norm": 1.4934799671173096, "learning_rate": 0.000605576504959913, "loss": 3.3431, "step": 46460 }, { "epoch": 3.1570186166598724, "grad_norm": 0.8725825548171997, "learning_rate": 0.0006055340399510803, "loss": 3.3967, "step": 46465 }, { "epoch": 3.157358336730534, "grad_norm": 1.4741226434707642, "learning_rate": 0.0006054915749422476, "loss": 3.605, "step": 46470 }, { "epoch": 3.1576980568011956, "grad_norm": 1.2176612615585327, "learning_rate": 0.0006054491099334149, "loss": 3.2368, "step": 46475 }, { "epoch": 3.1580377768718577, "grad_norm": 1.162545919418335, "learning_rate": 0.0006054066449245822, "loss": 3.3268, "step": 46480 }, { "epoch": 3.1583774969425193, "grad_norm": 1.1692075729370117, "learning_rate": 0.0006053641799157495, "loss": 3.4085, "step": 46485 }, { "epoch": 3.158717217013181, "grad_norm": 1.2992013692855835, 
"learning_rate": 0.0006053217149069167, "loss": 3.4124, "step": 46490 }, { "epoch": 3.159056937083843, "grad_norm": 1.3898805379867554, "learning_rate": 0.000605279249898084, "loss": 3.4458, "step": 46495 }, { "epoch": 3.1593966571545047, "grad_norm": 1.1014914512634277, "learning_rate": 0.0006052367848892512, "loss": 2.9882, "step": 46500 }, { "epoch": 3.1597363772251663, "grad_norm": 1.4176620244979858, "learning_rate": 0.0006051943198804185, "loss": 3.5741, "step": 46505 }, { "epoch": 3.1600760972958284, "grad_norm": 2.8769450187683105, "learning_rate": 0.0006051518548715859, "loss": 3.6705, "step": 46510 }, { "epoch": 3.16041581736649, "grad_norm": 1.4169831275939941, "learning_rate": 0.0006051093898627531, "loss": 3.5887, "step": 46515 }, { "epoch": 3.1607555374371517, "grad_norm": 1.2467222213745117, "learning_rate": 0.0006050669248539204, "loss": 3.6325, "step": 46520 }, { "epoch": 3.1610952575078137, "grad_norm": 1.5447388887405396, "learning_rate": 0.0006050244598450877, "loss": 3.3623, "step": 46525 }, { "epoch": 3.1614349775784754, "grad_norm": 1.3707507848739624, "learning_rate": 0.0006049819948362549, "loss": 3.4529, "step": 46530 }, { "epoch": 3.161774697649137, "grad_norm": 1.1472841501235962, "learning_rate": 0.0006049395298274221, "loss": 3.6266, "step": 46535 }, { "epoch": 3.162114417719799, "grad_norm": 2.1438400745391846, "learning_rate": 0.0006048970648185895, "loss": 3.3283, "step": 46540 }, { "epoch": 3.1624541377904607, "grad_norm": 1.5843286514282227, "learning_rate": 0.0006048545998097568, "loss": 3.3249, "step": 46545 }, { "epoch": 3.1627938578611223, "grad_norm": 0.928382158279419, "learning_rate": 0.000604812134800924, "loss": 3.6075, "step": 46550 }, { "epoch": 3.1631335779317844, "grad_norm": 1.7482064962387085, "learning_rate": 0.0006047696697920914, "loss": 3.5731, "step": 46555 }, { "epoch": 3.163473298002446, "grad_norm": 1.5535202026367188, "learning_rate": 0.0006047272047832586, "loss": 3.4443, "step": 46560 }, { "epoch": 
3.1638130180731077, "grad_norm": 0.9508783221244812, "learning_rate": 0.0006046847397744258, "loss": 3.5817, "step": 46565 }, { "epoch": 3.1641527381437697, "grad_norm": 1.375109076499939, "learning_rate": 0.0006046422747655932, "loss": 3.4116, "step": 46570 }, { "epoch": 3.1644924582144314, "grad_norm": 1.3170512914657593, "learning_rate": 0.0006045998097567604, "loss": 3.7169, "step": 46575 }, { "epoch": 3.164832178285093, "grad_norm": 1.1195461750030518, "learning_rate": 0.0006045573447479277, "loss": 3.7434, "step": 46580 }, { "epoch": 3.165171898355755, "grad_norm": 1.6064153909683228, "learning_rate": 0.0006045148797390951, "loss": 3.4216, "step": 46585 }, { "epoch": 3.1655116184264167, "grad_norm": 1.2885504961013794, "learning_rate": 0.0006044724147302623, "loss": 3.2805, "step": 46590 }, { "epoch": 3.1658513384970783, "grad_norm": 1.2716528177261353, "learning_rate": 0.0006044299497214295, "loss": 3.6504, "step": 46595 }, { "epoch": 3.1661910585677404, "grad_norm": 1.2357020378112793, "learning_rate": 0.0006043874847125968, "loss": 3.4575, "step": 46600 }, { "epoch": 3.166530778638402, "grad_norm": 1.4705440998077393, "learning_rate": 0.0006043450197037641, "loss": 3.6084, "step": 46605 }, { "epoch": 3.1668704987090637, "grad_norm": 1.9956036806106567, "learning_rate": 0.0006043025546949313, "loss": 3.3971, "step": 46610 }, { "epoch": 3.1672102187797253, "grad_norm": 1.42682945728302, "learning_rate": 0.0006042600896860987, "loss": 3.4378, "step": 46615 }, { "epoch": 3.1675499388503874, "grad_norm": 1.4159690141677856, "learning_rate": 0.000604217624677266, "loss": 3.6103, "step": 46620 }, { "epoch": 3.167889658921049, "grad_norm": 1.331215262413025, "learning_rate": 0.0006041751596684332, "loss": 3.8553, "step": 46625 }, { "epoch": 3.1682293789917106, "grad_norm": 1.238718032836914, "learning_rate": 0.0006041326946596005, "loss": 3.1834, "step": 46630 }, { "epoch": 3.1685690990623727, "grad_norm": 1.2273244857788086, "learning_rate": 
0.0006040902296507677, "loss": 3.6301, "step": 46635 }, { "epoch": 3.1689088191330343, "grad_norm": 1.0718958377838135, "learning_rate": 0.000604047764641935, "loss": 3.5364, "step": 46640 }, { "epoch": 3.169248539203696, "grad_norm": 1.3264296054840088, "learning_rate": 0.0006040052996331023, "loss": 3.5423, "step": 46645 }, { "epoch": 3.169588259274358, "grad_norm": 1.8742413520812988, "learning_rate": 0.0006039628346242696, "loss": 3.6591, "step": 46650 }, { "epoch": 3.1699279793450197, "grad_norm": 1.3485711812973022, "learning_rate": 0.0006039203696154369, "loss": 3.4753, "step": 46655 }, { "epoch": 3.1702676994156813, "grad_norm": 1.2390235662460327, "learning_rate": 0.0006038779046066042, "loss": 3.6613, "step": 46660 }, { "epoch": 3.1706074194863434, "grad_norm": 1.0357927083969116, "learning_rate": 0.0006038354395977714, "loss": 3.4274, "step": 46665 }, { "epoch": 3.170947139557005, "grad_norm": 1.2932326793670654, "learning_rate": 0.0006037929745889388, "loss": 3.45, "step": 46670 }, { "epoch": 3.1712868596276667, "grad_norm": 1.5024068355560303, "learning_rate": 0.000603750509580106, "loss": 3.5657, "step": 46675 }, { "epoch": 3.1716265796983287, "grad_norm": 1.228591799736023, "learning_rate": 0.0006037080445712732, "loss": 3.5915, "step": 46680 }, { "epoch": 3.1719662997689904, "grad_norm": 1.5525052547454834, "learning_rate": 0.0006036655795624407, "loss": 3.3589, "step": 46685 }, { "epoch": 3.172306019839652, "grad_norm": 2.1151158809661865, "learning_rate": 0.0006036231145536079, "loss": 3.5159, "step": 46690 }, { "epoch": 3.172645739910314, "grad_norm": 1.2094756364822388, "learning_rate": 0.0006035806495447751, "loss": 3.7545, "step": 46695 }, { "epoch": 3.1729854599809757, "grad_norm": 1.3362512588500977, "learning_rate": 0.0006035381845359424, "loss": 3.242, "step": 46700 }, { "epoch": 3.1733251800516373, "grad_norm": 1.1773319244384766, "learning_rate": 0.0006034957195271097, "loss": 3.4608, "step": 46705 }, { "epoch": 3.1736649001222994, 
"grad_norm": 1.3653055429458618, "learning_rate": 0.0006034532545182769, "loss": 3.4122, "step": 46710 }, { "epoch": 3.174004620192961, "grad_norm": 1.7249150276184082, "learning_rate": 0.0006034107895094442, "loss": 3.7042, "step": 46715 }, { "epoch": 3.1743443402636227, "grad_norm": 1.2393215894699097, "learning_rate": 0.0006033683245006116, "loss": 3.5005, "step": 46720 }, { "epoch": 3.1746840603342847, "grad_norm": 1.1957261562347412, "learning_rate": 0.0006033258594917788, "loss": 3.3722, "step": 46725 }, { "epoch": 3.1750237804049464, "grad_norm": 1.1240980625152588, "learning_rate": 0.0006032833944829461, "loss": 3.479, "step": 46730 }, { "epoch": 3.175363500475608, "grad_norm": 1.3648138046264648, "learning_rate": 0.0006032409294741133, "loss": 3.4309, "step": 46735 }, { "epoch": 3.17570322054627, "grad_norm": 1.5184168815612793, "learning_rate": 0.0006031984644652806, "loss": 3.5221, "step": 46740 }, { "epoch": 3.1760429406169317, "grad_norm": 1.36304771900177, "learning_rate": 0.0006031559994564479, "loss": 3.7468, "step": 46745 }, { "epoch": 3.1763826606875933, "grad_norm": 1.1738405227661133, "learning_rate": 0.0006031135344476151, "loss": 3.4311, "step": 46750 }, { "epoch": 3.176722380758255, "grad_norm": 1.5703452825546265, "learning_rate": 0.0006030710694387825, "loss": 3.1214, "step": 46755 }, { "epoch": 3.177062100828917, "grad_norm": 1.4771990776062012, "learning_rate": 0.0006030286044299498, "loss": 3.5092, "step": 46760 }, { "epoch": 3.1774018208995787, "grad_norm": 1.2017878293991089, "learning_rate": 0.000602986139421117, "loss": 3.3501, "step": 46765 }, { "epoch": 3.1777415409702403, "grad_norm": 1.3134690523147583, "learning_rate": 0.0006029436744122843, "loss": 3.5082, "step": 46770 }, { "epoch": 3.1780812610409024, "grad_norm": 1.3854724168777466, "learning_rate": 0.0006029012094034516, "loss": 3.2639, "step": 46775 }, { "epoch": 3.178420981111564, "grad_norm": 1.3474791049957275, "learning_rate": 0.0006028587443946188, "loss": 3.5436, 
"step": 46780 }, { "epoch": 3.1787607011822256, "grad_norm": 1.3587034940719604, "learning_rate": 0.000602816279385786, "loss": 3.3714, "step": 46785 }, { "epoch": 3.1791004212528877, "grad_norm": 1.1709083318710327, "learning_rate": 0.0006027738143769535, "loss": 3.7807, "step": 46790 }, { "epoch": 3.1794401413235494, "grad_norm": 1.2374874353408813, "learning_rate": 0.0006027313493681207, "loss": 3.4958, "step": 46795 }, { "epoch": 3.179779861394211, "grad_norm": 1.577650547027588, "learning_rate": 0.0006026888843592879, "loss": 3.2688, "step": 46800 }, { "epoch": 3.180119581464873, "grad_norm": 0.9975987672805786, "learning_rate": 0.0006026464193504553, "loss": 3.3022, "step": 46805 }, { "epoch": 3.1804593015355347, "grad_norm": 1.6688082218170166, "learning_rate": 0.0006026039543416225, "loss": 3.4255, "step": 46810 }, { "epoch": 3.1807990216061963, "grad_norm": 1.2869994640350342, "learning_rate": 0.0006025614893327897, "loss": 3.3214, "step": 46815 }, { "epoch": 3.1811387416768584, "grad_norm": 1.3081655502319336, "learning_rate": 0.0006025190243239571, "loss": 3.3434, "step": 46820 }, { "epoch": 3.18147846174752, "grad_norm": 1.0761311054229736, "learning_rate": 0.0006024765593151244, "loss": 3.5435, "step": 46825 }, { "epoch": 3.1818181818181817, "grad_norm": 1.0919946432113647, "learning_rate": 0.0006024340943062916, "loss": 3.466, "step": 46830 }, { "epoch": 3.1821579018888437, "grad_norm": 1.0325549840927124, "learning_rate": 0.000602391629297459, "loss": 3.5802, "step": 46835 }, { "epoch": 3.1824976219595054, "grad_norm": 1.4288134574890137, "learning_rate": 0.0006023491642886262, "loss": 3.3168, "step": 46840 }, { "epoch": 3.182837342030167, "grad_norm": 1.3297550678253174, "learning_rate": 0.0006023066992797934, "loss": 3.4813, "step": 46845 }, { "epoch": 3.183177062100829, "grad_norm": 1.2246761322021484, "learning_rate": 0.0006022642342709607, "loss": 3.6129, "step": 46850 }, { "epoch": 3.1835167821714907, "grad_norm": 1.1955668926239014, 
"learning_rate": 0.000602221769262128, "loss": 3.5008, "step": 46855 }, { "epoch": 3.1838565022421523, "grad_norm": 1.5534290075302124, "learning_rate": 0.0006021793042532953, "loss": 3.628, "step": 46860 }, { "epoch": 3.1841962223128144, "grad_norm": 1.2838151454925537, "learning_rate": 0.0006021368392444626, "loss": 3.3248, "step": 46865 }, { "epoch": 3.184535942383476, "grad_norm": 1.1347395181655884, "learning_rate": 0.0006020943742356299, "loss": 3.5902, "step": 46870 }, { "epoch": 3.1848756624541377, "grad_norm": 1.287245750427246, "learning_rate": 0.0006020519092267971, "loss": 3.5607, "step": 46875 }, { "epoch": 3.1852153825247997, "grad_norm": 1.2326995134353638, "learning_rate": 0.0006020094442179644, "loss": 3.5448, "step": 46880 }, { "epoch": 3.1855551025954614, "grad_norm": 1.1517353057861328, "learning_rate": 0.0006019669792091316, "loss": 3.692, "step": 46885 }, { "epoch": 3.185894822666123, "grad_norm": 1.5543369054794312, "learning_rate": 0.0006019245142002989, "loss": 3.394, "step": 46890 }, { "epoch": 3.186234542736785, "grad_norm": 1.526663899421692, "learning_rate": 0.0006018820491914663, "loss": 3.3898, "step": 46895 }, { "epoch": 3.1865742628074467, "grad_norm": 1.4051742553710938, "learning_rate": 0.0006018395841826335, "loss": 3.6374, "step": 46900 }, { "epoch": 3.1869139828781083, "grad_norm": 1.4754722118377686, "learning_rate": 0.0006017971191738008, "loss": 3.2734, "step": 46905 }, { "epoch": 3.1872537029487704, "grad_norm": 1.2899101972579956, "learning_rate": 0.0006017546541649681, "loss": 3.584, "step": 46910 }, { "epoch": 3.187593423019432, "grad_norm": 1.250020980834961, "learning_rate": 0.0006017121891561353, "loss": 3.3923, "step": 46915 }, { "epoch": 3.1879331430900937, "grad_norm": 1.448042869567871, "learning_rate": 0.0006016697241473025, "loss": 3.6045, "step": 46920 }, { "epoch": 3.1882728631607558, "grad_norm": 1.1828112602233887, "learning_rate": 0.00060162725913847, "loss": 3.5865, "step": 46925 }, { "epoch": 
3.1886125832314174, "grad_norm": 3.5684971809387207, "learning_rate": 0.0006015847941296372, "loss": 3.4801, "step": 46930 }, { "epoch": 3.188952303302079, "grad_norm": 1.4584379196166992, "learning_rate": 0.0006015423291208044, "loss": 3.4068, "step": 46935 }, { "epoch": 3.189292023372741, "grad_norm": 1.4387844800949097, "learning_rate": 0.0006014998641119718, "loss": 3.5557, "step": 46940 }, { "epoch": 3.1896317434434027, "grad_norm": 1.01021409034729, "learning_rate": 0.000601457399103139, "loss": 3.3452, "step": 46945 }, { "epoch": 3.1899714635140644, "grad_norm": 1.0283631086349487, "learning_rate": 0.0006014149340943062, "loss": 3.4457, "step": 46950 }, { "epoch": 3.190311183584726, "grad_norm": 1.5451741218566895, "learning_rate": 0.0006013724690854736, "loss": 3.4198, "step": 46955 }, { "epoch": 3.190650903655388, "grad_norm": 1.1870487928390503, "learning_rate": 0.0006013300040766409, "loss": 3.2708, "step": 46960 }, { "epoch": 3.1909906237260497, "grad_norm": 9.13224983215332, "learning_rate": 0.0006012875390678081, "loss": 3.3857, "step": 46965 }, { "epoch": 3.1913303437967113, "grad_norm": 1.934024453163147, "learning_rate": 0.0006012450740589755, "loss": 3.3426, "step": 46970 }, { "epoch": 3.1916700638673734, "grad_norm": 1.4473795890808105, "learning_rate": 0.0006012026090501427, "loss": 3.5227, "step": 46975 }, { "epoch": 3.192009783938035, "grad_norm": 1.4147803783416748, "learning_rate": 0.0006011601440413099, "loss": 3.3752, "step": 46980 }, { "epoch": 3.1923495040086967, "grad_norm": 1.4594544172286987, "learning_rate": 0.0006011176790324772, "loss": 3.3442, "step": 46985 }, { "epoch": 3.1926892240793587, "grad_norm": 1.6790879964828491, "learning_rate": 0.0006010752140236445, "loss": 3.4915, "step": 46990 }, { "epoch": 3.1930289441500204, "grad_norm": 1.157167673110962, "learning_rate": 0.0006010327490148118, "loss": 3.3852, "step": 46995 }, { "epoch": 3.193368664220682, "grad_norm": 1.0098849534988403, "learning_rate": 0.0006009902840059791, 
"loss": 3.3147, "step": 47000 }, { "epoch": 3.193708384291344, "grad_norm": 1.3868027925491333, "learning_rate": 0.0006009478189971464, "loss": 3.6857, "step": 47005 }, { "epoch": 3.1940481043620057, "grad_norm": 1.9865353107452393, "learning_rate": 0.0006009053539883137, "loss": 3.2949, "step": 47010 }, { "epoch": 3.1943878244326673, "grad_norm": 1.4246598482131958, "learning_rate": 0.0006008628889794809, "loss": 3.4366, "step": 47015 }, { "epoch": 3.1947275445033294, "grad_norm": 1.1872037649154663, "learning_rate": 0.0006008204239706482, "loss": 3.5323, "step": 47020 }, { "epoch": 3.195067264573991, "grad_norm": 1.2121171951293945, "learning_rate": 0.0006007779589618155, "loss": 3.583, "step": 47025 }, { "epoch": 3.1954069846446527, "grad_norm": 1.290684461593628, "learning_rate": 0.0006007354939529828, "loss": 3.3754, "step": 47030 }, { "epoch": 3.1957467047153147, "grad_norm": 1.218820571899414, "learning_rate": 0.00060069302894415, "loss": 3.4876, "step": 47035 }, { "epoch": 3.1960864247859764, "grad_norm": 1.445959210395813, "learning_rate": 0.0006006505639353174, "loss": 3.4901, "step": 47040 }, { "epoch": 3.196426144856638, "grad_norm": 1.0958566665649414, "learning_rate": 0.0006006080989264846, "loss": 3.2235, "step": 47045 }, { "epoch": 3.1967658649273, "grad_norm": 1.6651636362075806, "learning_rate": 0.0006005656339176518, "loss": 3.3455, "step": 47050 }, { "epoch": 3.1971055849979617, "grad_norm": 1.096583604812622, "learning_rate": 0.0006005231689088192, "loss": 3.4508, "step": 47055 }, { "epoch": 3.1974453050686233, "grad_norm": 1.4820733070373535, "learning_rate": 0.0006004807038999864, "loss": 3.2929, "step": 47060 }, { "epoch": 3.1977850251392854, "grad_norm": 1.1821815967559814, "learning_rate": 0.0006004382388911537, "loss": 3.6543, "step": 47065 }, { "epoch": 3.198124745209947, "grad_norm": 1.6353092193603516, "learning_rate": 0.0006003957738823211, "loss": 3.4712, "step": 47070 }, { "epoch": 3.1984644652806087, "grad_norm": 1.27754807472229, 
"learning_rate": 0.0006003533088734883, "loss": 3.7684, "step": 47075 }, { "epoch": 3.1988041853512708, "grad_norm": 1.2860944271087646, "learning_rate": 0.0006003108438646555, "loss": 3.2348, "step": 47080 }, { "epoch": 3.1991439054219324, "grad_norm": 1.4355179071426392, "learning_rate": 0.0006002683788558228, "loss": 3.5178, "step": 47085 }, { "epoch": 3.199483625492594, "grad_norm": 1.5857691764831543, "learning_rate": 0.0006002259138469901, "loss": 3.5949, "step": 47090 }, { "epoch": 3.1998233455632556, "grad_norm": 1.206839919090271, "learning_rate": 0.0006001834488381573, "loss": 3.3189, "step": 47095 }, { "epoch": 3.2001630656339177, "grad_norm": 1.1965374946594238, "learning_rate": 0.0006001409838293247, "loss": 3.3933, "step": 47100 }, { "epoch": 3.2005027857045794, "grad_norm": 1.4640554189682007, "learning_rate": 0.000600098518820492, "loss": 3.4587, "step": 47105 }, { "epoch": 3.200842505775241, "grad_norm": 1.1128923892974854, "learning_rate": 0.0006000560538116592, "loss": 3.8607, "step": 47110 }, { "epoch": 3.201182225845903, "grad_norm": 1.4626011848449707, "learning_rate": 0.0006000135888028265, "loss": 3.4408, "step": 47115 }, { "epoch": 3.2015219459165647, "grad_norm": 1.2902369499206543, "learning_rate": 0.0005999711237939938, "loss": 3.5021, "step": 47120 }, { "epoch": 3.2018616659872263, "grad_norm": 1.1350111961364746, "learning_rate": 0.000599928658785161, "loss": 3.1255, "step": 47125 }, { "epoch": 3.2022013860578884, "grad_norm": 1.0622214078903198, "learning_rate": 0.0005998861937763283, "loss": 3.6332, "step": 47130 }, { "epoch": 3.20254110612855, "grad_norm": 1.5171536207199097, "learning_rate": 0.0005998437287674956, "loss": 3.6775, "step": 47135 }, { "epoch": 3.2028808261992117, "grad_norm": 1.3763819932937622, "learning_rate": 0.0005998012637586629, "loss": 3.4415, "step": 47140 }, { "epoch": 3.2032205462698737, "grad_norm": 1.0090845823287964, "learning_rate": 0.0005997587987498302, "loss": 3.5244, "step": 47145 }, { "epoch": 
3.2035602663405354, "grad_norm": 1.2840018272399902, "learning_rate": 0.0005997163337409974, "loss": 3.4991, "step": 47150 }, { "epoch": 3.203899986411197, "grad_norm": 1.0833420753479004, "learning_rate": 0.0005996738687321647, "loss": 3.5801, "step": 47155 }, { "epoch": 3.204239706481859, "grad_norm": 1.2841640710830688, "learning_rate": 0.000599631403723332, "loss": 3.4999, "step": 47160 }, { "epoch": 3.2045794265525207, "grad_norm": 1.403252363204956, "learning_rate": 0.0005995889387144992, "loss": 3.5121, "step": 47165 }, { "epoch": 3.2049191466231823, "grad_norm": 1.4213119745254517, "learning_rate": 0.0005995464737056666, "loss": 3.6706, "step": 47170 }, { "epoch": 3.2052588666938444, "grad_norm": 1.1950548887252808, "learning_rate": 0.0005995040086968339, "loss": 3.3815, "step": 47175 }, { "epoch": 3.205598586764506, "grad_norm": 1.3393971920013428, "learning_rate": 0.0005994615436880011, "loss": 3.6454, "step": 47180 }, { "epoch": 3.2059383068351677, "grad_norm": 1.4554917812347412, "learning_rate": 0.0005994190786791683, "loss": 3.3762, "step": 47185 }, { "epoch": 3.2062780269058297, "grad_norm": 1.490322470664978, "learning_rate": 0.0005993766136703357, "loss": 3.3321, "step": 47190 }, { "epoch": 3.2066177469764914, "grad_norm": 1.4819539785385132, "learning_rate": 0.0005993341486615029, "loss": 3.3848, "step": 47195 }, { "epoch": 3.206957467047153, "grad_norm": 1.1861577033996582, "learning_rate": 0.0005992916836526701, "loss": 3.2977, "step": 47200 }, { "epoch": 3.207297187117815, "grad_norm": 1.5731127262115479, "learning_rate": 0.0005992492186438376, "loss": 3.3454, "step": 47205 }, { "epoch": 3.2076369071884767, "grad_norm": 1.0306729078292847, "learning_rate": 0.0005992067536350048, "loss": 3.5825, "step": 47210 }, { "epoch": 3.2079766272591383, "grad_norm": 1.4913653135299683, "learning_rate": 0.000599164288626172, "loss": 3.1841, "step": 47215 }, { "epoch": 3.2083163473298004, "grad_norm": 1.4470069408416748, "learning_rate": 
0.0005991218236173394, "loss": 3.3162, "step": 47220 }, { "epoch": 3.208656067400462, "grad_norm": 1.2115609645843506, "learning_rate": 0.0005990793586085066, "loss": 3.4315, "step": 47225 }, { "epoch": 3.2089957874711237, "grad_norm": 2.131941080093384, "learning_rate": 0.0005990368935996738, "loss": 3.1101, "step": 47230 }, { "epoch": 3.2093355075417858, "grad_norm": 1.8066364526748657, "learning_rate": 0.0005989944285908411, "loss": 3.7343, "step": 47235 }, { "epoch": 3.2096752276124474, "grad_norm": 1.4271044731140137, "learning_rate": 0.0005989519635820085, "loss": 3.5395, "step": 47240 }, { "epoch": 3.210014947683109, "grad_norm": 1.3696742057800293, "learning_rate": 0.0005989094985731757, "loss": 3.5414, "step": 47245 }, { "epoch": 3.210354667753771, "grad_norm": 1.397528886795044, "learning_rate": 0.000598867033564343, "loss": 3.72, "step": 47250 }, { "epoch": 3.2106943878244327, "grad_norm": 1.2960633039474487, "learning_rate": 0.0005988245685555103, "loss": 3.4884, "step": 47255 }, { "epoch": 3.2110341078950944, "grad_norm": 1.3755066394805908, "learning_rate": 0.0005987821035466775, "loss": 3.5493, "step": 47260 }, { "epoch": 3.2113738279657564, "grad_norm": 1.3325165510177612, "learning_rate": 0.0005987396385378448, "loss": 3.1648, "step": 47265 }, { "epoch": 3.211713548036418, "grad_norm": 1.1387581825256348, "learning_rate": 0.000598697173529012, "loss": 3.5149, "step": 47270 }, { "epoch": 3.2120532681070797, "grad_norm": 1.9105881452560425, "learning_rate": 0.0005986547085201794, "loss": 3.442, "step": 47275 }, { "epoch": 3.2123929881777418, "grad_norm": 1.5397639274597168, "learning_rate": 0.0005986122435113467, "loss": 3.5413, "step": 47280 }, { "epoch": 3.2127327082484034, "grad_norm": 1.0721336603164673, "learning_rate": 0.0005985697785025139, "loss": 3.4219, "step": 47285 }, { "epoch": 3.213072428319065, "grad_norm": 1.5944808721542358, "learning_rate": 0.0005985273134936812, "loss": 3.488, "step": 47290 }, { "epoch": 3.2134121483897267, 
"grad_norm": 1.128190040588379, "learning_rate": 0.0005984848484848485, "loss": 3.3921, "step": 47295 }, { "epoch": 3.2137518684603887, "grad_norm": 1.4092369079589844, "learning_rate": 0.0005984423834760157, "loss": 3.5977, "step": 47300 }, { "epoch": 3.2140915885310504, "grad_norm": 1.0678107738494873, "learning_rate": 0.000598399918467183, "loss": 3.1569, "step": 47305 }, { "epoch": 3.214431308601712, "grad_norm": 1.2528427839279175, "learning_rate": 0.0005983574534583504, "loss": 3.1773, "step": 47310 }, { "epoch": 3.214771028672374, "grad_norm": 1.187411904335022, "learning_rate": 0.0005983149884495176, "loss": 3.4295, "step": 47315 }, { "epoch": 3.2151107487430357, "grad_norm": 1.211861491203308, "learning_rate": 0.0005982725234406848, "loss": 3.6571, "step": 47320 }, { "epoch": 3.2154504688136973, "grad_norm": 1.1559525728225708, "learning_rate": 0.0005982300584318522, "loss": 3.6103, "step": 47325 }, { "epoch": 3.2157901888843594, "grad_norm": 1.7585647106170654, "learning_rate": 0.0005981875934230194, "loss": 3.2642, "step": 47330 }, { "epoch": 3.216129908955021, "grad_norm": 1.346286654472351, "learning_rate": 0.0005981451284141866, "loss": 3.3506, "step": 47335 }, { "epoch": 3.2164696290256827, "grad_norm": 1.427101492881775, "learning_rate": 0.000598102663405354, "loss": 3.4164, "step": 47340 }, { "epoch": 3.2168093490963448, "grad_norm": 1.2879711389541626, "learning_rate": 0.0005980601983965213, "loss": 3.168, "step": 47345 }, { "epoch": 3.2171490691670064, "grad_norm": 1.42931067943573, "learning_rate": 0.0005980177333876886, "loss": 3.5004, "step": 47350 }, { "epoch": 3.217488789237668, "grad_norm": 1.1016908884048462, "learning_rate": 0.0005979752683788559, "loss": 3.3569, "step": 47355 }, { "epoch": 3.21782850930833, "grad_norm": 1.2281854152679443, "learning_rate": 0.0005979328033700231, "loss": 3.5529, "step": 47360 }, { "epoch": 3.2181682293789917, "grad_norm": 2.2187211513519287, "learning_rate": 0.0005978903383611904, "loss": 3.2983, "step": 
47365 }, { "epoch": 3.2185079494496533, "grad_norm": 10.008557319641113, "learning_rate": 0.0005978478733523576, "loss": 3.5279, "step": 47370 }, { "epoch": 3.2188476695203154, "grad_norm": 1.1983789205551147, "learning_rate": 0.0005978054083435249, "loss": 3.3772, "step": 47375 }, { "epoch": 3.219187389590977, "grad_norm": 1.0580521821975708, "learning_rate": 0.0005977629433346923, "loss": 3.6535, "step": 47380 }, { "epoch": 3.2195271096616387, "grad_norm": 1.1301231384277344, "learning_rate": 0.0005977204783258595, "loss": 3.1546, "step": 47385 }, { "epoch": 3.2198668297323008, "grad_norm": 0.9777482151985168, "learning_rate": 0.0005976780133170268, "loss": 3.6087, "step": 47390 }, { "epoch": 3.2202065498029624, "grad_norm": 1.4743887186050415, "learning_rate": 0.0005976355483081941, "loss": 3.5096, "step": 47395 }, { "epoch": 3.220546269873624, "grad_norm": 1.1759583950042725, "learning_rate": 0.0005975930832993613, "loss": 3.3763, "step": 47400 }, { "epoch": 3.220885989944286, "grad_norm": 1.8830326795578003, "learning_rate": 0.0005975506182905286, "loss": 3.5209, "step": 47405 }, { "epoch": 3.2212257100149477, "grad_norm": 1.218815565109253, "learning_rate": 0.000597508153281696, "loss": 3.4191, "step": 47410 }, { "epoch": 3.2215654300856094, "grad_norm": 1.2872300148010254, "learning_rate": 0.0005974656882728632, "loss": 3.5593, "step": 47415 }, { "epoch": 3.2219051501562714, "grad_norm": 1.4976118803024292, "learning_rate": 0.0005974232232640304, "loss": 3.4166, "step": 47420 }, { "epoch": 3.222244870226933, "grad_norm": 1.4205818176269531, "learning_rate": 0.0005973807582551978, "loss": 3.3522, "step": 47425 }, { "epoch": 3.2225845902975947, "grad_norm": 2.4568629264831543, "learning_rate": 0.000597338293246365, "loss": 3.3625, "step": 47430 }, { "epoch": 3.2229243103682563, "grad_norm": 2.447404146194458, "learning_rate": 0.0005972958282375322, "loss": 3.4335, "step": 47435 }, { "epoch": 3.2232640304389184, "grad_norm": 1.3373628854751587, "learning_rate": 
0.0005972533632286996, "loss": 3.309, "step": 47440 }, { "epoch": 3.22360375050958, "grad_norm": 1.2784273624420166, "learning_rate": 0.0005972108982198669, "loss": 3.6346, "step": 47445 }, { "epoch": 3.2239434705802417, "grad_norm": 1.284286379814148, "learning_rate": 0.0005971684332110341, "loss": 3.3951, "step": 47450 }, { "epoch": 3.2242831906509037, "grad_norm": 1.3021758794784546, "learning_rate": 0.0005971259682022015, "loss": 3.5899, "step": 47455 }, { "epoch": 3.2246229107215654, "grad_norm": 1.3992390632629395, "learning_rate": 0.0005970835031933687, "loss": 3.5818, "step": 47460 }, { "epoch": 3.224962630792227, "grad_norm": 1.194024920463562, "learning_rate": 0.0005970410381845359, "loss": 3.4678, "step": 47465 }, { "epoch": 3.225302350862889, "grad_norm": 1.18559730052948, "learning_rate": 0.0005969985731757032, "loss": 3.3996, "step": 47470 }, { "epoch": 3.2256420709335507, "grad_norm": 1.1133431196212769, "learning_rate": 0.0005969561081668705, "loss": 3.4275, "step": 47475 }, { "epoch": 3.2259817910042123, "grad_norm": 1.2203541994094849, "learning_rate": 0.0005969136431580378, "loss": 3.673, "step": 47480 }, { "epoch": 3.2263215110748744, "grad_norm": 1.0904656648635864, "learning_rate": 0.0005968711781492051, "loss": 3.697, "step": 47485 }, { "epoch": 3.226661231145536, "grad_norm": 1.7045519351959229, "learning_rate": 0.0005968287131403724, "loss": 3.513, "step": 47490 }, { "epoch": 3.2270009512161977, "grad_norm": 1.6173940896987915, "learning_rate": 0.0005967862481315396, "loss": 3.0988, "step": 47495 }, { "epoch": 3.2273406712868598, "grad_norm": 1.4760959148406982, "learning_rate": 0.0005967437831227069, "loss": 3.1396, "step": 47500 }, { "epoch": 3.2276803913575214, "grad_norm": 1.3051395416259766, "learning_rate": 0.0005967013181138742, "loss": 3.6698, "step": 47505 }, { "epoch": 3.228020111428183, "grad_norm": 1.1857445240020752, "learning_rate": 0.0005966588531050414, "loss": 3.5693, "step": 47510 }, { "epoch": 3.228359831498845, 
"grad_norm": 1.0990554094314575, "learning_rate": 0.0005966163880962088, "loss": 3.7387, "step": 47515 }, { "epoch": 3.2286995515695067, "grad_norm": 1.1908347606658936, "learning_rate": 0.000596573923087376, "loss": 3.2448, "step": 47520 }, { "epoch": 3.2290392716401684, "grad_norm": 1.0519156455993652, "learning_rate": 0.0005965314580785433, "loss": 3.4616, "step": 47525 }, { "epoch": 3.2293789917108304, "grad_norm": 1.1907278299331665, "learning_rate": 0.0005964889930697106, "loss": 3.5551, "step": 47530 }, { "epoch": 3.229718711781492, "grad_norm": 1.4127686023712158, "learning_rate": 0.0005964465280608778, "loss": 3.1081, "step": 47535 }, { "epoch": 3.2300584318521537, "grad_norm": 1.6308995485305786, "learning_rate": 0.0005964040630520451, "loss": 3.3274, "step": 47540 }, { "epoch": 3.2303981519228158, "grad_norm": 1.0691081285476685, "learning_rate": 0.0005963615980432124, "loss": 3.3977, "step": 47545 }, { "epoch": 3.2307378719934774, "grad_norm": 1.4990373849868774, "learning_rate": 0.0005963191330343797, "loss": 3.1316, "step": 47550 }, { "epoch": 3.231077592064139, "grad_norm": 1.3742157220840454, "learning_rate": 0.000596276668025547, "loss": 3.4398, "step": 47555 }, { "epoch": 3.231417312134801, "grad_norm": 1.4358465671539307, "learning_rate": 0.0005962342030167143, "loss": 3.2471, "step": 47560 }, { "epoch": 3.2317570322054627, "grad_norm": 1.0815796852111816, "learning_rate": 0.0005961917380078815, "loss": 3.2827, "step": 47565 }, { "epoch": 3.2320967522761244, "grad_norm": 1.1584293842315674, "learning_rate": 0.0005961492729990487, "loss": 3.4661, "step": 47570 }, { "epoch": 3.2324364723467864, "grad_norm": 1.2731971740722656, "learning_rate": 0.0005961068079902161, "loss": 3.4765, "step": 47575 }, { "epoch": 3.232776192417448, "grad_norm": 1.4390454292297363, "learning_rate": 0.0005960643429813833, "loss": 3.4613, "step": 47580 }, { "epoch": 3.2331159124881097, "grad_norm": 1.4086753129959106, "learning_rate": 0.0005960218779725506, "loss": 
3.6268, "step": 47585 }, { "epoch": 3.2334556325587718, "grad_norm": 1.1672965288162231, "learning_rate": 0.000595979412963718, "loss": 3.6827, "step": 47590 }, { "epoch": 3.2337953526294334, "grad_norm": 1.4114402532577515, "learning_rate": 0.0005959369479548852, "loss": 3.3067, "step": 47595 }, { "epoch": 3.234135072700095, "grad_norm": 1.4282854795455933, "learning_rate": 0.0005958944829460524, "loss": 3.7549, "step": 47600 }, { "epoch": 3.234474792770757, "grad_norm": 1.655002474784851, "learning_rate": 0.0005958520179372198, "loss": 3.5974, "step": 47605 }, { "epoch": 3.2348145128414187, "grad_norm": 1.112684965133667, "learning_rate": 0.000595809552928387, "loss": 3.2343, "step": 47610 }, { "epoch": 3.2351542329120804, "grad_norm": 1.4908180236816406, "learning_rate": 0.0005957670879195542, "loss": 3.5617, "step": 47615 }, { "epoch": 3.2354939529827424, "grad_norm": 1.020147442817688, "learning_rate": 0.0005957246229107217, "loss": 3.6239, "step": 47620 }, { "epoch": 3.235833673053404, "grad_norm": 1.546642780303955, "learning_rate": 0.0005956821579018889, "loss": 3.3446, "step": 47625 }, { "epoch": 3.2361733931240657, "grad_norm": 1.1526774168014526, "learning_rate": 0.0005956481858948227, "loss": 3.4366, "step": 47630 }, { "epoch": 3.2365131131947273, "grad_norm": 1.29534912109375, "learning_rate": 0.0005956057208859899, "loss": 3.4409, "step": 47635 }, { "epoch": 3.2368528332653894, "grad_norm": 1.0542855262756348, "learning_rate": 0.0005955632558771572, "loss": 3.5348, "step": 47640 }, { "epoch": 3.237192553336051, "grad_norm": 1.1699023246765137, "learning_rate": 0.0005955207908683246, "loss": 3.5234, "step": 47645 }, { "epoch": 3.2375322734067127, "grad_norm": 1.251968264579773, "learning_rate": 0.0005954783258594918, "loss": 3.2417, "step": 47650 }, { "epoch": 3.2378719934773748, "grad_norm": 1.068299651145935, "learning_rate": 0.000595435860850659, "loss": 3.4156, "step": 47655 }, { "epoch": 3.2382117135480364, "grad_norm": 1.294835090637207, 
"learning_rate": 0.0005953933958418264, "loss": 3.631, "step": 47660 }, { "epoch": 3.238551433618698, "grad_norm": 1.1983140707015991, "learning_rate": 0.0005953509308329936, "loss": 3.512, "step": 47665 }, { "epoch": 3.23889115368936, "grad_norm": 0.9412567615509033, "learning_rate": 0.0005953084658241608, "loss": 3.4966, "step": 47670 }, { "epoch": 3.2392308737600217, "grad_norm": 1.5241591930389404, "learning_rate": 0.0005952660008153283, "loss": 3.5017, "step": 47675 }, { "epoch": 3.2395705938306834, "grad_norm": 1.0484435558319092, "learning_rate": 0.0005952235358064955, "loss": 3.4315, "step": 47680 }, { "epoch": 3.2399103139013454, "grad_norm": 1.6054149866104126, "learning_rate": 0.0005951810707976627, "loss": 3.7855, "step": 47685 }, { "epoch": 3.240250033972007, "grad_norm": 1.3040640354156494, "learning_rate": 0.0005951386057888301, "loss": 3.335, "step": 47690 }, { "epoch": 3.2405897540426687, "grad_norm": 1.0602469444274902, "learning_rate": 0.0005950961407799973, "loss": 3.8199, "step": 47695 }, { "epoch": 3.2409294741133308, "grad_norm": 1.2343339920043945, "learning_rate": 0.0005950536757711645, "loss": 3.3947, "step": 47700 }, { "epoch": 3.2412691941839924, "grad_norm": 1.1602132320404053, "learning_rate": 0.0005950112107623318, "loss": 3.5463, "step": 47705 }, { "epoch": 3.241608914254654, "grad_norm": 1.246358871459961, "learning_rate": 0.0005949687457534992, "loss": 3.6183, "step": 47710 }, { "epoch": 3.241948634325316, "grad_norm": 1.3547159433364868, "learning_rate": 0.0005949262807446664, "loss": 3.4876, "step": 47715 }, { "epoch": 3.2422883543959777, "grad_norm": 1.6277636289596558, "learning_rate": 0.0005948838157358337, "loss": 3.545, "step": 47720 }, { "epoch": 3.2426280744666394, "grad_norm": 1.3252182006835938, "learning_rate": 0.000594841350727001, "loss": 3.4272, "step": 47725 }, { "epoch": 3.2429677945373014, "grad_norm": 1.3607996702194214, "learning_rate": 0.0005947988857181682, "loss": 3.4233, "step": 47730 }, { "epoch": 
3.243307514607963, "grad_norm": 1.3730661869049072, "learning_rate": 0.0005947564207093355, "loss": 3.4812, "step": 47735 }, { "epoch": 3.2436472346786247, "grad_norm": 1.2519186735153198, "learning_rate": 0.0005947139557005028, "loss": 3.5033, "step": 47740 }, { "epoch": 3.2439869547492868, "grad_norm": 1.3282772302627563, "learning_rate": 0.0005946714906916701, "loss": 3.4525, "step": 47745 }, { "epoch": 3.2443266748199484, "grad_norm": 1.490952968597412, "learning_rate": 0.0005946290256828374, "loss": 3.4289, "step": 47750 }, { "epoch": 3.24466639489061, "grad_norm": 1.3851220607757568, "learning_rate": 0.0005945865606740046, "loss": 3.6316, "step": 47755 }, { "epoch": 3.245006114961272, "grad_norm": 1.4438177347183228, "learning_rate": 0.0005945440956651719, "loss": 3.4767, "step": 47760 }, { "epoch": 3.2453458350319337, "grad_norm": 1.2443832159042358, "learning_rate": 0.0005945016306563392, "loss": 3.6475, "step": 47765 }, { "epoch": 3.2456855551025954, "grad_norm": 1.1025300025939941, "learning_rate": 0.0005944591656475064, "loss": 3.4611, "step": 47770 }, { "epoch": 3.246025275173257, "grad_norm": 1.2251092195510864, "learning_rate": 0.0005944167006386737, "loss": 3.3812, "step": 47775 }, { "epoch": 3.246364995243919, "grad_norm": 1.1759761571884155, "learning_rate": 0.0005943742356298411, "loss": 3.5261, "step": 47780 }, { "epoch": 3.2467047153145807, "grad_norm": 1.2617305517196655, "learning_rate": 0.0005943317706210083, "loss": 3.0453, "step": 47785 }, { "epoch": 3.2470444353852423, "grad_norm": 1.5249905586242676, "learning_rate": 0.0005942893056121756, "loss": 3.3971, "step": 47790 }, { "epoch": 3.2473841554559044, "grad_norm": 1.7594151496887207, "learning_rate": 0.0005942468406033429, "loss": 3.3731, "step": 47795 }, { "epoch": 3.247723875526566, "grad_norm": 2.75927472114563, "learning_rate": 0.0005942043755945101, "loss": 3.456, "step": 47800 }, { "epoch": 3.2480635955972277, "grad_norm": 1.4496275186538696, "learning_rate": 0.0005941619105856773, 
"loss": 3.171, "step": 47805 }, { "epoch": 3.2484033156678898, "grad_norm": 1.0845247507095337, "learning_rate": 0.0005941194455768447, "loss": 3.6089, "step": 47810 }, { "epoch": 3.2487430357385514, "grad_norm": 1.185405969619751, "learning_rate": 0.000594076980568012, "loss": 3.2177, "step": 47815 }, { "epoch": 3.249082755809213, "grad_norm": 1.0934009552001953, "learning_rate": 0.0005940345155591792, "loss": 3.7738, "step": 47820 }, { "epoch": 3.249422475879875, "grad_norm": 1.7849913835525513, "learning_rate": 0.0005939920505503466, "loss": 3.4295, "step": 47825 }, { "epoch": 3.2497621959505367, "grad_norm": 1.378356695175171, "learning_rate": 0.0005939495855415138, "loss": 3.392, "step": 47830 }, { "epoch": 3.2501019160211984, "grad_norm": 1.3491836786270142, "learning_rate": 0.000593907120532681, "loss": 3.553, "step": 47835 }, { "epoch": 3.2504416360918604, "grad_norm": 1.2819451093673706, "learning_rate": 0.0005938646555238484, "loss": 3.6537, "step": 47840 }, { "epoch": 3.250781356162522, "grad_norm": 1.1794354915618896, "learning_rate": 0.0005938221905150156, "loss": 3.5164, "step": 47845 }, { "epoch": 3.2511210762331837, "grad_norm": 1.2855387926101685, "learning_rate": 0.0005937797255061829, "loss": 3.6282, "step": 47850 }, { "epoch": 3.2514607963038458, "grad_norm": 1.5561195611953735, "learning_rate": 0.0005937372604973503, "loss": 3.5913, "step": 47855 }, { "epoch": 3.2518005163745074, "grad_norm": 0.8855029344558716, "learning_rate": 0.0005936947954885175, "loss": 3.454, "step": 47860 }, { "epoch": 3.252140236445169, "grad_norm": 2.259631395339966, "learning_rate": 0.0005936523304796847, "loss": 3.1918, "step": 47865 }, { "epoch": 3.252479956515831, "grad_norm": 1.3750009536743164, "learning_rate": 0.000593609865470852, "loss": 3.3421, "step": 47870 }, { "epoch": 3.2528196765864927, "grad_norm": 1.5174392461776733, "learning_rate": 0.0005935674004620193, "loss": 3.1915, "step": 47875 }, { "epoch": 3.2531593966571544, "grad_norm": 1.320859432220459, 
"learning_rate": 0.0005935249354531865, "loss": 3.5136, "step": 47880 }, { "epoch": 3.2534991167278164, "grad_norm": 1.4447101354599, "learning_rate": 0.0005934824704443539, "loss": 3.4497, "step": 47885 }, { "epoch": 3.253838836798478, "grad_norm": 1.3446855545043945, "learning_rate": 0.0005934400054355212, "loss": 3.6047, "step": 47890 }, { "epoch": 3.2541785568691397, "grad_norm": 1.4740773439407349, "learning_rate": 0.0005933975404266885, "loss": 3.4277, "step": 47895 }, { "epoch": 3.254518276939802, "grad_norm": 1.434517502784729, "learning_rate": 0.0005933550754178557, "loss": 3.351, "step": 47900 }, { "epoch": 3.2548579970104634, "grad_norm": 1.4292335510253906, "learning_rate": 0.0005933126104090229, "loss": 3.4102, "step": 47905 }, { "epoch": 3.255197717081125, "grad_norm": 1.2502758502960205, "learning_rate": 0.0005932701454001903, "loss": 3.4016, "step": 47910 }, { "epoch": 3.255537437151787, "grad_norm": 1.3593084812164307, "learning_rate": 0.0005932276803913575, "loss": 3.3698, "step": 47915 }, { "epoch": 3.2558771572224487, "grad_norm": 1.1573753356933594, "learning_rate": 0.0005931852153825248, "loss": 3.9102, "step": 47920 }, { "epoch": 3.2562168772931104, "grad_norm": 1.0907214879989624, "learning_rate": 0.0005931427503736922, "loss": 3.4936, "step": 47925 }, { "epoch": 3.2565565973637725, "grad_norm": 1.2824400663375854, "learning_rate": 0.0005931002853648594, "loss": 3.5638, "step": 47930 }, { "epoch": 3.256896317434434, "grad_norm": 1.4859004020690918, "learning_rate": 0.0005930578203560266, "loss": 3.7342, "step": 47935 }, { "epoch": 3.2572360375050957, "grad_norm": 1.3197611570358276, "learning_rate": 0.000593015355347194, "loss": 3.3845, "step": 47940 }, { "epoch": 3.257575757575758, "grad_norm": 1.1769475936889648, "learning_rate": 0.0005929728903383612, "loss": 3.4428, "step": 47945 }, { "epoch": 3.2579154776464194, "grad_norm": 1.146775245666504, "learning_rate": 0.0005929304253295284, "loss": 3.5084, "step": 47950 }, { "epoch": 
3.258255197717081, "grad_norm": 1.1548666954040527, "learning_rate": 0.0005928879603206959, "loss": 3.5779, "step": 47955 }, { "epoch": 3.258594917787743, "grad_norm": 1.1263725757598877, "learning_rate": 0.0005928454953118631, "loss": 3.5829, "step": 47960 }, { "epoch": 3.2589346378584048, "grad_norm": 1.1801725625991821, "learning_rate": 0.0005928030303030303, "loss": 3.4517, "step": 47965 }, { "epoch": 3.2592743579290664, "grad_norm": 1.2468994855880737, "learning_rate": 0.0005927605652941976, "loss": 3.5961, "step": 47970 }, { "epoch": 3.2596140779997285, "grad_norm": 1.445039987564087, "learning_rate": 0.0005927181002853649, "loss": 3.2949, "step": 47975 }, { "epoch": 3.25995379807039, "grad_norm": 1.1883060932159424, "learning_rate": 0.0005926756352765321, "loss": 3.3056, "step": 47980 }, { "epoch": 3.2602935181410517, "grad_norm": 1.2603391408920288, "learning_rate": 0.0005926331702676994, "loss": 3.6481, "step": 47985 }, { "epoch": 3.2606332382117134, "grad_norm": 1.334981083869934, "learning_rate": 0.0005925907052588668, "loss": 3.5033, "step": 47990 }, { "epoch": 3.2609729582823754, "grad_norm": 1.349288821220398, "learning_rate": 0.000592548240250034, "loss": 3.6766, "step": 47995 }, { "epoch": 3.261312678353037, "grad_norm": 1.9445176124572754, "learning_rate": 0.0005925057752412013, "loss": 3.5784, "step": 48000 }, { "epoch": 3.2616523984236987, "grad_norm": 1.368672251701355, "learning_rate": 0.0005924633102323685, "loss": 3.4459, "step": 48005 }, { "epoch": 3.2619921184943608, "grad_norm": 1.1145576238632202, "learning_rate": 0.0005924208452235358, "loss": 3.6635, "step": 48010 }, { "epoch": 3.2623318385650224, "grad_norm": 1.3710367679595947, "learning_rate": 0.0005923783802147031, "loss": 3.4505, "step": 48015 }, { "epoch": 3.262671558635684, "grad_norm": 1.3881115913391113, "learning_rate": 0.0005923359152058703, "loss": 3.3965, "step": 48020 }, { "epoch": 3.263011278706346, "grad_norm": 1.167551040649414, "learning_rate": 0.0005922934501970377, 
"loss": 3.7597, "step": 48025 }, { "epoch": 3.2633509987770077, "grad_norm": 0.9630259275436401, "learning_rate": 0.000592250985188205, "loss": 3.5168, "step": 48030 }, { "epoch": 3.2636907188476694, "grad_norm": 1.2540911436080933, "learning_rate": 0.0005922085201793722, "loss": 3.398, "step": 48035 }, { "epoch": 3.2640304389183314, "grad_norm": 1.0217067003250122, "learning_rate": 0.0005921660551705395, "loss": 3.4959, "step": 48040 }, { "epoch": 3.264370158988993, "grad_norm": 1.322771430015564, "learning_rate": 0.0005921235901617068, "loss": 3.3393, "step": 48045 }, { "epoch": 3.2647098790596547, "grad_norm": 1.5273725986480713, "learning_rate": 0.000592081125152874, "loss": 3.6613, "step": 48050 }, { "epoch": 3.265049599130317, "grad_norm": 1.6505893468856812, "learning_rate": 0.0005920386601440412, "loss": 3.1917, "step": 48055 }, { "epoch": 3.2653893192009784, "grad_norm": 5.469918251037598, "learning_rate": 0.0005919961951352087, "loss": 3.1248, "step": 48060 }, { "epoch": 3.26572903927164, "grad_norm": 2.0006370544433594, "learning_rate": 0.0005919537301263759, "loss": 3.46, "step": 48065 }, { "epoch": 3.266068759342302, "grad_norm": 1.9097448587417603, "learning_rate": 0.0005919112651175431, "loss": 3.4862, "step": 48070 }, { "epoch": 3.2664084794129638, "grad_norm": 1.3865015506744385, "learning_rate": 0.0005918688001087105, "loss": 3.5135, "step": 48075 }, { "epoch": 3.2667481994836254, "grad_norm": 1.3044326305389404, "learning_rate": 0.0005918263350998777, "loss": 3.2874, "step": 48080 }, { "epoch": 3.2670879195542875, "grad_norm": 3.070375442504883, "learning_rate": 0.0005917838700910449, "loss": 3.2891, "step": 48085 }, { "epoch": 3.267427639624949, "grad_norm": 1.7644689083099365, "learning_rate": 0.0005917414050822123, "loss": 3.5112, "step": 48090 }, { "epoch": 3.2677673596956107, "grad_norm": 1.335452914237976, "learning_rate": 0.0005916989400733796, "loss": 3.497, "step": 48095 }, { "epoch": 3.2681070797662723, "grad_norm": 1.2023518085479736, 
"learning_rate": 0.0005916564750645468, "loss": 3.5069, "step": 48100 }, { "epoch": 3.2684467998369344, "grad_norm": 1.143660306930542, "learning_rate": 0.0005916140100557141, "loss": 3.5409, "step": 48105 }, { "epoch": 3.268786519907596, "grad_norm": 1.8150233030319214, "learning_rate": 0.0005915715450468814, "loss": 3.5211, "step": 48110 }, { "epoch": 3.2691262399782577, "grad_norm": 1.18147873878479, "learning_rate": 0.0005915290800380486, "loss": 3.432, "step": 48115 }, { "epoch": 3.2694659600489198, "grad_norm": 1.805039882659912, "learning_rate": 0.0005914866150292159, "loss": 3.5534, "step": 48120 }, { "epoch": 3.2698056801195814, "grad_norm": 1.4879364967346191, "learning_rate": 0.0005914441500203832, "loss": 3.6654, "step": 48125 }, { "epoch": 3.270145400190243, "grad_norm": 1.3521498441696167, "learning_rate": 0.0005914016850115505, "loss": 3.2639, "step": 48130 }, { "epoch": 3.270485120260905, "grad_norm": 1.4952338933944702, "learning_rate": 0.0005913592200027178, "loss": 3.1976, "step": 48135 }, { "epoch": 3.2708248403315667, "grad_norm": 1.4914406538009644, "learning_rate": 0.000591316754993885, "loss": 3.4497, "step": 48140 }, { "epoch": 3.2711645604022284, "grad_norm": 1.4159810543060303, "learning_rate": 0.0005912742899850523, "loss": 3.5683, "step": 48145 }, { "epoch": 3.2715042804728904, "grad_norm": 1.094245195388794, "learning_rate": 0.0005912318249762196, "loss": 3.6802, "step": 48150 }, { "epoch": 3.271844000543552, "grad_norm": 1.6181352138519287, "learning_rate": 0.0005911893599673868, "loss": 3.6439, "step": 48155 }, { "epoch": 3.2721837206142137, "grad_norm": 1.6792209148406982, "learning_rate": 0.0005911468949585541, "loss": 3.0036, "step": 48160 }, { "epoch": 3.2725234406848758, "grad_norm": 1.7117942571640015, "learning_rate": 0.0005911044299497215, "loss": 3.4922, "step": 48165 }, { "epoch": 3.2728631607555374, "grad_norm": 1.6274882555007935, "learning_rate": 0.0005910619649408887, "loss": 3.1749, "step": 48170 }, { "epoch": 
3.273202880826199, "grad_norm": 1.7902445793151855, "learning_rate": 0.000591019499932056, "loss": 3.3938, "step": 48175 }, { "epoch": 3.273542600896861, "grad_norm": 1.2158254384994507, "learning_rate": 0.0005909770349232233, "loss": 3.4669, "step": 48180 }, { "epoch": 3.2738823209675227, "grad_norm": 1.380110502243042, "learning_rate": 0.0005909345699143905, "loss": 3.539, "step": 48185 }, { "epoch": 3.2742220410381844, "grad_norm": 1.717228889465332, "learning_rate": 0.0005908921049055577, "loss": 3.4921, "step": 48190 }, { "epoch": 3.2745617611088464, "grad_norm": 1.3547314405441284, "learning_rate": 0.0005908496398967251, "loss": 3.5694, "step": 48195 }, { "epoch": 3.274901481179508, "grad_norm": 1.4737255573272705, "learning_rate": 0.0005908071748878924, "loss": 3.4688, "step": 48200 }, { "epoch": 3.2752412012501697, "grad_norm": 1.653188705444336, "learning_rate": 0.0005907647098790596, "loss": 3.3968, "step": 48205 }, { "epoch": 3.275580921320832, "grad_norm": 1.4780980348587036, "learning_rate": 0.000590722244870227, "loss": 3.5584, "step": 48210 }, { "epoch": 3.2759206413914934, "grad_norm": 1.301609992980957, "learning_rate": 0.0005906797798613942, "loss": 3.3856, "step": 48215 }, { "epoch": 3.276260361462155, "grad_norm": 0.9933018088340759, "learning_rate": 0.0005906373148525614, "loss": 3.5129, "step": 48220 }, { "epoch": 3.276600081532817, "grad_norm": 1.2897074222564697, "learning_rate": 0.0005905948498437288, "loss": 3.4928, "step": 48225 }, { "epoch": 3.2769398016034788, "grad_norm": 1.3506159782409668, "learning_rate": 0.000590552384834896, "loss": 3.4931, "step": 48230 }, { "epoch": 3.2772795216741404, "grad_norm": 1.7584439516067505, "learning_rate": 0.0005905099198260634, "loss": 3.4732, "step": 48235 }, { "epoch": 3.2776192417448025, "grad_norm": 1.4813902378082275, "learning_rate": 0.0005904674548172307, "loss": 3.5399, "step": 48240 }, { "epoch": 3.277958961815464, "grad_norm": 1.1761565208435059, "learning_rate": 0.0005904249898083979, 
"loss": 3.684, "step": 48245 }, { "epoch": 3.2782986818861257, "grad_norm": 1.2213305234909058, "learning_rate": 0.0005903825247995652, "loss": 3.5872, "step": 48250 }, { "epoch": 3.278638401956788, "grad_norm": 1.271187663078308, "learning_rate": 0.0005903400597907324, "loss": 3.353, "step": 48255 }, { "epoch": 3.2789781220274494, "grad_norm": 1.6736210584640503, "learning_rate": 0.0005902975947818997, "loss": 3.416, "step": 48260 }, { "epoch": 3.279317842098111, "grad_norm": 1.3214302062988281, "learning_rate": 0.0005902551297730671, "loss": 3.4546, "step": 48265 }, { "epoch": 3.279657562168773, "grad_norm": 1.176542043685913, "learning_rate": 0.0005902126647642343, "loss": 3.4275, "step": 48270 }, { "epoch": 3.2799972822394348, "grad_norm": 1.6131476163864136, "learning_rate": 0.0005901701997554016, "loss": 3.3379, "step": 48275 }, { "epoch": 3.2803370023100964, "grad_norm": 1.0630357265472412, "learning_rate": 0.0005901277347465689, "loss": 3.5072, "step": 48280 }, { "epoch": 3.2806767223807585, "grad_norm": 1.2987984418869019, "learning_rate": 0.0005900852697377361, "loss": 3.5336, "step": 48285 }, { "epoch": 3.28101644245142, "grad_norm": 1.413846731185913, "learning_rate": 0.0005900428047289033, "loss": 3.41, "step": 48290 }, { "epoch": 3.2813561625220817, "grad_norm": 1.1244173049926758, "learning_rate": 0.0005900003397200707, "loss": 3.5537, "step": 48295 }, { "epoch": 3.281695882592744, "grad_norm": 1.417352557182312, "learning_rate": 0.000589957874711238, "loss": 3.4336, "step": 48300 }, { "epoch": 3.2820356026634054, "grad_norm": 1.4955921173095703, "learning_rate": 0.0005899154097024052, "loss": 3.5367, "step": 48305 }, { "epoch": 3.282375322734067, "grad_norm": 1.3684443235397339, "learning_rate": 0.0005898729446935726, "loss": 3.6059, "step": 48310 }, { "epoch": 3.282715042804729, "grad_norm": 1.335421085357666, "learning_rate": 0.0005898304796847398, "loss": 3.5137, "step": 48315 }, { "epoch": 3.2830547628753908, "grad_norm": 1.521121859550476, 
"learning_rate": 0.000589788014675907, "loss": 3.4511, "step": 48320 }, { "epoch": 3.2833944829460524, "grad_norm": 1.6544898748397827, "learning_rate": 0.0005897455496670744, "loss": 3.5609, "step": 48325 }, { "epoch": 3.283734203016714, "grad_norm": 1.5359985828399658, "learning_rate": 0.0005897030846582416, "loss": 3.4557, "step": 48330 }, { "epoch": 3.284073923087376, "grad_norm": 1.5002877712249756, "learning_rate": 0.0005896606196494089, "loss": 3.3945, "step": 48335 }, { "epoch": 3.2844136431580377, "grad_norm": 1.1738333702087402, "learning_rate": 0.0005896181546405763, "loss": 3.4722, "step": 48340 }, { "epoch": 3.2847533632286994, "grad_norm": 1.0749974250793457, "learning_rate": 0.0005895756896317435, "loss": 3.1067, "step": 48345 }, { "epoch": 3.2850930832993614, "grad_norm": 1.282315969467163, "learning_rate": 0.0005895332246229107, "loss": 3.4912, "step": 48350 }, { "epoch": 3.285432803370023, "grad_norm": 1.6817805767059326, "learning_rate": 0.000589490759614078, "loss": 3.4265, "step": 48355 }, { "epoch": 3.2857725234406847, "grad_norm": 1.1896024942398071, "learning_rate": 0.0005894482946052453, "loss": 3.3397, "step": 48360 }, { "epoch": 3.286112243511347, "grad_norm": 1.2955886125564575, "learning_rate": 0.0005894058295964125, "loss": 3.4275, "step": 48365 }, { "epoch": 3.2864519635820084, "grad_norm": 1.1819556951522827, "learning_rate": 0.0005893633645875799, "loss": 3.534, "step": 48370 }, { "epoch": 3.28679168365267, "grad_norm": 1.2820907831192017, "learning_rate": 0.0005893208995787472, "loss": 3.5916, "step": 48375 }, { "epoch": 3.287131403723332, "grad_norm": 1.3368865251541138, "learning_rate": 0.0005892784345699144, "loss": 3.6064, "step": 48380 }, { "epoch": 3.2874711237939938, "grad_norm": 1.282663106918335, "learning_rate": 0.0005892359695610817, "loss": 3.4507, "step": 48385 }, { "epoch": 3.2878108438646554, "grad_norm": 3.797450065612793, "learning_rate": 0.000589193504552249, "loss": 3.5688, "step": 48390 }, { "epoch": 
3.2881505639353175, "grad_norm": 1.2111940383911133, "learning_rate": 0.0005891510395434162, "loss": 3.5092, "step": 48395 }, { "epoch": 3.288490284005979, "grad_norm": 1.2976163625717163, "learning_rate": 0.0005891085745345835, "loss": 3.5835, "step": 48400 }, { "epoch": 3.2888300040766407, "grad_norm": 1.238445520401001, "learning_rate": 0.0005890661095257508, "loss": 3.8346, "step": 48405 }, { "epoch": 3.289169724147303, "grad_norm": 1.1119384765625, "learning_rate": 0.0005890236445169181, "loss": 3.53, "step": 48410 }, { "epoch": 3.2895094442179644, "grad_norm": 1.2496806383132935, "learning_rate": 0.0005889811795080854, "loss": 3.3743, "step": 48415 }, { "epoch": 3.289849164288626, "grad_norm": 2.0366501808166504, "learning_rate": 0.0005889387144992526, "loss": 3.3178, "step": 48420 }, { "epoch": 3.290188884359288, "grad_norm": 1.2636216878890991, "learning_rate": 0.0005888962494904199, "loss": 3.4889, "step": 48425 }, { "epoch": 3.2905286044299498, "grad_norm": 1.6199607849121094, "learning_rate": 0.0005888537844815872, "loss": 3.5426, "step": 48430 }, { "epoch": 3.2908683245006114, "grad_norm": 1.8234105110168457, "learning_rate": 0.0005888113194727544, "loss": 3.34, "step": 48435 }, { "epoch": 3.291208044571273, "grad_norm": 1.6531118154525757, "learning_rate": 0.0005887688544639217, "loss": 3.7906, "step": 48440 }, { "epoch": 3.291547764641935, "grad_norm": 1.2109034061431885, "learning_rate": 0.0005887263894550891, "loss": 3.8552, "step": 48445 }, { "epoch": 3.2918874847125967, "grad_norm": 1.009165644645691, "learning_rate": 0.0005886839244462563, "loss": 3.7597, "step": 48450 }, { "epoch": 3.2922272047832584, "grad_norm": 1.1823136806488037, "learning_rate": 0.0005886414594374235, "loss": 2.9925, "step": 48455 }, { "epoch": 3.2925669248539204, "grad_norm": 1.3396342992782593, "learning_rate": 0.0005885989944285909, "loss": 3.2255, "step": 48460 }, { "epoch": 3.292906644924582, "grad_norm": 1.4025285243988037, "learning_rate": 0.0005885565294197581, 
"loss": 3.516, "step": 48465 }, { "epoch": 3.2932463649952437, "grad_norm": 2.6358296871185303, "learning_rate": 0.0005885140644109253, "loss": 3.5974, "step": 48470 }, { "epoch": 3.2935860850659058, "grad_norm": 1.2724387645721436, "learning_rate": 0.0005884715994020928, "loss": 3.401, "step": 48475 }, { "epoch": 3.2939258051365674, "grad_norm": 1.128792405128479, "learning_rate": 0.00058842913439326, "loss": 3.6735, "step": 48480 }, { "epoch": 3.294265525207229, "grad_norm": 1.3546782732009888, "learning_rate": 0.0005883866693844272, "loss": 3.3891, "step": 48485 }, { "epoch": 3.294605245277891, "grad_norm": 1.4907453060150146, "learning_rate": 0.0005883442043755945, "loss": 3.5447, "step": 48490 }, { "epoch": 3.2949449653485527, "grad_norm": 1.4257959127426147, "learning_rate": 0.0005883017393667618, "loss": 3.4928, "step": 48495 }, { "epoch": 3.2952846854192144, "grad_norm": 1.3082557916641235, "learning_rate": 0.000588259274357929, "loss": 3.5069, "step": 48500 }, { "epoch": 3.2956244054898765, "grad_norm": 1.2833805084228516, "learning_rate": 0.0005882168093490963, "loss": 3.6214, "step": 48505 }, { "epoch": 3.295964125560538, "grad_norm": 1.2484060525894165, "learning_rate": 0.0005881743443402637, "loss": 3.4177, "step": 48510 }, { "epoch": 3.2963038456311997, "grad_norm": 1.5732959508895874, "learning_rate": 0.0005881318793314309, "loss": 3.5219, "step": 48515 }, { "epoch": 3.296643565701862, "grad_norm": 1.1919538974761963, "learning_rate": 0.0005880894143225982, "loss": 3.3583, "step": 48520 }, { "epoch": 3.2969832857725234, "grad_norm": 1.4595067501068115, "learning_rate": 0.0005880469493137655, "loss": 3.3306, "step": 48525 }, { "epoch": 3.297323005843185, "grad_norm": 1.5400938987731934, "learning_rate": 0.0005880044843049327, "loss": 3.3081, "step": 48530 }, { "epoch": 3.297662725913847, "grad_norm": 3.732665777206421, "learning_rate": 0.0005879620192961, "loss": 3.1756, "step": 48535 }, { "epoch": 3.2980024459845088, "grad_norm": 1.543073296546936, 
"learning_rate": 0.0005879195542872672, "loss": 3.1566, "step": 48540 }, { "epoch": 3.2983421660551704, "grad_norm": 1.5467396974563599, "learning_rate": 0.0005878770892784346, "loss": 3.5792, "step": 48545 }, { "epoch": 3.2986818861258325, "grad_norm": 1.7026842832565308, "learning_rate": 0.0005878346242696019, "loss": 3.4143, "step": 48550 }, { "epoch": 3.299021606196494, "grad_norm": 1.423871636390686, "learning_rate": 0.0005877921592607691, "loss": 3.5852, "step": 48555 }, { "epoch": 3.2993613262671557, "grad_norm": 1.33280348777771, "learning_rate": 0.0005877496942519364, "loss": 3.5767, "step": 48560 }, { "epoch": 3.299701046337818, "grad_norm": 1.495176911354065, "learning_rate": 0.0005877072292431037, "loss": 3.3007, "step": 48565 }, { "epoch": 3.3000407664084794, "grad_norm": 1.1021846532821655, "learning_rate": 0.0005876647642342709, "loss": 3.5722, "step": 48570 }, { "epoch": 3.300380486479141, "grad_norm": 1.593564748764038, "learning_rate": 0.0005876222992254383, "loss": 3.5375, "step": 48575 }, { "epoch": 3.300720206549803, "grad_norm": 1.1986948251724243, "learning_rate": 0.0005875798342166056, "loss": 3.5906, "step": 48580 }, { "epoch": 3.3010599266204648, "grad_norm": 1.252508282661438, "learning_rate": 0.0005875373692077728, "loss": 3.6122, "step": 48585 }, { "epoch": 3.3013996466911264, "grad_norm": 1.785951018333435, "learning_rate": 0.0005874949041989402, "loss": 3.3273, "step": 48590 }, { "epoch": 3.3017393667617885, "grad_norm": 1.2907487154006958, "learning_rate": 0.0005874524391901074, "loss": 3.5878, "step": 48595 }, { "epoch": 3.30207908683245, "grad_norm": 2.1094424724578857, "learning_rate": 0.0005874099741812746, "loss": 3.4573, "step": 48600 }, { "epoch": 3.3024188069031117, "grad_norm": 1.7427362203598022, "learning_rate": 0.0005873675091724419, "loss": 3.2887, "step": 48605 }, { "epoch": 3.302758526973774, "grad_norm": 1.3608622550964355, "learning_rate": 0.0005873250441636092, "loss": 3.6277, "step": 48610 }, { "epoch": 
3.3030982470444354, "grad_norm": 1.3323249816894531, "learning_rate": 0.0005872825791547765, "loss": 3.8036, "step": 48615 }, { "epoch": 3.303437967115097, "grad_norm": 1.2884801626205444, "learning_rate": 0.0005872401141459438, "loss": 3.5358, "step": 48620 }, { "epoch": 3.303777687185759, "grad_norm": 1.7176672220230103, "learning_rate": 0.0005871976491371111, "loss": 3.3144, "step": 48625 }, { "epoch": 3.304117407256421, "grad_norm": 1.387609839439392, "learning_rate": 0.0005871551841282783, "loss": 3.3604, "step": 48630 }, { "epoch": 3.3044571273270824, "grad_norm": 1.2650259733200073, "learning_rate": 0.0005871127191194456, "loss": 3.6686, "step": 48635 }, { "epoch": 3.3047968473977445, "grad_norm": 4.263481616973877, "learning_rate": 0.0005870702541106128, "loss": 3.6432, "step": 48640 }, { "epoch": 3.305136567468406, "grad_norm": 4.300159454345703, "learning_rate": 0.0005870277891017801, "loss": 3.5526, "step": 48645 }, { "epoch": 3.3054762875390677, "grad_norm": 1.1295640468597412, "learning_rate": 0.0005869853240929475, "loss": 3.5841, "step": 48650 }, { "epoch": 3.30581600760973, "grad_norm": 1.1185283660888672, "learning_rate": 0.0005869428590841147, "loss": 3.4437, "step": 48655 }, { "epoch": 3.3061557276803915, "grad_norm": 1.1295561790466309, "learning_rate": 0.000586900394075282, "loss": 3.7423, "step": 48660 }, { "epoch": 3.306495447751053, "grad_norm": 1.2924562692642212, "learning_rate": 0.0005868579290664493, "loss": 3.4097, "step": 48665 }, { "epoch": 3.3068351678217147, "grad_norm": 1.722095012664795, "learning_rate": 0.0005868154640576165, "loss": 3.3942, "step": 48670 }, { "epoch": 3.307174887892377, "grad_norm": 1.2586289644241333, "learning_rate": 0.0005867729990487837, "loss": 3.43, "step": 48675 }, { "epoch": 3.3075146079630384, "grad_norm": 1.3625694513320923, "learning_rate": 0.0005867305340399511, "loss": 3.2866, "step": 48680 }, { "epoch": 3.3078543280337, "grad_norm": 1.5432337522506714, "learning_rate": 0.0005866880690311184, 
"loss": 3.2212, "step": 48685 }, { "epoch": 3.308194048104362, "grad_norm": 1.3263615369796753, "learning_rate": 0.0005866456040222856, "loss": 3.2508, "step": 48690 }, { "epoch": 3.3085337681750238, "grad_norm": 1.3339166641235352, "learning_rate": 0.000586603139013453, "loss": 3.4878, "step": 48695 }, { "epoch": 3.3088734882456854, "grad_norm": 1.3707398176193237, "learning_rate": 0.0005865606740046202, "loss": 3.6129, "step": 48700 }, { "epoch": 3.3092132083163475, "grad_norm": 1.1807820796966553, "learning_rate": 0.0005865182089957874, "loss": 3.3854, "step": 48705 }, { "epoch": 3.309552928387009, "grad_norm": 1.3584933280944824, "learning_rate": 0.0005864757439869548, "loss": 3.3166, "step": 48710 }, { "epoch": 3.3098926484576707, "grad_norm": 1.3195980787277222, "learning_rate": 0.000586433278978122, "loss": 3.4253, "step": 48715 }, { "epoch": 3.310232368528333, "grad_norm": 1.215102195739746, "learning_rate": 0.0005863908139692893, "loss": 3.2587, "step": 48720 }, { "epoch": 3.3105720885989944, "grad_norm": 1.6812621355056763, "learning_rate": 0.0005863483489604567, "loss": 3.5117, "step": 48725 }, { "epoch": 3.310911808669656, "grad_norm": 1.9798179864883423, "learning_rate": 0.0005863058839516239, "loss": 3.4569, "step": 48730 }, { "epoch": 3.311251528740318, "grad_norm": 1.3727452754974365, "learning_rate": 0.0005862634189427911, "loss": 3.3006, "step": 48735 }, { "epoch": 3.3115912488109798, "grad_norm": 1.3132469654083252, "learning_rate": 0.0005862209539339584, "loss": 3.5613, "step": 48740 }, { "epoch": 3.3119309688816414, "grad_norm": 1.636205792427063, "learning_rate": 0.0005861784889251257, "loss": 3.3008, "step": 48745 }, { "epoch": 3.3122706889523035, "grad_norm": 1.2766011953353882, "learning_rate": 0.0005861360239162929, "loss": 3.7275, "step": 48750 }, { "epoch": 3.312610409022965, "grad_norm": 2.219165086746216, "learning_rate": 0.0005860935589074603, "loss": 3.2957, "step": 48755 }, { "epoch": 3.3129501290936267, "grad_norm": 
1.2018388509750366, "learning_rate": 0.0005860510938986276, "loss": 3.4018, "step": 48760 }, { "epoch": 3.313289849164289, "grad_norm": 1.3991329669952393, "learning_rate": 0.0005860086288897948, "loss": 3.5295, "step": 48765 }, { "epoch": 3.3136295692349504, "grad_norm": 1.607043981552124, "learning_rate": 0.0005859661638809621, "loss": 3.3525, "step": 48770 }, { "epoch": 3.313969289305612, "grad_norm": 1.0122644901275635, "learning_rate": 0.0005859236988721294, "loss": 3.3851, "step": 48775 }, { "epoch": 3.3143090093762737, "grad_norm": 1.389603614807129, "learning_rate": 0.0005858812338632966, "loss": 3.4655, "step": 48780 }, { "epoch": 3.314648729446936, "grad_norm": 1.355149269104004, "learning_rate": 0.000585838768854464, "loss": 3.4923, "step": 48785 }, { "epoch": 3.3149884495175974, "grad_norm": 1.282522439956665, "learning_rate": 0.0005857963038456312, "loss": 3.5562, "step": 48790 }, { "epoch": 3.315328169588259, "grad_norm": 1.520403265953064, "learning_rate": 0.0005857538388367985, "loss": 3.628, "step": 48795 }, { "epoch": 3.315667889658921, "grad_norm": 1.4155336618423462, "learning_rate": 0.0005857113738279658, "loss": 3.406, "step": 48800 }, { "epoch": 3.3160076097295828, "grad_norm": 1.3432660102844238, "learning_rate": 0.000585668908819133, "loss": 3.5222, "step": 48805 }, { "epoch": 3.3163473298002444, "grad_norm": 1.1630878448486328, "learning_rate": 0.0005856264438103003, "loss": 3.4094, "step": 48810 }, { "epoch": 3.3166870498709065, "grad_norm": 1.8767386674880981, "learning_rate": 0.0005855839788014676, "loss": 3.4588, "step": 48815 }, { "epoch": 3.317026769941568, "grad_norm": 1.6948953866958618, "learning_rate": 0.0005855415137926349, "loss": 3.4108, "step": 48820 }, { "epoch": 3.3173664900122297, "grad_norm": 1.768096923828125, "learning_rate": 0.0005854990487838022, "loss": 3.5452, "step": 48825 }, { "epoch": 3.317706210082892, "grad_norm": 1.3505363464355469, "learning_rate": 0.0005854565837749695, "loss": 3.3411, "step": 48830 }, { 
"epoch": 3.3180459301535534, "grad_norm": 1.4504464864730835, "learning_rate": 0.0005854141187661367, "loss": 3.7371, "step": 48835 }, { "epoch": 3.318385650224215, "grad_norm": 2.3292739391326904, "learning_rate": 0.0005853716537573039, "loss": 3.5344, "step": 48840 }, { "epoch": 3.318725370294877, "grad_norm": 1.5693823099136353, "learning_rate": 0.0005853291887484713, "loss": 3.6813, "step": 48845 }, { "epoch": 3.3190650903655388, "grad_norm": 1.4378775358200073, "learning_rate": 0.0005852867237396385, "loss": 3.1644, "step": 48850 }, { "epoch": 3.3194048104362004, "grad_norm": 1.2388678789138794, "learning_rate": 0.0005852442587308058, "loss": 3.6225, "step": 48855 }, { "epoch": 3.3197445305068625, "grad_norm": 1.2446426153182983, "learning_rate": 0.0005852017937219732, "loss": 3.1052, "step": 48860 }, { "epoch": 3.320084250577524, "grad_norm": 1.6199434995651245, "learning_rate": 0.0005851593287131404, "loss": 3.5387, "step": 48865 }, { "epoch": 3.3204239706481857, "grad_norm": 2.0801451206207275, "learning_rate": 0.0005851168637043076, "loss": 3.6806, "step": 48870 }, { "epoch": 3.320763690718848, "grad_norm": 1.2091425657272339, "learning_rate": 0.000585074398695475, "loss": 3.3872, "step": 48875 }, { "epoch": 3.3211034107895094, "grad_norm": 1.485683798789978, "learning_rate": 0.0005850319336866422, "loss": 3.47, "step": 48880 }, { "epoch": 3.321443130860171, "grad_norm": 1.8656775951385498, "learning_rate": 0.0005849894686778094, "loss": 3.3986, "step": 48885 }, { "epoch": 3.321782850930833, "grad_norm": 1.796586036682129, "learning_rate": 0.0005849470036689768, "loss": 3.4735, "step": 48890 }, { "epoch": 3.3221225710014948, "grad_norm": 1.3768655061721802, "learning_rate": 0.0005849045386601441, "loss": 3.591, "step": 48895 }, { "epoch": 3.3224622910721564, "grad_norm": 1.125137209892273, "learning_rate": 0.0005848620736513113, "loss": 3.6135, "step": 48900 }, { "epoch": 3.3228020111428185, "grad_norm": 1.6124951839447021, "learning_rate": 
0.0005848196086424786, "loss": 3.4453, "step": 48905 }, { "epoch": 3.32314173121348, "grad_norm": 1.5574727058410645, "learning_rate": 0.0005847771436336459, "loss": 3.8112, "step": 48910 }, { "epoch": 3.3234814512841417, "grad_norm": 1.4569141864776611, "learning_rate": 0.0005847346786248132, "loss": 3.5255, "step": 48915 }, { "epoch": 3.323821171354804, "grad_norm": 1.4176244735717773, "learning_rate": 0.0005846922136159804, "loss": 3.3241, "step": 48920 }, { "epoch": 3.3241608914254654, "grad_norm": 1.3423504829406738, "learning_rate": 0.0005846497486071478, "loss": 3.5921, "step": 48925 }, { "epoch": 3.324500611496127, "grad_norm": 1.3632335662841797, "learning_rate": 0.0005846072835983151, "loss": 3.4915, "step": 48930 }, { "epoch": 3.324840331566789, "grad_norm": 1.2899681329727173, "learning_rate": 0.0005845648185894823, "loss": 3.5218, "step": 48935 }, { "epoch": 3.325180051637451, "grad_norm": 1.3518503904342651, "learning_rate": 0.0005845223535806495, "loss": 3.4335, "step": 48940 }, { "epoch": 3.3255197717081124, "grad_norm": 1.3149631023406982, "learning_rate": 0.0005844798885718169, "loss": 3.4889, "step": 48945 }, { "epoch": 3.3258594917787745, "grad_norm": 1.5700109004974365, "learning_rate": 0.0005844374235629841, "loss": 3.5924, "step": 48950 }, { "epoch": 3.326199211849436, "grad_norm": 1.1803669929504395, "learning_rate": 0.0005843949585541513, "loss": 3.5199, "step": 48955 }, { "epoch": 3.3265389319200978, "grad_norm": 1.2831382751464844, "learning_rate": 0.0005843524935453188, "loss": 3.4831, "step": 48960 }, { "epoch": 3.32687865199076, "grad_norm": 1.2673559188842773, "learning_rate": 0.000584310028536486, "loss": 3.7451, "step": 48965 }, { "epoch": 3.3272183720614215, "grad_norm": 1.3631199598312378, "learning_rate": 0.0005842675635276532, "loss": 3.529, "step": 48970 }, { "epoch": 3.327558092132083, "grad_norm": 1.3399912118911743, "learning_rate": 0.0005842250985188206, "loss": 3.2209, "step": 48975 }, { "epoch": 3.327897812202745, 
"grad_norm": 1.2308913469314575, "learning_rate": 0.0005841826335099878, "loss": 3.4606, "step": 48980 }, { "epoch": 3.328237532273407, "grad_norm": 1.3003915548324585, "learning_rate": 0.000584140168501155, "loss": 3.3944, "step": 48985 }, { "epoch": 3.3285772523440684, "grad_norm": 1.4341731071472168, "learning_rate": 0.0005840977034923223, "loss": 3.3802, "step": 48990 }, { "epoch": 3.3289169724147305, "grad_norm": 1.7571738958358765, "learning_rate": 0.0005840552384834897, "loss": 3.4624, "step": 48995 }, { "epoch": 3.329256692485392, "grad_norm": 1.5109615325927734, "learning_rate": 0.0005840127734746569, "loss": 3.6282, "step": 49000 }, { "epoch": 3.3295964125560538, "grad_norm": 1.504292368888855, "learning_rate": 0.0005839703084658242, "loss": 3.3948, "step": 49005 }, { "epoch": 3.3299361326267154, "grad_norm": 1.4977275133132935, "learning_rate": 0.0005839278434569915, "loss": 3.5161, "step": 49010 }, { "epoch": 3.3302758526973775, "grad_norm": 1.960276484489441, "learning_rate": 0.0005838853784481587, "loss": 3.367, "step": 49015 }, { "epoch": 3.330615572768039, "grad_norm": 1.4389710426330566, "learning_rate": 0.000583842913439326, "loss": 3.3962, "step": 49020 }, { "epoch": 3.3309552928387007, "grad_norm": 1.3485757112503052, "learning_rate": 0.0005838004484304932, "loss": 3.6949, "step": 49025 }, { "epoch": 3.331295012909363, "grad_norm": 1.4561222791671753, "learning_rate": 0.0005837579834216606, "loss": 3.6297, "step": 49030 }, { "epoch": 3.3316347329800244, "grad_norm": 1.2042148113250732, "learning_rate": 0.0005837155184128279, "loss": 3.6411, "step": 49035 }, { "epoch": 3.331974453050686, "grad_norm": 1.272091269493103, "learning_rate": 0.0005836730534039951, "loss": 3.7274, "step": 49040 }, { "epoch": 3.332314173121348, "grad_norm": 1.0369913578033447, "learning_rate": 0.0005836305883951624, "loss": 3.3658, "step": 49045 }, { "epoch": 3.3326538931920098, "grad_norm": 1.1803821325302124, "learning_rate": 0.0005835881233863297, "loss": 3.6338, 
"step": 49050 }, { "epoch": 3.3329936132626714, "grad_norm": 1.0046331882476807, "learning_rate": 0.0005835456583774969, "loss": 3.5681, "step": 49055 }, { "epoch": 3.3333333333333335, "grad_norm": 1.1357439756393433, "learning_rate": 0.0005835031933686642, "loss": 3.6412, "step": 49060 }, { "epoch": 3.333673053403995, "grad_norm": 1.3574005365371704, "learning_rate": 0.0005834607283598316, "loss": 3.0511, "step": 49065 }, { "epoch": 3.3340127734746567, "grad_norm": 1.498302936553955, "learning_rate": 0.0005834182633509988, "loss": 3.1825, "step": 49070 }, { "epoch": 3.334352493545319, "grad_norm": 1.214086890220642, "learning_rate": 0.000583375798342166, "loss": 3.5172, "step": 49075 }, { "epoch": 3.3346922136159804, "grad_norm": 1.3811973333358765, "learning_rate": 0.0005833333333333334, "loss": 3.3933, "step": 49080 }, { "epoch": 3.335031933686642, "grad_norm": 1.1062541007995605, "learning_rate": 0.0005832908683245006, "loss": 3.4765, "step": 49085 }, { "epoch": 3.335371653757304, "grad_norm": 1.2791482210159302, "learning_rate": 0.0005832484033156678, "loss": 3.2764, "step": 49090 }, { "epoch": 3.335711373827966, "grad_norm": 1.4721044301986694, "learning_rate": 0.0005832059383068352, "loss": 3.6869, "step": 49095 }, { "epoch": 3.3360510938986274, "grad_norm": 1.17406165599823, "learning_rate": 0.0005831634732980025, "loss": 3.4781, "step": 49100 }, { "epoch": 3.3363908139692895, "grad_norm": 1.3900190591812134, "learning_rate": 0.0005831210082891697, "loss": 3.3047, "step": 49105 }, { "epoch": 3.336730534039951, "grad_norm": 1.2703557014465332, "learning_rate": 0.0005830785432803371, "loss": 3.767, "step": 49110 }, { "epoch": 3.3370702541106128, "grad_norm": 1.3954803943634033, "learning_rate": 0.0005830360782715043, "loss": 3.3996, "step": 49115 }, { "epoch": 3.3374099741812744, "grad_norm": 1.2941951751708984, "learning_rate": 0.0005829936132626715, "loss": 3.3432, "step": 49120 }, { "epoch": 3.3377496942519365, "grad_norm": 1.4783236980438232, 
"learning_rate": 0.0005829511482538388, "loss": 3.4549, "step": 49125 }, { "epoch": 3.338089414322598, "grad_norm": 1.2451578378677368, "learning_rate": 0.0005829086832450061, "loss": 3.5646, "step": 49130 }, { "epoch": 3.3384291343932597, "grad_norm": 1.076136827468872, "learning_rate": 0.0005828662182361734, "loss": 3.4743, "step": 49135 }, { "epoch": 3.338768854463922, "grad_norm": 1.6516225337982178, "learning_rate": 0.0005828237532273407, "loss": 3.8026, "step": 49140 }, { "epoch": 3.3391085745345834, "grad_norm": 1.571225881576538, "learning_rate": 0.000582781288218508, "loss": 3.6589, "step": 49145 }, { "epoch": 3.339448294605245, "grad_norm": 1.0237231254577637, "learning_rate": 0.0005827388232096752, "loss": 3.5414, "step": 49150 }, { "epoch": 3.339788014675907, "grad_norm": 1.6344808340072632, "learning_rate": 0.0005826963582008425, "loss": 3.324, "step": 49155 }, { "epoch": 3.3401277347465688, "grad_norm": 1.4603301286697388, "learning_rate": 0.0005826538931920098, "loss": 3.3689, "step": 49160 }, { "epoch": 3.3404674548172304, "grad_norm": 1.517194151878357, "learning_rate": 0.000582611428183177, "loss": 3.4085, "step": 49165 }, { "epoch": 3.3408071748878925, "grad_norm": 1.2349120378494263, "learning_rate": 0.0005825689631743444, "loss": 3.4742, "step": 49170 }, { "epoch": 3.341146894958554, "grad_norm": 1.403501033782959, "learning_rate": 0.0005825264981655116, "loss": 3.4262, "step": 49175 }, { "epoch": 3.3414866150292157, "grad_norm": 1.6799575090408325, "learning_rate": 0.0005824840331566789, "loss": 3.2948, "step": 49180 }, { "epoch": 3.341826335099878, "grad_norm": 1.4237301349639893, "learning_rate": 0.0005824415681478462, "loss": 3.099, "step": 49185 }, { "epoch": 3.3421660551705394, "grad_norm": 1.1953555345535278, "learning_rate": 0.0005823991031390134, "loss": 3.298, "step": 49190 }, { "epoch": 3.342505775241201, "grad_norm": 2.0887720584869385, "learning_rate": 0.0005823566381301807, "loss": 3.3452, "step": 49195 }, { "epoch": 
3.342845495311863, "grad_norm": 1.9556978940963745, "learning_rate": 0.000582314173121348, "loss": 3.5189, "step": 49200 }, { "epoch": 3.3431852153825248, "grad_norm": 1.2659317255020142, "learning_rate": 0.0005822717081125153, "loss": 3.7177, "step": 49205 }, { "epoch": 3.3435249354531864, "grad_norm": 1.7703948020935059, "learning_rate": 0.0005822292431036826, "loss": 3.5635, "step": 49210 }, { "epoch": 3.3438646555238485, "grad_norm": 1.7683275938034058, "learning_rate": 0.0005821867780948499, "loss": 3.4645, "step": 49215 }, { "epoch": 3.34420437559451, "grad_norm": 1.3531498908996582, "learning_rate": 0.0005821443130860171, "loss": 3.3893, "step": 49220 }, { "epoch": 3.3445440956651717, "grad_norm": 1.3349528312683105, "learning_rate": 0.0005821018480771843, "loss": 3.366, "step": 49225 }, { "epoch": 3.344883815735834, "grad_norm": 1.2317914962768555, "learning_rate": 0.0005820593830683517, "loss": 3.6348, "step": 49230 }, { "epoch": 3.3452235358064955, "grad_norm": 0.980986475944519, "learning_rate": 0.0005820169180595189, "loss": 3.1764, "step": 49235 }, { "epoch": 3.345563255877157, "grad_norm": 1.3474690914154053, "learning_rate": 0.0005819744530506862, "loss": 3.5096, "step": 49240 }, { "epoch": 3.345902975947819, "grad_norm": 1.1605812311172485, "learning_rate": 0.0005819319880418536, "loss": 3.5189, "step": 49245 }, { "epoch": 3.346242696018481, "grad_norm": 1.6696264743804932, "learning_rate": 0.0005818895230330208, "loss": 3.5059, "step": 49250 }, { "epoch": 3.3465824160891424, "grad_norm": 1.967461347579956, "learning_rate": 0.0005818470580241881, "loss": 3.5231, "step": 49255 }, { "epoch": 3.3469221361598045, "grad_norm": 1.3448500633239746, "learning_rate": 0.0005818045930153554, "loss": 3.5931, "step": 49260 }, { "epoch": 3.347261856230466, "grad_norm": 1.5590660572052002, "learning_rate": 0.0005817621280065226, "loss": 3.5313, "step": 49265 }, { "epoch": 3.3476015763011278, "grad_norm": 1.4017163515090942, "learning_rate": 0.0005817196629976899, 
"loss": 3.6708, "step": 49270 }, { "epoch": 3.34794129637179, "grad_norm": 1.1571223735809326, "learning_rate": 0.0005816771979888573, "loss": 3.5744, "step": 49275 }, { "epoch": 3.3482810164424515, "grad_norm": 1.519112467765808, "learning_rate": 0.0005816347329800245, "loss": 3.5942, "step": 49280 }, { "epoch": 3.348620736513113, "grad_norm": 1.1673845052719116, "learning_rate": 0.0005815922679711918, "loss": 3.5847, "step": 49285 }, { "epoch": 3.348960456583775, "grad_norm": 1.3213101625442505, "learning_rate": 0.000581549802962359, "loss": 3.5532, "step": 49290 }, { "epoch": 3.349300176654437, "grad_norm": 1.1744813919067383, "learning_rate": 0.0005815073379535263, "loss": 3.5706, "step": 49295 }, { "epoch": 3.3496398967250984, "grad_norm": 1.1051485538482666, "learning_rate": 0.0005814648729446936, "loss": 3.3722, "step": 49300 }, { "epoch": 3.3499796167957605, "grad_norm": 1.620341181755066, "learning_rate": 0.0005814224079358608, "loss": 3.4151, "step": 49305 }, { "epoch": 3.350319336866422, "grad_norm": 1.2523939609527588, "learning_rate": 0.0005813799429270282, "loss": 3.4691, "step": 49310 }, { "epoch": 3.3506590569370838, "grad_norm": 1.1067029237747192, "learning_rate": 0.0005813374779181955, "loss": 3.7153, "step": 49315 }, { "epoch": 3.350998777007746, "grad_norm": 2.6852619647979736, "learning_rate": 0.0005812950129093627, "loss": 3.6208, "step": 49320 }, { "epoch": 3.3513384970784075, "grad_norm": 1.4289439916610718, "learning_rate": 0.0005812525479005299, "loss": 3.3711, "step": 49325 }, { "epoch": 3.351678217149069, "grad_norm": 1.4255765676498413, "learning_rate": 0.0005812100828916973, "loss": 3.3233, "step": 49330 }, { "epoch": 3.352017937219731, "grad_norm": 1.6039526462554932, "learning_rate": 0.0005811676178828645, "loss": 3.3989, "step": 49335 }, { "epoch": 3.352357657290393, "grad_norm": 1.1862438917160034, "learning_rate": 0.0005811251528740317, "loss": 3.4853, "step": 49340 }, { "epoch": 3.3526973773610544, "grad_norm": 
1.493468999862671, "learning_rate": 0.0005810826878651992, "loss": 3.335, "step": 49345 }, { "epoch": 3.353037097431716, "grad_norm": 1.234584927558899, "learning_rate": 0.0005810402228563664, "loss": 3.3502, "step": 49350 }, { "epoch": 3.353376817502378, "grad_norm": 1.3903003931045532, "learning_rate": 0.0005809977578475336, "loss": 3.1858, "step": 49355 }, { "epoch": 3.35371653757304, "grad_norm": 1.5706448554992676, "learning_rate": 0.000580955292838701, "loss": 3.3608, "step": 49360 }, { "epoch": 3.3540562576437014, "grad_norm": 1.3372297286987305, "learning_rate": 0.0005809128278298682, "loss": 3.444, "step": 49365 }, { "epoch": 3.3543959777143635, "grad_norm": 1.125083327293396, "learning_rate": 0.0005808703628210354, "loss": 3.3687, "step": 49370 }, { "epoch": 3.354735697785025, "grad_norm": 1.2887418270111084, "learning_rate": 0.0005808278978122029, "loss": 3.3232, "step": 49375 }, { "epoch": 3.3550754178556867, "grad_norm": 1.3141906261444092, "learning_rate": 0.0005807854328033701, "loss": 3.4467, "step": 49380 }, { "epoch": 3.355415137926349, "grad_norm": 1.7114887237548828, "learning_rate": 0.0005807429677945373, "loss": 3.5322, "step": 49385 }, { "epoch": 3.3557548579970105, "grad_norm": 1.0408744812011719, "learning_rate": 0.0005807005027857046, "loss": 3.2522, "step": 49390 }, { "epoch": 3.356094578067672, "grad_norm": 1.3734198808670044, "learning_rate": 0.0005806580377768719, "loss": 3.6038, "step": 49395 }, { "epoch": 3.356434298138334, "grad_norm": 1.28118097782135, "learning_rate": 0.0005806155727680391, "loss": 3.588, "step": 49400 }, { "epoch": 3.356774018208996, "grad_norm": 1.1552209854125977, "learning_rate": 0.0005805731077592064, "loss": 3.4091, "step": 49405 }, { "epoch": 3.3571137382796574, "grad_norm": 1.2772817611694336, "learning_rate": 0.0005805306427503738, "loss": 3.7707, "step": 49410 }, { "epoch": 3.3574534583503195, "grad_norm": 1.0861116647720337, "learning_rate": 0.000580488177741541, "loss": 3.324, "step": 49415 }, { 
"epoch": 3.357793178420981, "grad_norm": 1.0531505346298218, "learning_rate": 0.0005804457127327083, "loss": 3.4465, "step": 49420 }, { "epoch": 3.3581328984916428, "grad_norm": 1.1892008781433105, "learning_rate": 0.0005804032477238755, "loss": 3.5172, "step": 49425 }, { "epoch": 3.358472618562305, "grad_norm": 1.1194803714752197, "learning_rate": 0.0005803607827150428, "loss": 3.4146, "step": 49430 }, { "epoch": 3.3588123386329665, "grad_norm": 1.2785807847976685, "learning_rate": 0.0005803183177062101, "loss": 3.1038, "step": 49435 }, { "epoch": 3.359152058703628, "grad_norm": 1.5909788608551025, "learning_rate": 0.0005802758526973773, "loss": 3.1455, "step": 49440 }, { "epoch": 3.35949177877429, "grad_norm": 1.3700419664382935, "learning_rate": 0.0005802333876885447, "loss": 3.1748, "step": 49445 }, { "epoch": 3.359831498844952, "grad_norm": 1.291382074356079, "learning_rate": 0.000580190922679712, "loss": 3.2511, "step": 49450 }, { "epoch": 3.3601712189156134, "grad_norm": 1.906327724456787, "learning_rate": 0.0005801484576708792, "loss": 3.7828, "step": 49455 }, { "epoch": 3.360510938986275, "grad_norm": 1.177183747291565, "learning_rate": 0.0005801059926620465, "loss": 3.4474, "step": 49460 }, { "epoch": 3.360850659056937, "grad_norm": 1.403319001197815, "learning_rate": 0.0005800635276532138, "loss": 3.2316, "step": 49465 }, { "epoch": 3.3611903791275988, "grad_norm": 1.5222511291503906, "learning_rate": 0.000580021062644381, "loss": 3.5268, "step": 49470 }, { "epoch": 3.3615300991982604, "grad_norm": 1.2257018089294434, "learning_rate": 0.0005799785976355482, "loss": 3.4787, "step": 49475 }, { "epoch": 3.3618698192689225, "grad_norm": 1.9388865232467651, "learning_rate": 0.0005799361326267157, "loss": 3.5214, "step": 49480 }, { "epoch": 3.362209539339584, "grad_norm": 1.4119316339492798, "learning_rate": 0.0005798936676178829, "loss": 3.314, "step": 49485 }, { "epoch": 3.3625492594102457, "grad_norm": 1.3000588417053223, "learning_rate": 
0.0005798512026090501, "loss": 3.5215, "step": 49490 }, { "epoch": 3.362888979480908, "grad_norm": 1.5915114879608154, "learning_rate": 0.0005798087376002175, "loss": 3.58, "step": 49495 }, { "epoch": 3.3632286995515694, "grad_norm": 1.6013081073760986, "learning_rate": 0.0005797662725913847, "loss": 3.5233, "step": 49500 }, { "epoch": 3.363568419622231, "grad_norm": 1.5125396251678467, "learning_rate": 0.0005797238075825519, "loss": 3.2916, "step": 49505 }, { "epoch": 3.363908139692893, "grad_norm": 1.4448820352554321, "learning_rate": 0.0005796813425737193, "loss": 3.3954, "step": 49510 }, { "epoch": 3.364247859763555, "grad_norm": 1.516122817993164, "learning_rate": 0.0005796388775648866, "loss": 3.7596, "step": 49515 }, { "epoch": 3.3645875798342164, "grad_norm": 1.5285632610321045, "learning_rate": 0.0005795964125560538, "loss": 3.2849, "step": 49520 }, { "epoch": 3.3649272999048785, "grad_norm": 1.3887360095977783, "learning_rate": 0.0005795539475472211, "loss": 3.5314, "step": 49525 }, { "epoch": 3.36526701997554, "grad_norm": 1.729385495185852, "learning_rate": 0.0005795114825383884, "loss": 3.5805, "step": 49530 }, { "epoch": 3.3656067400462018, "grad_norm": 1.3480879068374634, "learning_rate": 0.0005794690175295556, "loss": 3.7927, "step": 49535 }, { "epoch": 3.365946460116864, "grad_norm": 1.3031684160232544, "learning_rate": 0.0005794265525207229, "loss": 3.4595, "step": 49540 }, { "epoch": 3.3662861801875255, "grad_norm": 1.3340215682983398, "learning_rate": 0.0005793840875118902, "loss": 3.522, "step": 49545 }, { "epoch": 3.366625900258187, "grad_norm": 1.075974464416504, "learning_rate": 0.0005793416225030575, "loss": 3.5761, "step": 49550 }, { "epoch": 3.366965620328849, "grad_norm": 1.561795949935913, "learning_rate": 0.0005792991574942248, "loss": 3.6186, "step": 49555 }, { "epoch": 3.367305340399511, "grad_norm": 1.321130633354187, "learning_rate": 0.000579256692485392, "loss": 3.1578, "step": 49560 }, { "epoch": 3.3676450604701724, "grad_norm": 
1.5089776515960693, "learning_rate": 0.0005792142274765593, "loss": 3.4233, "step": 49565 }, { "epoch": 3.3679847805408345, "grad_norm": 1.7223379611968994, "learning_rate": 0.0005791717624677266, "loss": 3.5736, "step": 49570 }, { "epoch": 3.368324500611496, "grad_norm": 1.290091633796692, "learning_rate": 0.0005791292974588938, "loss": 3.3903, "step": 49575 }, { "epoch": 3.3686642206821578, "grad_norm": 1.867588758468628, "learning_rate": 0.0005790868324500611, "loss": 3.5309, "step": 49580 }, { "epoch": 3.36900394075282, "grad_norm": 1.8017112016677856, "learning_rate": 0.0005790443674412285, "loss": 3.2019, "step": 49585 }, { "epoch": 3.3693436608234815, "grad_norm": 0.9890075325965881, "learning_rate": 0.0005790019024323957, "loss": 3.4333, "step": 49590 }, { "epoch": 3.369683380894143, "grad_norm": 1.373399019241333, "learning_rate": 0.0005789594374235631, "loss": 3.4979, "step": 49595 }, { "epoch": 3.370023100964805, "grad_norm": 1.2951689958572388, "learning_rate": 0.0005789169724147303, "loss": 3.5025, "step": 49600 }, { "epoch": 3.370362821035467, "grad_norm": 1.2728087902069092, "learning_rate": 0.0005788745074058975, "loss": 3.5118, "step": 49605 }, { "epoch": 3.3707025411061284, "grad_norm": 1.149710774421692, "learning_rate": 0.0005788320423970649, "loss": 3.5332, "step": 49610 }, { "epoch": 3.3710422611767905, "grad_norm": 1.0566720962524414, "learning_rate": 0.0005787895773882321, "loss": 3.2417, "step": 49615 }, { "epoch": 3.371381981247452, "grad_norm": 1.35402512550354, "learning_rate": 0.0005787471123793994, "loss": 3.4364, "step": 49620 }, { "epoch": 3.3717217013181138, "grad_norm": 1.8102084398269653, "learning_rate": 0.0005787046473705667, "loss": 3.4367, "step": 49625 }, { "epoch": 3.372061421388776, "grad_norm": 1.6369088888168335, "learning_rate": 0.000578662182361734, "loss": 3.5622, "step": 49630 }, { "epoch": 3.3724011414594375, "grad_norm": 1.3983074426651, "learning_rate": 0.0005786197173529012, "loss": 3.4231, "step": 49635 }, { 
"epoch": 3.372740861530099, "grad_norm": 1.2246201038360596, "learning_rate": 0.0005785772523440685, "loss": 3.3718, "step": 49640 }, { "epoch": 3.373080581600761, "grad_norm": 1.2359578609466553, "learning_rate": 0.0005785347873352358, "loss": 3.3783, "step": 49645 }, { "epoch": 3.373420301671423, "grad_norm": 1.0058609247207642, "learning_rate": 0.000578492322326403, "loss": 3.582, "step": 49650 }, { "epoch": 3.3737600217420844, "grad_norm": 1.538144588470459, "learning_rate": 0.0005784498573175704, "loss": 3.6111, "step": 49655 }, { "epoch": 3.3740997418127465, "grad_norm": 1.4533021450042725, "learning_rate": 0.0005784073923087377, "loss": 3.2907, "step": 49660 }, { "epoch": 3.374439461883408, "grad_norm": 1.2633596658706665, "learning_rate": 0.0005783649272999049, "loss": 3.466, "step": 49665 }, { "epoch": 3.37477918195407, "grad_norm": 1.1178169250488281, "learning_rate": 0.0005783224622910722, "loss": 3.5391, "step": 49670 }, { "epoch": 3.375118902024732, "grad_norm": 1.1728163957595825, "learning_rate": 0.0005782799972822394, "loss": 3.4002, "step": 49675 }, { "epoch": 3.3754586220953935, "grad_norm": 1.3680320978164673, "learning_rate": 0.0005782375322734067, "loss": 3.2841, "step": 49680 }, { "epoch": 3.375798342166055, "grad_norm": 1.125982642173767, "learning_rate": 0.000578195067264574, "loss": 3.51, "step": 49685 }, { "epoch": 3.3761380622367168, "grad_norm": 1.628048062324524, "learning_rate": 0.0005781526022557413, "loss": 3.3561, "step": 49690 }, { "epoch": 3.376477782307379, "grad_norm": 1.419793725013733, "learning_rate": 0.0005781101372469086, "loss": 3.6692, "step": 49695 }, { "epoch": 3.3768175023780405, "grad_norm": 1.4998795986175537, "learning_rate": 0.0005780676722380759, "loss": 3.4569, "step": 49700 }, { "epoch": 3.377157222448702, "grad_norm": 1.2167028188705444, "learning_rate": 0.0005780252072292431, "loss": 3.3356, "step": 49705 }, { "epoch": 3.377496942519364, "grad_norm": 1.268545150756836, "learning_rate": 0.0005779827422204103, 
"loss": 3.3379, "step": 49710 }, { "epoch": 3.377836662590026, "grad_norm": 1.0596715211868286, "learning_rate": 0.0005779402772115777, "loss": 3.3805, "step": 49715 }, { "epoch": 3.3781763826606874, "grad_norm": 1.3043413162231445, "learning_rate": 0.0005778978122027449, "loss": 3.5828, "step": 49720 }, { "epoch": 3.3785161027313495, "grad_norm": 1.4170364141464233, "learning_rate": 0.0005778553471939122, "loss": 3.3618, "step": 49725 }, { "epoch": 3.378855822802011, "grad_norm": 1.2665886878967285, "learning_rate": 0.0005778128821850796, "loss": 3.4774, "step": 49730 }, { "epoch": 3.3791955428726728, "grad_norm": 1.47158682346344, "learning_rate": 0.0005777704171762468, "loss": 3.4469, "step": 49735 }, { "epoch": 3.379535262943335, "grad_norm": 1.3350129127502441, "learning_rate": 0.000577727952167414, "loss": 3.5016, "step": 49740 }, { "epoch": 3.3798749830139965, "grad_norm": 1.5372989177703857, "learning_rate": 0.0005776854871585814, "loss": 3.3984, "step": 49745 }, { "epoch": 3.380214703084658, "grad_norm": 1.4961894750595093, "learning_rate": 0.0005776430221497486, "loss": 3.4307, "step": 49750 }, { "epoch": 3.38055442315532, "grad_norm": 1.3626686334609985, "learning_rate": 0.0005776005571409158, "loss": 3.4295, "step": 49755 }, { "epoch": 3.380894143225982, "grad_norm": 1.5987142324447632, "learning_rate": 0.0005775580921320833, "loss": 3.2327, "step": 49760 }, { "epoch": 3.3812338632966434, "grad_norm": 1.415621280670166, "learning_rate": 0.0005775156271232505, "loss": 3.5859, "step": 49765 }, { "epoch": 3.3815735833673055, "grad_norm": 1.1499804258346558, "learning_rate": 0.0005774731621144177, "loss": 3.2884, "step": 49770 }, { "epoch": 3.381913303437967, "grad_norm": 1.1542809009552002, "learning_rate": 0.000577430697105585, "loss": 3.5747, "step": 49775 }, { "epoch": 3.3822530235086288, "grad_norm": 1.492786169052124, "learning_rate": 0.0005773882320967523, "loss": 3.6036, "step": 49780 }, { "epoch": 3.382592743579291, "grad_norm": 1.0429388284683228, 
"learning_rate": 0.0005773457670879195, "loss": 3.4726, "step": 49785 }, { "epoch": 3.3829324636499525, "grad_norm": 1.045029878616333, "learning_rate": 0.0005773033020790868, "loss": 3.5746, "step": 49790 }, { "epoch": 3.383272183720614, "grad_norm": 1.2112700939178467, "learning_rate": 0.0005772608370702542, "loss": 3.5135, "step": 49795 }, { "epoch": 3.3836119037912757, "grad_norm": 1.2263048887252808, "learning_rate": 0.0005772183720614214, "loss": 3.5865, "step": 49800 }, { "epoch": 3.383951623861938, "grad_norm": 1.137831687927246, "learning_rate": 0.0005771759070525887, "loss": 3.1792, "step": 49805 }, { "epoch": 3.3842913439325994, "grad_norm": 1.1745635271072388, "learning_rate": 0.000577133442043756, "loss": 3.6216, "step": 49810 }, { "epoch": 3.384631064003261, "grad_norm": 1.1695421934127808, "learning_rate": 0.0005770909770349232, "loss": 3.4582, "step": 49815 }, { "epoch": 3.384970784073923, "grad_norm": 1.1516623497009277, "learning_rate": 0.0005770485120260905, "loss": 3.3645, "step": 49820 }, { "epoch": 3.385310504144585, "grad_norm": 1.109035849571228, "learning_rate": 0.0005770060470172577, "loss": 3.4684, "step": 49825 }, { "epoch": 3.3856502242152464, "grad_norm": 1.6238152980804443, "learning_rate": 0.0005769635820084251, "loss": 3.6616, "step": 49830 }, { "epoch": 3.3859899442859085, "grad_norm": 1.676144003868103, "learning_rate": 0.0005769211169995924, "loss": 3.8039, "step": 49835 }, { "epoch": 3.38632966435657, "grad_norm": 1.1333677768707275, "learning_rate": 0.0005768786519907596, "loss": 3.3027, "step": 49840 }, { "epoch": 3.3866693844272318, "grad_norm": 1.266785979270935, "learning_rate": 0.0005768361869819269, "loss": 3.2919, "step": 49845 }, { "epoch": 3.387009104497894, "grad_norm": 1.333305835723877, "learning_rate": 0.0005767937219730942, "loss": 3.6224, "step": 49850 }, { "epoch": 3.3873488245685555, "grad_norm": 1.316231369972229, "learning_rate": 0.0005767512569642614, "loss": 3.5015, "step": 49855 }, { "epoch": 
3.387688544639217, "grad_norm": 1.4493193626403809, "learning_rate": 0.0005767087919554286, "loss": 3.4462, "step": 49860 }, { "epoch": 3.388028264709879, "grad_norm": 1.1821244955062866, "learning_rate": 0.0005766663269465961, "loss": 3.4887, "step": 49865 }, { "epoch": 3.388367984780541, "grad_norm": 1.4553802013397217, "learning_rate": 0.0005766238619377633, "loss": 3.5646, "step": 49870 }, { "epoch": 3.3887077048512024, "grad_norm": 1.2203354835510254, "learning_rate": 0.0005765813969289305, "loss": 3.6857, "step": 49875 }, { "epoch": 3.3890474249218645, "grad_norm": 1.381639838218689, "learning_rate": 0.0005765389319200979, "loss": 3.5267, "step": 49880 }, { "epoch": 3.389387144992526, "grad_norm": 1.4284818172454834, "learning_rate": 0.0005764964669112651, "loss": 3.4155, "step": 49885 }, { "epoch": 3.3897268650631878, "grad_norm": 2.495204448699951, "learning_rate": 0.0005764540019024323, "loss": 3.3113, "step": 49890 }, { "epoch": 3.39006658513385, "grad_norm": 1.3130764961242676, "learning_rate": 0.0005764115368935997, "loss": 3.5865, "step": 49895 }, { "epoch": 3.3904063052045115, "grad_norm": 1.4326688051223755, "learning_rate": 0.000576369071884767, "loss": 3.6533, "step": 49900 }, { "epoch": 3.390746025275173, "grad_norm": 1.4337700605392456, "learning_rate": 0.0005763266068759342, "loss": 3.3787, "step": 49905 }, { "epoch": 3.391085745345835, "grad_norm": 1.4441194534301758, "learning_rate": 0.0005762841418671015, "loss": 3.4114, "step": 49910 }, { "epoch": 3.391425465416497, "grad_norm": 1.6359649896621704, "learning_rate": 0.0005762416768582688, "loss": 3.3282, "step": 49915 }, { "epoch": 3.3917651854871584, "grad_norm": 1.695758581161499, "learning_rate": 0.000576199211849436, "loss": 3.2991, "step": 49920 }, { "epoch": 3.3921049055578205, "grad_norm": 1.3860535621643066, "learning_rate": 0.0005761567468406033, "loss": 3.5241, "step": 49925 }, { "epoch": 3.392444625628482, "grad_norm": 1.1614736318588257, "learning_rate": 0.0005761142818317706, 
"loss": 3.5653, "step": 49930 }, { "epoch": 3.3927843456991438, "grad_norm": 1.4618951082229614, "learning_rate": 0.000576071816822938, "loss": 3.6407, "step": 49935 }, { "epoch": 3.393124065769806, "grad_norm": 1.7616382837295532, "learning_rate": 0.0005760293518141052, "loss": 3.3894, "step": 49940 }, { "epoch": 3.3934637858404675, "grad_norm": 1.1614878177642822, "learning_rate": 0.0005759868868052725, "loss": 3.4678, "step": 49945 }, { "epoch": 3.393803505911129, "grad_norm": 1.2935291528701782, "learning_rate": 0.0005759444217964398, "loss": 3.4281, "step": 49950 }, { "epoch": 3.394143225981791, "grad_norm": 1.1393394470214844, "learning_rate": 0.000575901956787607, "loss": 3.6111, "step": 49955 }, { "epoch": 3.394482946052453, "grad_norm": 1.099695086479187, "learning_rate": 0.0005758594917787742, "loss": 3.4923, "step": 49960 }, { "epoch": 3.3948226661231145, "grad_norm": 1.3341305255889893, "learning_rate": 0.0005758170267699417, "loss": 3.5665, "step": 49965 }, { "epoch": 3.3951623861937765, "grad_norm": 1.2488105297088623, "learning_rate": 0.0005757745617611089, "loss": 3.2258, "step": 49970 }, { "epoch": 3.395502106264438, "grad_norm": 1.0343233346939087, "learning_rate": 0.0005757320967522761, "loss": 3.4772, "step": 49975 }, { "epoch": 3.3958418263351, "grad_norm": 1.3680561780929565, "learning_rate": 0.0005756896317434435, "loss": 3.535, "step": 49980 }, { "epoch": 3.396181546405762, "grad_norm": 1.2158905267715454, "learning_rate": 0.0005756471667346107, "loss": 3.4051, "step": 49985 }, { "epoch": 3.3965212664764235, "grad_norm": 1.1835906505584717, "learning_rate": 0.0005756047017257779, "loss": 3.663, "step": 49990 }, { "epoch": 3.396860986547085, "grad_norm": 1.0388946533203125, "learning_rate": 0.0005755622367169453, "loss": 3.3429, "step": 49995 }, { "epoch": 3.397200706617747, "grad_norm": 1.6579532623291016, "learning_rate": 0.0005755197717081126, "loss": 3.5985, "step": 50000 }, { "epoch": 3.397540426688409, "grad_norm": 1.4394304752349854, 
"learning_rate": 0.0005754773066992798, "loss": 3.5815, "step": 50005 }, { "epoch": 3.3978801467590705, "grad_norm": 1.660104751586914, "learning_rate": 0.0005754348416904472, "loss": 3.6169, "step": 50010 }, { "epoch": 3.3982198668297325, "grad_norm": 1.2858930826187134, "learning_rate": 0.0005753923766816144, "loss": 3.6289, "step": 50015 }, { "epoch": 3.398559586900394, "grad_norm": 1.2357358932495117, "learning_rate": 0.0005753499116727816, "loss": 3.4058, "step": 50020 }, { "epoch": 3.398899306971056, "grad_norm": 2.9792232513427734, "learning_rate": 0.0005753074466639489, "loss": 3.5498, "step": 50025 }, { "epoch": 3.3992390270417174, "grad_norm": 1.0504019260406494, "learning_rate": 0.0005752649816551162, "loss": 3.4207, "step": 50030 }, { "epoch": 3.3995787471123795, "grad_norm": 1.222243070602417, "learning_rate": 0.0005752225166462835, "loss": 3.4648, "step": 50035 }, { "epoch": 3.399918467183041, "grad_norm": 1.281237244606018, "learning_rate": 0.0005751800516374508, "loss": 3.6347, "step": 50040 }, { "epoch": 3.4002581872537028, "grad_norm": 1.4043394327163696, "learning_rate": 0.0005751375866286181, "loss": 3.3869, "step": 50045 }, { "epoch": 3.400597907324365, "grad_norm": 1.1200801134109497, "learning_rate": 0.0005750951216197853, "loss": 3.4949, "step": 50050 }, { "epoch": 3.4009376273950265, "grad_norm": 1.2414058446884155, "learning_rate": 0.0005750526566109526, "loss": 3.5195, "step": 50055 }, { "epoch": 3.401277347465688, "grad_norm": 1.1687045097351074, "learning_rate": 0.0005750101916021198, "loss": 3.3422, "step": 50060 }, { "epoch": 3.40161706753635, "grad_norm": 1.2386841773986816, "learning_rate": 0.0005749677265932871, "loss": 3.5486, "step": 50065 }, { "epoch": 3.401956787607012, "grad_norm": 1.2944667339324951, "learning_rate": 0.0005749252615844545, "loss": 3.4499, "step": 50070 }, { "epoch": 3.4022965076776734, "grad_norm": 1.396573543548584, "learning_rate": 0.0005748827965756217, "loss": 3.5046, "step": 50075 }, { "epoch": 
3.4026362277483355, "grad_norm": 1.2259262800216675, "learning_rate": 0.000574840331566789, "loss": 3.1898, "step": 50080 }, { "epoch": 3.402975947818997, "grad_norm": 1.160997986793518, "learning_rate": 0.0005747978665579563, "loss": 3.7163, "step": 50085 }, { "epoch": 3.403315667889659, "grad_norm": 1.2059073448181152, "learning_rate": 0.0005747554015491235, "loss": 3.3852, "step": 50090 }, { "epoch": 3.403655387960321, "grad_norm": 1.607069730758667, "learning_rate": 0.0005747129365402907, "loss": 3.6589, "step": 50095 }, { "epoch": 3.4039951080309825, "grad_norm": 1.250307559967041, "learning_rate": 0.0005746704715314581, "loss": 3.473, "step": 50100 }, { "epoch": 3.404334828101644, "grad_norm": 1.6832752227783203, "learning_rate": 0.0005746280065226254, "loss": 3.5433, "step": 50105 }, { "epoch": 3.404674548172306, "grad_norm": 1.4374948740005493, "learning_rate": 0.0005745855415137926, "loss": 3.6213, "step": 50110 }, { "epoch": 3.405014268242968, "grad_norm": 1.4458249807357788, "learning_rate": 0.00057454307650496, "loss": 3.5829, "step": 50115 }, { "epoch": 3.4053539883136295, "grad_norm": 1.1773266792297363, "learning_rate": 0.0005745006114961272, "loss": 3.5917, "step": 50120 }, { "epoch": 3.4056937083842915, "grad_norm": 1.4783413410186768, "learning_rate": 0.0005744581464872944, "loss": 3.5231, "step": 50125 }, { "epoch": 3.406033428454953, "grad_norm": 1.3204275369644165, "learning_rate": 0.0005744156814784618, "loss": 3.6182, "step": 50130 }, { "epoch": 3.406373148525615, "grad_norm": 1.2678850889205933, "learning_rate": 0.000574373216469629, "loss": 3.4368, "step": 50135 }, { "epoch": 3.4067128685962764, "grad_norm": 1.311474323272705, "learning_rate": 0.0005743307514607963, "loss": 3.5906, "step": 50140 }, { "epoch": 3.4070525886669385, "grad_norm": 1.1855343580245972, "learning_rate": 0.0005742882864519637, "loss": 3.4839, "step": 50145 }, { "epoch": 3.4073923087376, "grad_norm": 1.287708044052124, "learning_rate": 0.0005742458214431309, "loss": 
3.4288, "step": 50150 }, { "epoch": 3.4077320288082618, "grad_norm": 1.4626836776733398, "learning_rate": 0.0005742033564342981, "loss": 3.6823, "step": 50155 }, { "epoch": 3.408071748878924, "grad_norm": 1.2595981359481812, "learning_rate": 0.0005741608914254654, "loss": 3.4177, "step": 50160 }, { "epoch": 3.4084114689495855, "grad_norm": 1.310271143913269, "learning_rate": 0.0005741184264166327, "loss": 3.7619, "step": 50165 }, { "epoch": 3.408751189020247, "grad_norm": 1.2071722745895386, "learning_rate": 0.0005740759614077999, "loss": 3.4972, "step": 50170 }, { "epoch": 3.409090909090909, "grad_norm": 3.552145004272461, "learning_rate": 0.0005740334963989673, "loss": 3.3753, "step": 50175 }, { "epoch": 3.409430629161571, "grad_norm": 1.2502816915512085, "learning_rate": 0.0005739910313901346, "loss": 3.4091, "step": 50180 }, { "epoch": 3.4097703492322324, "grad_norm": 1.4999337196350098, "learning_rate": 0.0005739485663813018, "loss": 3.4904, "step": 50185 }, { "epoch": 3.4101100693028945, "grad_norm": 1.463274598121643, "learning_rate": 0.0005739061013724691, "loss": 3.4477, "step": 50190 }, { "epoch": 3.410449789373556, "grad_norm": 1.2915788888931274, "learning_rate": 0.0005738636363636364, "loss": 3.2626, "step": 50195 }, { "epoch": 3.4107895094442178, "grad_norm": 1.613418698310852, "learning_rate": 0.0005738211713548036, "loss": 3.6943, "step": 50200 }, { "epoch": 3.41112922951488, "grad_norm": 1.7326921224594116, "learning_rate": 0.0005737787063459709, "loss": 3.702, "step": 50205 }, { "epoch": 3.4114689495855415, "grad_norm": 1.082932949066162, "learning_rate": 0.0005737362413371382, "loss": 3.2289, "step": 50210 }, { "epoch": 3.411808669656203, "grad_norm": 1.301122784614563, "learning_rate": 0.0005736937763283055, "loss": 3.6461, "step": 50215 }, { "epoch": 3.412148389726865, "grad_norm": 1.273446798324585, "learning_rate": 0.0005736513113194728, "loss": 3.4913, "step": 50220 }, { "epoch": 3.412488109797527, "grad_norm": 1.7230560779571533, 
"learning_rate": 0.00057360884631064, "loss": 3.4139, "step": 50225 }, { "epoch": 3.4128278298681884, "grad_norm": 1.6487972736358643, "learning_rate": 0.0005735663813018073, "loss": 3.3645, "step": 50230 }, { "epoch": 3.4131675499388505, "grad_norm": 1.4487314224243164, "learning_rate": 0.0005735239162929746, "loss": 3.317, "step": 50235 }, { "epoch": 3.413507270009512, "grad_norm": 1.219151258468628, "learning_rate": 0.0005734814512841418, "loss": 3.4789, "step": 50240 }, { "epoch": 3.413846990080174, "grad_norm": 1.5889798402786255, "learning_rate": 0.0005734389862753092, "loss": 3.2409, "step": 50245 }, { "epoch": 3.414186710150836, "grad_norm": 1.2153314352035522, "learning_rate": 0.0005733965212664765, "loss": 3.5692, "step": 50250 }, { "epoch": 3.4145264302214975, "grad_norm": 1.3659968376159668, "learning_rate": 0.0005733540562576437, "loss": 3.3406, "step": 50255 }, { "epoch": 3.414866150292159, "grad_norm": 1.4137370586395264, "learning_rate": 0.0005733115912488109, "loss": 3.5743, "step": 50260 }, { "epoch": 3.415205870362821, "grad_norm": 1.3983155488967896, "learning_rate": 0.0005732691262399783, "loss": 3.7195, "step": 50265 }, { "epoch": 3.415545590433483, "grad_norm": 1.5514971017837524, "learning_rate": 0.0005732266612311455, "loss": 3.5696, "step": 50270 }, { "epoch": 3.4158853105041445, "grad_norm": 1.5190372467041016, "learning_rate": 0.0005731841962223128, "loss": 3.5242, "step": 50275 }, { "epoch": 3.4162250305748065, "grad_norm": 1.1231648921966553, "learning_rate": 0.0005731417312134802, "loss": 3.666, "step": 50280 }, { "epoch": 3.416564750645468, "grad_norm": 1.363665223121643, "learning_rate": 0.0005730992662046474, "loss": 3.483, "step": 50285 }, { "epoch": 3.41690447071613, "grad_norm": 1.1498637199401855, "learning_rate": 0.0005730568011958147, "loss": 3.5765, "step": 50290 }, { "epoch": 3.417244190786792, "grad_norm": 1.2612228393554688, "learning_rate": 0.000573014336186982, "loss": 3.4232, "step": 50295 }, { "epoch": 
3.4175839108574535, "grad_norm": 1.1431989669799805, "learning_rate": 0.0005729718711781492, "loss": 3.444, "step": 50300 }, { "epoch": 3.417923630928115, "grad_norm": 1.0861821174621582, "learning_rate": 0.0005729294061693165, "loss": 3.3697, "step": 50305 }, { "epoch": 3.418263350998777, "grad_norm": 1.2203004360198975, "learning_rate": 0.0005728869411604837, "loss": 3.5522, "step": 50310 }, { "epoch": 3.418603071069439, "grad_norm": 1.3861874341964722, "learning_rate": 0.0005728444761516511, "loss": 3.3245, "step": 50315 }, { "epoch": 3.4189427911401005, "grad_norm": 1.3773669004440308, "learning_rate": 0.0005728020111428184, "loss": 3.543, "step": 50320 }, { "epoch": 3.4192825112107625, "grad_norm": 1.3298088312149048, "learning_rate": 0.0005727595461339856, "loss": 3.3828, "step": 50325 }, { "epoch": 3.419622231281424, "grad_norm": 1.575574278831482, "learning_rate": 0.0005727170811251529, "loss": 3.701, "step": 50330 }, { "epoch": 3.419961951352086, "grad_norm": 1.1961450576782227, "learning_rate": 0.0005726746161163202, "loss": 3.2958, "step": 50335 }, { "epoch": 3.420301671422748, "grad_norm": 1.6963003873825073, "learning_rate": 0.0005726321511074874, "loss": 3.6727, "step": 50340 }, { "epoch": 3.4206413914934095, "grad_norm": 1.3543283939361572, "learning_rate": 0.0005725896860986546, "loss": 3.5774, "step": 50345 }, { "epoch": 3.420981111564071, "grad_norm": 1.4407975673675537, "learning_rate": 0.0005725472210898221, "loss": 3.5081, "step": 50350 }, { "epoch": 3.421320831634733, "grad_norm": 1.392683506011963, "learning_rate": 0.0005725047560809893, "loss": 3.5269, "step": 50355 }, { "epoch": 3.421660551705395, "grad_norm": 1.4796398878097534, "learning_rate": 0.0005724622910721565, "loss": 3.3385, "step": 50360 }, { "epoch": 3.4220002717760565, "grad_norm": 1.0578852891921997, "learning_rate": 0.0005724198260633239, "loss": 3.4281, "step": 50365 }, { "epoch": 3.422339991846718, "grad_norm": 1.6255642175674438, "learning_rate": 0.0005723773610544911, 
"loss": 3.4923, "step": 50370 }, { "epoch": 3.42267971191738, "grad_norm": 1.641464352607727, "learning_rate": 0.0005723348960456583, "loss": 3.5836, "step": 50375 }, { "epoch": 3.423019431988042, "grad_norm": 1.4742860794067383, "learning_rate": 0.0005722924310368257, "loss": 3.2516, "step": 50380 }, { "epoch": 3.4233591520587034, "grad_norm": 2.2061896324157715, "learning_rate": 0.000572249966027993, "loss": 3.6952, "step": 50385 }, { "epoch": 3.4236988721293655, "grad_norm": 1.3364408016204834, "learning_rate": 0.0005722075010191602, "loss": 3.3556, "step": 50390 }, { "epoch": 3.424038592200027, "grad_norm": 1.1121355295181274, "learning_rate": 0.0005721650360103276, "loss": 3.4408, "step": 50395 }, { "epoch": 3.424378312270689, "grad_norm": 1.0926823616027832, "learning_rate": 0.0005721225710014948, "loss": 3.4348, "step": 50400 }, { "epoch": 3.424718032341351, "grad_norm": 1.2101831436157227, "learning_rate": 0.000572080105992662, "loss": 3.2732, "step": 50405 }, { "epoch": 3.4250577524120125, "grad_norm": 1.8721758127212524, "learning_rate": 0.0005720376409838293, "loss": 3.5329, "step": 50410 }, { "epoch": 3.425397472482674, "grad_norm": 1.3247374296188354, "learning_rate": 0.0005719951759749966, "loss": 3.3271, "step": 50415 }, { "epoch": 3.425737192553336, "grad_norm": 1.3314069509506226, "learning_rate": 0.0005719527109661639, "loss": 3.5035, "step": 50420 }, { "epoch": 3.426076912623998, "grad_norm": 1.3740015029907227, "learning_rate": 0.0005719102459573312, "loss": 3.5809, "step": 50425 }, { "epoch": 3.4264166326946595, "grad_norm": 2.899756669998169, "learning_rate": 0.0005718677809484985, "loss": 3.5613, "step": 50430 }, { "epoch": 3.4267563527653215, "grad_norm": 1.094646692276001, "learning_rate": 0.0005718253159396657, "loss": 3.3359, "step": 50435 }, { "epoch": 3.427096072835983, "grad_norm": 1.4616988897323608, "learning_rate": 0.000571782850930833, "loss": 3.248, "step": 50440 }, { "epoch": 3.427435792906645, "grad_norm": 1.1730880737304688, 
"learning_rate": 0.0005717403859220002, "loss": 3.4584, "step": 50445 }, { "epoch": 3.427775512977307, "grad_norm": 1.676627516746521, "learning_rate": 0.0005716979209131675, "loss": 3.3155, "step": 50450 }, { "epoch": 3.4281152330479685, "grad_norm": 1.0943700075149536, "learning_rate": 0.0005716554559043349, "loss": 3.4063, "step": 50455 }, { "epoch": 3.42845495311863, "grad_norm": 1.2015149593353271, "learning_rate": 0.0005716129908955021, "loss": 3.574, "step": 50460 }, { "epoch": 3.428794673189292, "grad_norm": 1.4010413885116577, "learning_rate": 0.0005715705258866694, "loss": 3.1895, "step": 50465 }, { "epoch": 3.429134393259954, "grad_norm": 1.2162193059921265, "learning_rate": 0.0005715280608778367, "loss": 3.4135, "step": 50470 }, { "epoch": 3.4294741133306155, "grad_norm": 1.4449574947357178, "learning_rate": 0.0005714855958690039, "loss": 3.5166, "step": 50475 }, { "epoch": 3.429813833401277, "grad_norm": 1.2974358797073364, "learning_rate": 0.0005714431308601712, "loss": 3.5729, "step": 50480 }, { "epoch": 3.430153553471939, "grad_norm": 1.2648792266845703, "learning_rate": 0.0005714006658513386, "loss": 3.5477, "step": 50485 }, { "epoch": 3.430493273542601, "grad_norm": 1.270859718322754, "learning_rate": 0.0005713582008425058, "loss": 3.4763, "step": 50490 }, { "epoch": 3.4308329936132624, "grad_norm": 1.5433177947998047, "learning_rate": 0.000571315735833673, "loss": 3.2946, "step": 50495 }, { "epoch": 3.4311727136839245, "grad_norm": 1.1634653806686401, "learning_rate": 0.0005712732708248404, "loss": 3.6586, "step": 50500 }, { "epoch": 3.431512433754586, "grad_norm": 1.1718686819076538, "learning_rate": 0.0005712308058160076, "loss": 3.412, "step": 50505 }, { "epoch": 3.4318521538252478, "grad_norm": 1.3443186283111572, "learning_rate": 0.0005711883408071748, "loss": 3.2483, "step": 50510 }, { "epoch": 3.43219187389591, "grad_norm": 1.6307438611984253, "learning_rate": 0.0005711458757983422, "loss": 3.504, "step": 50515 }, { "epoch": 
3.4325315939665715, "grad_norm": 1.6323857307434082, "learning_rate": 0.0005711034107895095, "loss": 3.4483, "step": 50520 }, { "epoch": 3.432871314037233, "grad_norm": 1.302114725112915, "learning_rate": 0.0005710609457806767, "loss": 3.154, "step": 50525 }, { "epoch": 3.433211034107895, "grad_norm": 1.2330435514450073, "learning_rate": 0.0005710184807718441, "loss": 3.5235, "step": 50530 }, { "epoch": 3.433550754178557, "grad_norm": 1.5071204900741577, "learning_rate": 0.0005709760157630113, "loss": 3.2709, "step": 50535 }, { "epoch": 3.4338904742492184, "grad_norm": 2.613896369934082, "learning_rate": 0.0005709335507541785, "loss": 3.5193, "step": 50540 }, { "epoch": 3.4342301943198805, "grad_norm": 1.2872587442398071, "learning_rate": 0.0005708910857453458, "loss": 3.4276, "step": 50545 }, { "epoch": 3.434569914390542, "grad_norm": 1.3092119693756104, "learning_rate": 0.0005708486207365131, "loss": 3.5965, "step": 50550 }, { "epoch": 3.434909634461204, "grad_norm": 1.212666392326355, "learning_rate": 0.0005708061557276804, "loss": 3.444, "step": 50555 }, { "epoch": 3.435249354531866, "grad_norm": 1.2602595090866089, "learning_rate": 0.0005707636907188477, "loss": 3.6612, "step": 50560 }, { "epoch": 3.4355890746025275, "grad_norm": 1.1880501508712769, "learning_rate": 0.000570721225710015, "loss": 3.4875, "step": 50565 }, { "epoch": 3.435928794673189, "grad_norm": 1.1388282775878906, "learning_rate": 0.0005706787607011822, "loss": 3.423, "step": 50570 }, { "epoch": 3.436268514743851, "grad_norm": 1.2888504266738892, "learning_rate": 0.0005706362956923495, "loss": 3.3383, "step": 50575 }, { "epoch": 3.436608234814513, "grad_norm": 1.5368314981460571, "learning_rate": 0.0005705938306835168, "loss": 3.4325, "step": 50580 }, { "epoch": 3.4369479548851745, "grad_norm": 1.6075918674468994, "learning_rate": 0.000570551365674684, "loss": 3.6054, "step": 50585 }, { "epoch": 3.4372876749558365, "grad_norm": 1.7640438079833984, "learning_rate": 0.0005705089006658514, 
"loss": 3.5018, "step": 50590 }, { "epoch": 3.437627395026498, "grad_norm": 1.3793622255325317, "learning_rate": 0.0005704664356570186, "loss": 3.6487, "step": 50595 }, { "epoch": 3.43796711509716, "grad_norm": 1.0379916429519653, "learning_rate": 0.0005704239706481859, "loss": 3.6523, "step": 50600 }, { "epoch": 3.438306835167822, "grad_norm": 1.046899437904358, "learning_rate": 0.0005703815056393532, "loss": 3.5609, "step": 50605 }, { "epoch": 3.4386465552384835, "grad_norm": 1.3107860088348389, "learning_rate": 0.0005703390406305204, "loss": 3.5307, "step": 50610 }, { "epoch": 3.438986275309145, "grad_norm": 1.0431174039840698, "learning_rate": 0.0005702965756216878, "loss": 3.171, "step": 50615 }, { "epoch": 3.439325995379807, "grad_norm": 1.4254270792007446, "learning_rate": 0.000570254110612855, "loss": 3.5959, "step": 50620 }, { "epoch": 3.439665715450469, "grad_norm": 1.2729519605636597, "learning_rate": 0.0005702116456040223, "loss": 3.5023, "step": 50625 }, { "epoch": 3.4400054355211305, "grad_norm": 1.3578965663909912, "learning_rate": 0.0005701691805951897, "loss": 3.7161, "step": 50630 }, { "epoch": 3.4403451555917925, "grad_norm": 1.5996711254119873, "learning_rate": 0.0005701267155863569, "loss": 3.4563, "step": 50635 }, { "epoch": 3.440684875662454, "grad_norm": 1.1442066431045532, "learning_rate": 0.0005700842505775241, "loss": 3.546, "step": 50640 }, { "epoch": 3.441024595733116, "grad_norm": 1.7717666625976562, "learning_rate": 0.0005700417855686914, "loss": 3.6653, "step": 50645 }, { "epoch": 3.441364315803778, "grad_norm": 1.23162043094635, "learning_rate": 0.0005699993205598587, "loss": 3.4418, "step": 50650 }, { "epoch": 3.4417040358744395, "grad_norm": 1.6015037298202515, "learning_rate": 0.0005699568555510259, "loss": 3.5914, "step": 50655 }, { "epoch": 3.442043755945101, "grad_norm": 1.125343680381775, "learning_rate": 0.0005699143905421933, "loss": 3.5562, "step": 50660 }, { "epoch": 3.442383476015763, "grad_norm": 0.9587329030036926, 
"learning_rate": 0.0005698719255333606, "loss": 3.6743, "step": 50665 }, { "epoch": 3.442723196086425, "grad_norm": 1.2424896955490112, "learning_rate": 0.0005698294605245278, "loss": 3.4225, "step": 50670 }, { "epoch": 3.4430629161570865, "grad_norm": 1.255939245223999, "learning_rate": 0.0005697869955156951, "loss": 3.3884, "step": 50675 }, { "epoch": 3.4434026362277486, "grad_norm": 1.2697255611419678, "learning_rate": 0.0005697445305068624, "loss": 3.5385, "step": 50680 }, { "epoch": 3.44374235629841, "grad_norm": 1.3508368730545044, "learning_rate": 0.0005697020654980296, "loss": 3.5792, "step": 50685 }, { "epoch": 3.444082076369072, "grad_norm": 1.4244109392166138, "learning_rate": 0.0005696596004891969, "loss": 3.5735, "step": 50690 }, { "epoch": 3.444421796439734, "grad_norm": 1.3018338680267334, "learning_rate": 0.0005696171354803643, "loss": 3.5079, "step": 50695 }, { "epoch": 3.4447615165103955, "grad_norm": 0.9809957146644592, "learning_rate": 0.0005695746704715315, "loss": 3.4135, "step": 50700 }, { "epoch": 3.445101236581057, "grad_norm": 1.3081501722335815, "learning_rate": 0.0005695322054626988, "loss": 3.3719, "step": 50705 }, { "epoch": 3.4454409566517192, "grad_norm": 1.2598243951797485, "learning_rate": 0.000569489740453866, "loss": 3.6256, "step": 50710 }, { "epoch": 3.445780676722381, "grad_norm": 2.5294668674468994, "learning_rate": 0.0005694472754450333, "loss": 3.6388, "step": 50715 }, { "epoch": 3.4461203967930425, "grad_norm": 7.706198215484619, "learning_rate": 0.0005694048104362006, "loss": 3.6509, "step": 50720 }, { "epoch": 3.446460116863704, "grad_norm": 1.1165376901626587, "learning_rate": 0.0005693623454273678, "loss": 3.6585, "step": 50725 }, { "epoch": 3.446799836934366, "grad_norm": 1.3099303245544434, "learning_rate": 0.0005693198804185352, "loss": 3.8147, "step": 50730 }, { "epoch": 3.447139557005028, "grad_norm": 5.230844497680664, "learning_rate": 0.0005692774154097025, "loss": 3.3153, "step": 50735 }, { "epoch": 
3.4474792770756895, "grad_norm": 1.178890347480774, "learning_rate": 0.0005692349504008697, "loss": 3.52, "step": 50740 }, { "epoch": 3.4478189971463515, "grad_norm": 1.1013277769088745, "learning_rate": 0.0005691924853920369, "loss": 3.2016, "step": 50745 }, { "epoch": 3.448158717217013, "grad_norm": 1.6256184577941895, "learning_rate": 0.0005691500203832043, "loss": 3.4865, "step": 50750 }, { "epoch": 3.448498437287675, "grad_norm": 1.292226791381836, "learning_rate": 0.0005691075553743715, "loss": 3.4193, "step": 50755 }, { "epoch": 3.448838157358337, "grad_norm": 1.4632470607757568, "learning_rate": 0.0005690650903655387, "loss": 3.2764, "step": 50760 }, { "epoch": 3.4491778774289985, "grad_norm": 1.2781643867492676, "learning_rate": 0.0005690226253567062, "loss": 3.1791, "step": 50765 }, { "epoch": 3.44951759749966, "grad_norm": 1.5797219276428223, "learning_rate": 0.0005689801603478734, "loss": 3.5186, "step": 50770 }, { "epoch": 3.449857317570322, "grad_norm": 1.8721104860305786, "learning_rate": 0.0005689376953390406, "loss": 3.3438, "step": 50775 }, { "epoch": 3.450197037640984, "grad_norm": 1.1112605333328247, "learning_rate": 0.000568895230330208, "loss": 3.2468, "step": 50780 }, { "epoch": 3.4505367577116455, "grad_norm": 1.2878683805465698, "learning_rate": 0.0005688527653213752, "loss": 3.4426, "step": 50785 }, { "epoch": 3.4508764777823075, "grad_norm": 1.3366992473602295, "learning_rate": 0.0005688103003125424, "loss": 3.8418, "step": 50790 }, { "epoch": 3.451216197852969, "grad_norm": 1.8815536499023438, "learning_rate": 0.0005687678353037097, "loss": 3.3944, "step": 50795 }, { "epoch": 3.451555917923631, "grad_norm": 1.1324023008346558, "learning_rate": 0.0005687253702948771, "loss": 3.632, "step": 50800 }, { "epoch": 3.451895637994293, "grad_norm": 1.2409427165985107, "learning_rate": 0.0005686829052860443, "loss": 3.5569, "step": 50805 }, { "epoch": 3.4522353580649545, "grad_norm": 1.2631243467330933, "learning_rate": 0.0005686404402772116, 
"loss": 3.4783, "step": 50810 }, { "epoch": 3.452575078135616, "grad_norm": 1.0218740701675415, "learning_rate": 0.0005685979752683789, "loss": 3.6689, "step": 50815 }, { "epoch": 3.452914798206278, "grad_norm": 1.1429697275161743, "learning_rate": 0.0005685555102595461, "loss": 3.4122, "step": 50820 }, { "epoch": 3.45325451827694, "grad_norm": 1.0104351043701172, "learning_rate": 0.0005685130452507134, "loss": 3.5187, "step": 50825 }, { "epoch": 3.4535942383476015, "grad_norm": 1.3182283639907837, "learning_rate": 0.0005684705802418806, "loss": 3.4371, "step": 50830 }, { "epoch": 3.453933958418263, "grad_norm": 4.915347099304199, "learning_rate": 0.000568428115233048, "loss": 3.4051, "step": 50835 }, { "epoch": 3.454273678488925, "grad_norm": 1.194329023361206, "learning_rate": 0.0005683856502242153, "loss": 3.4521, "step": 50840 }, { "epoch": 3.454613398559587, "grad_norm": 1.2775630950927734, "learning_rate": 0.0005683431852153825, "loss": 3.4277, "step": 50845 }, { "epoch": 3.4549531186302485, "grad_norm": 1.2259491682052612, "learning_rate": 0.0005683007202065498, "loss": 3.5082, "step": 50850 }, { "epoch": 3.4552928387009105, "grad_norm": 1.5673331022262573, "learning_rate": 0.0005682582551977171, "loss": 3.6146, "step": 50855 }, { "epoch": 3.455632558771572, "grad_norm": 1.1326754093170166, "learning_rate": 0.0005682157901888843, "loss": 3.2327, "step": 50860 }, { "epoch": 3.455972278842234, "grad_norm": 1.3874781131744385, "learning_rate": 0.0005681733251800516, "loss": 3.4256, "step": 50865 }, { "epoch": 3.456311998912896, "grad_norm": 1.511107325553894, "learning_rate": 0.000568130860171219, "loss": 3.4934, "step": 50870 }, { "epoch": 3.4566517189835575, "grad_norm": 2.103797674179077, "learning_rate": 0.0005680883951623862, "loss": 3.5294, "step": 50875 }, { "epoch": 3.456991439054219, "grad_norm": 1.1862095594406128, "learning_rate": 0.0005680459301535535, "loss": 3.4913, "step": 50880 }, { "epoch": 3.457331159124881, "grad_norm": 1.5079549551010132, 
"learning_rate": 0.0005680034651447208, "loss": 3.3885, "step": 50885 }, { "epoch": 3.457670879195543, "grad_norm": 2.2107598781585693, "learning_rate": 0.000567961000135888, "loss": 3.5448, "step": 50890 }, { "epoch": 3.4580105992662045, "grad_norm": 1.559444785118103, "learning_rate": 0.0005679185351270552, "loss": 3.2189, "step": 50895 }, { "epoch": 3.4583503193368665, "grad_norm": 1.2216421365737915, "learning_rate": 0.0005678760701182226, "loss": 3.5991, "step": 50900 }, { "epoch": 3.458690039407528, "grad_norm": 1.4376020431518555, "learning_rate": 0.0005678336051093899, "loss": 3.5056, "step": 50905 }, { "epoch": 3.45902975947819, "grad_norm": 1.4667950868606567, "learning_rate": 0.0005677911401005571, "loss": 3.4245, "step": 50910 }, { "epoch": 3.459369479548852, "grad_norm": 1.4942216873168945, "learning_rate": 0.0005677486750917245, "loss": 3.4325, "step": 50915 }, { "epoch": 3.4597091996195135, "grad_norm": 1.7139744758605957, "learning_rate": 0.0005677062100828917, "loss": 3.3512, "step": 50920 }, { "epoch": 3.460048919690175, "grad_norm": 1.377720594406128, "learning_rate": 0.0005676637450740589, "loss": 3.2952, "step": 50925 }, { "epoch": 3.460388639760837, "grad_norm": 1.1953469514846802, "learning_rate": 0.0005676212800652263, "loss": 3.1877, "step": 50930 }, { "epoch": 3.460728359831499, "grad_norm": 1.9687306880950928, "learning_rate": 0.0005675788150563935, "loss": 3.6057, "step": 50935 }, { "epoch": 3.4610680799021605, "grad_norm": 1.3960614204406738, "learning_rate": 0.0005675363500475608, "loss": 3.4549, "step": 50940 }, { "epoch": 3.4614077999728226, "grad_norm": 1.4513096809387207, "learning_rate": 0.0005674938850387281, "loss": 3.5514, "step": 50945 }, { "epoch": 3.461747520043484, "grad_norm": 1.5689054727554321, "learning_rate": 0.0005674514200298954, "loss": 3.5687, "step": 50950 }, { "epoch": 3.462087240114146, "grad_norm": 1.5853369235992432, "learning_rate": 0.0005674089550210627, "loss": 3.4622, "step": 50955 }, { "epoch": 
3.462426960184808, "grad_norm": 1.610717535018921, "learning_rate": 0.0005673664900122299, "loss": 3.717, "step": 50960 }, { "epoch": 3.4627666802554695, "grad_norm": 1.4325536489486694, "learning_rate": 0.0005673240250033972, "loss": 3.4862, "step": 50965 }, { "epoch": 3.463106400326131, "grad_norm": 1.2668571472167969, "learning_rate": 0.0005672815599945645, "loss": 3.4422, "step": 50970 }, { "epoch": 3.4634461203967932, "grad_norm": 2.713132858276367, "learning_rate": 0.0005672390949857318, "loss": 3.6119, "step": 50975 }, { "epoch": 3.463785840467455, "grad_norm": 1.383519172668457, "learning_rate": 0.000567196629976899, "loss": 3.4928, "step": 50980 }, { "epoch": 3.4641255605381165, "grad_norm": 1.2338309288024902, "learning_rate": 0.0005671541649680664, "loss": 3.4635, "step": 50985 }, { "epoch": 3.4644652806087786, "grad_norm": 1.3481324911117554, "learning_rate": 0.0005671116999592336, "loss": 3.2855, "step": 50990 }, { "epoch": 3.46480500067944, "grad_norm": 1.5026789903640747, "learning_rate": 0.0005670692349504008, "loss": 3.5673, "step": 50995 }, { "epoch": 3.465144720750102, "grad_norm": 0.9605485200881958, "learning_rate": 0.0005670267699415682, "loss": 3.738, "step": 51000 }, { "epoch": 3.465484440820764, "grad_norm": 1.1030374765396118, "learning_rate": 0.0005669843049327354, "loss": 3.8088, "step": 51005 }, { "epoch": 3.4658241608914255, "grad_norm": 1.3545070886611938, "learning_rate": 0.0005669418399239027, "loss": 3.4034, "step": 51010 }, { "epoch": 3.466163880962087, "grad_norm": 2.3499608039855957, "learning_rate": 0.0005668993749150701, "loss": 3.4438, "step": 51015 }, { "epoch": 3.4665036010327492, "grad_norm": 2.8016953468322754, "learning_rate": 0.0005668569099062373, "loss": 3.3569, "step": 51020 }, { "epoch": 3.466843321103411, "grad_norm": 1.6848676204681396, "learning_rate": 0.0005668144448974045, "loss": 3.5198, "step": 51025 }, { "epoch": 3.4671830411740725, "grad_norm": 1.5812819004058838, "learning_rate": 0.0005667719798885719, 
"loss": 3.4808, "step": 51030 }, { "epoch": 3.4675227612447346, "grad_norm": 1.4158395528793335, "learning_rate": 0.0005667295148797391, "loss": 3.6258, "step": 51035 }, { "epoch": 3.467862481315396, "grad_norm": 1.3557158708572388, "learning_rate": 0.0005666870498709063, "loss": 3.6087, "step": 51040 }, { "epoch": 3.468202201386058, "grad_norm": 1.6194096803665161, "learning_rate": 0.0005666445848620737, "loss": 3.4034, "step": 51045 }, { "epoch": 3.46854192145672, "grad_norm": 1.2464687824249268, "learning_rate": 0.000566602119853241, "loss": 3.6138, "step": 51050 }, { "epoch": 3.4688816415273815, "grad_norm": 1.2236799001693726, "learning_rate": 0.0005665596548444082, "loss": 3.58, "step": 51055 }, { "epoch": 3.469221361598043, "grad_norm": 1.7224323749542236, "learning_rate": 0.0005665171898355755, "loss": 3.3763, "step": 51060 }, { "epoch": 3.469561081668705, "grad_norm": 1.2483208179473877, "learning_rate": 0.0005664747248267428, "loss": 3.3625, "step": 51065 }, { "epoch": 3.469900801739367, "grad_norm": 1.3620754480361938, "learning_rate": 0.00056643225981791, "loss": 3.2735, "step": 51070 }, { "epoch": 3.4702405218100285, "grad_norm": 1.3206373453140259, "learning_rate": 0.0005663897948090774, "loss": 3.1897, "step": 51075 }, { "epoch": 3.47058024188069, "grad_norm": 1.56571626663208, "learning_rate": 0.0005663473298002447, "loss": 3.6026, "step": 51080 }, { "epoch": 3.470919961951352, "grad_norm": 1.4661327600479126, "learning_rate": 0.0005663048647914119, "loss": 3.5341, "step": 51085 }, { "epoch": 3.471259682022014, "grad_norm": 2.0185718536376953, "learning_rate": 0.0005662623997825792, "loss": 3.7123, "step": 51090 }, { "epoch": 3.4715994020926755, "grad_norm": 1.0303648710250854, "learning_rate": 0.0005662199347737464, "loss": 3.3916, "step": 51095 }, { "epoch": 3.4719391221633376, "grad_norm": 1.4461805820465088, "learning_rate": 0.0005661774697649137, "loss": 3.5618, "step": 51100 }, { "epoch": 3.472278842233999, "grad_norm": 1.3922017812728882, 
"learning_rate": 0.000566135004756081, "loss": 3.3108, "step": 51105 }, { "epoch": 3.472618562304661, "grad_norm": 1.1763513088226318, "learning_rate": 0.0005660925397472483, "loss": 3.389, "step": 51110 }, { "epoch": 3.472958282375323, "grad_norm": 1.087729573249817, "learning_rate": 0.0005660500747384156, "loss": 3.5343, "step": 51115 }, { "epoch": 3.4732980024459845, "grad_norm": 1.2994054555892944, "learning_rate": 0.0005660076097295829, "loss": 3.5788, "step": 51120 }, { "epoch": 3.473637722516646, "grad_norm": 2.136608839035034, "learning_rate": 0.0005659651447207501, "loss": 3.5506, "step": 51125 }, { "epoch": 3.4739774425873082, "grad_norm": 1.3866565227508545, "learning_rate": 0.0005659226797119173, "loss": 3.4645, "step": 51130 }, { "epoch": 3.47431716265797, "grad_norm": 1.146804690361023, "learning_rate": 0.0005658802147030847, "loss": 3.4856, "step": 51135 }, { "epoch": 3.4746568827286315, "grad_norm": 1.6213304996490479, "learning_rate": 0.0005658377496942519, "loss": 3.3044, "step": 51140 }, { "epoch": 3.4749966027992936, "grad_norm": 1.411083459854126, "learning_rate": 0.0005657952846854192, "loss": 3.5259, "step": 51145 }, { "epoch": 3.475336322869955, "grad_norm": 1.079935073852539, "learning_rate": 0.0005657528196765866, "loss": 3.5636, "step": 51150 }, { "epoch": 3.475676042940617, "grad_norm": 1.639437198638916, "learning_rate": 0.0005657103546677538, "loss": 3.3433, "step": 51155 }, { "epoch": 3.4760157630112785, "grad_norm": 4.591412544250488, "learning_rate": 0.000565667889658921, "loss": 3.5964, "step": 51160 }, { "epoch": 3.4763554830819405, "grad_norm": 1.3647581338882446, "learning_rate": 0.0005656254246500884, "loss": 3.3745, "step": 51165 }, { "epoch": 3.476695203152602, "grad_norm": 1.2361547946929932, "learning_rate": 0.0005655829596412556, "loss": 3.2448, "step": 51170 }, { "epoch": 3.477034923223264, "grad_norm": 1.2502992153167725, "learning_rate": 0.0005655404946324228, "loss": 3.2383, "step": 51175 }, { "epoch": 
3.477374643293926, "grad_norm": 1.610156774520874, "learning_rate": 0.0005654980296235903, "loss": 3.5817, "step": 51180 }, { "epoch": 3.4777143633645875, "grad_norm": 1.2352218627929688, "learning_rate": 0.0005654555646147575, "loss": 3.6486, "step": 51185 }, { "epoch": 3.478054083435249, "grad_norm": 0.9476032257080078, "learning_rate": 0.0005654130996059247, "loss": 3.5596, "step": 51190 }, { "epoch": 3.478393803505911, "grad_norm": 1.1997830867767334, "learning_rate": 0.000565370634597092, "loss": 3.4869, "step": 51195 }, { "epoch": 3.478733523576573, "grad_norm": 1.1860733032226562, "learning_rate": 0.0005653281695882593, "loss": 3.3327, "step": 51200 }, { "epoch": 3.4790732436472345, "grad_norm": 1.0516691207885742, "learning_rate": 0.0005652857045794265, "loss": 3.3991, "step": 51205 }, { "epoch": 3.4794129637178965, "grad_norm": 1.8414316177368164, "learning_rate": 0.0005652432395705938, "loss": 3.122, "step": 51210 }, { "epoch": 3.479752683788558, "grad_norm": 1.308893084526062, "learning_rate": 0.0005652007745617612, "loss": 3.7144, "step": 51215 }, { "epoch": 3.48009240385922, "grad_norm": 2.5441458225250244, "learning_rate": 0.0005651583095529284, "loss": 3.4222, "step": 51220 }, { "epoch": 3.480432123929882, "grad_norm": 1.3622610569000244, "learning_rate": 0.0005651158445440957, "loss": 3.5094, "step": 51225 }, { "epoch": 3.4807718440005435, "grad_norm": 1.6305153369903564, "learning_rate": 0.000565073379535263, "loss": 3.5258, "step": 51230 }, { "epoch": 3.481111564071205, "grad_norm": 1.4837394952774048, "learning_rate": 0.0005650309145264302, "loss": 3.6237, "step": 51235 }, { "epoch": 3.481451284141867, "grad_norm": 1.2539806365966797, "learning_rate": 0.0005649884495175975, "loss": 3.3754, "step": 51240 }, { "epoch": 3.481791004212529, "grad_norm": 1.3046643733978271, "learning_rate": 0.0005649459845087647, "loss": 3.2859, "step": 51245 }, { "epoch": 3.4821307242831905, "grad_norm": 1.2698034048080444, "learning_rate": 0.0005649035194999321, 
"loss": 3.3048, "step": 51250 }, { "epoch": 3.4824704443538526, "grad_norm": 1.0967954397201538, "learning_rate": 0.0005648610544910994, "loss": 3.6819, "step": 51255 }, { "epoch": 3.482810164424514, "grad_norm": 1.4089601039886475, "learning_rate": 0.0005648185894822666, "loss": 3.5205, "step": 51260 }, { "epoch": 3.483149884495176, "grad_norm": 1.256485104560852, "learning_rate": 0.0005647761244734339, "loss": 3.4681, "step": 51265 }, { "epoch": 3.483489604565838, "grad_norm": 1.2147216796875, "learning_rate": 0.0005647336594646012, "loss": 3.6862, "step": 51270 }, { "epoch": 3.4838293246364995, "grad_norm": 1.4045946598052979, "learning_rate": 0.0005646911944557684, "loss": 3.3181, "step": 51275 }, { "epoch": 3.484169044707161, "grad_norm": 1.1444592475891113, "learning_rate": 0.0005646487294469356, "loss": 3.6156, "step": 51280 }, { "epoch": 3.4845087647778232, "grad_norm": 1.7785309553146362, "learning_rate": 0.0005646062644381031, "loss": 3.3697, "step": 51285 }, { "epoch": 3.484848484848485, "grad_norm": 1.7862250804901123, "learning_rate": 0.0005645637994292703, "loss": 3.3622, "step": 51290 }, { "epoch": 3.4851882049191465, "grad_norm": 1.419543981552124, "learning_rate": 0.0005645213344204376, "loss": 3.4309, "step": 51295 }, { "epoch": 3.4855279249898086, "grad_norm": 1.0984727144241333, "learning_rate": 0.0005644788694116049, "loss": 3.415, "step": 51300 }, { "epoch": 3.48586764506047, "grad_norm": 1.163562536239624, "learning_rate": 0.0005644364044027721, "loss": 3.7443, "step": 51305 }, { "epoch": 3.486207365131132, "grad_norm": 1.2945111989974976, "learning_rate": 0.0005643939393939394, "loss": 3.3448, "step": 51310 }, { "epoch": 3.486547085201794, "grad_norm": 1.3032351732254028, "learning_rate": 0.0005643514743851067, "loss": 3.5777, "step": 51315 }, { "epoch": 3.4868868052724555, "grad_norm": 1.3021557331085205, "learning_rate": 0.000564309009376274, "loss": 3.1138, "step": 51320 }, { "epoch": 3.487226525343117, "grad_norm": 1.3828203678131104, 
"learning_rate": 0.0005642665443674413, "loss": 3.6239, "step": 51325 }, { "epoch": 3.4875662454137792, "grad_norm": 1.242551565170288, "learning_rate": 0.0005642240793586085, "loss": 3.669, "step": 51330 }, { "epoch": 3.487905965484441, "grad_norm": 1.1655995845794678, "learning_rate": 0.0005641816143497758, "loss": 3.6461, "step": 51335 }, { "epoch": 3.4882456855551025, "grad_norm": 1.2446857690811157, "learning_rate": 0.0005641391493409431, "loss": 3.4292, "step": 51340 }, { "epoch": 3.4885854056257646, "grad_norm": 2.244666576385498, "learning_rate": 0.0005640966843321103, "loss": 3.6407, "step": 51345 }, { "epoch": 3.488925125696426, "grad_norm": 1.2458317279815674, "learning_rate": 0.0005640542193232776, "loss": 3.5297, "step": 51350 }, { "epoch": 3.489264845767088, "grad_norm": 1.1834766864776611, "learning_rate": 0.000564011754314445, "loss": 3.3983, "step": 51355 }, { "epoch": 3.48960456583775, "grad_norm": 1.6445119380950928, "learning_rate": 0.0005639692893056122, "loss": 3.3032, "step": 51360 }, { "epoch": 3.4899442859084115, "grad_norm": 3.880591630935669, "learning_rate": 0.0005639268242967795, "loss": 3.4593, "step": 51365 }, { "epoch": 3.490284005979073, "grad_norm": 1.5952978134155273, "learning_rate": 0.0005638843592879468, "loss": 3.5595, "step": 51370 }, { "epoch": 3.4906237260497353, "grad_norm": 1.1712539196014404, "learning_rate": 0.000563841894279114, "loss": 3.6068, "step": 51375 }, { "epoch": 3.490963446120397, "grad_norm": 1.4978581666946411, "learning_rate": 0.0005637994292702812, "loss": 3.4248, "step": 51380 }, { "epoch": 3.4913031661910585, "grad_norm": 1.3883473873138428, "learning_rate": 0.0005637569642614486, "loss": 3.421, "step": 51385 }, { "epoch": 3.4916428862617206, "grad_norm": 1.4767838716506958, "learning_rate": 0.0005637144992526159, "loss": 3.2435, "step": 51390 }, { "epoch": 3.491982606332382, "grad_norm": 1.2558207511901855, "learning_rate": 0.0005636720342437831, "loss": 3.5938, "step": 51395 }, { "epoch": 
3.492322326403044, "grad_norm": 1.568402886390686, "learning_rate": 0.0005636295692349505, "loss": 3.5026, "step": 51400 }, { "epoch": 3.4926620464737055, "grad_norm": 1.4244239330291748, "learning_rate": 0.0005635871042261177, "loss": 3.4325, "step": 51405 }, { "epoch": 3.4930017665443676, "grad_norm": 1.8686466217041016, "learning_rate": 0.0005635446392172849, "loss": 3.517, "step": 51410 }, { "epoch": 3.493341486615029, "grad_norm": 1.2825407981872559, "learning_rate": 0.0005635021742084523, "loss": 3.306, "step": 51415 }, { "epoch": 3.493681206685691, "grad_norm": 1.391762375831604, "learning_rate": 0.0005634597091996195, "loss": 3.884, "step": 51420 }, { "epoch": 3.494020926756353, "grad_norm": 1.185948133468628, "learning_rate": 0.0005634172441907868, "loss": 3.5097, "step": 51425 }, { "epoch": 3.4943606468270145, "grad_norm": 1.306343913078308, "learning_rate": 0.0005633747791819542, "loss": 3.5574, "step": 51430 }, { "epoch": 3.494700366897676, "grad_norm": 1.0127789974212646, "learning_rate": 0.0005633323141731214, "loss": 3.6469, "step": 51435 }, { "epoch": 3.4950400869683382, "grad_norm": 1.1662778854370117, "learning_rate": 0.0005632898491642886, "loss": 3.3659, "step": 51440 }, { "epoch": 3.495379807039, "grad_norm": 1.376499891281128, "learning_rate": 0.0005632473841554559, "loss": 3.4404, "step": 51445 }, { "epoch": 3.4957195271096615, "grad_norm": 1.540348768234253, "learning_rate": 0.0005632049191466232, "loss": 3.3134, "step": 51450 }, { "epoch": 3.4960592471803236, "grad_norm": 1.596522569656372, "learning_rate": 0.0005631624541377904, "loss": 3.4526, "step": 51455 }, { "epoch": 3.496398967250985, "grad_norm": 2.307967185974121, "learning_rate": 0.0005631199891289578, "loss": 3.5975, "step": 51460 }, { "epoch": 3.496738687321647, "grad_norm": 1.2025783061981201, "learning_rate": 0.0005630775241201251, "loss": 3.4356, "step": 51465 }, { "epoch": 3.497078407392309, "grad_norm": 1.1934783458709717, "learning_rate": 0.0005630350591112923, "loss": 
3.4731, "step": 51470 }, { "epoch": 3.4974181274629705, "grad_norm": 1.4638170003890991, "learning_rate": 0.0005629925941024596, "loss": 3.3711, "step": 51475 }, { "epoch": 3.497757847533632, "grad_norm": 1.1137934923171997, "learning_rate": 0.0005629501290936268, "loss": 3.6718, "step": 51480 }, { "epoch": 3.4980975676042942, "grad_norm": 1.237748622894287, "learning_rate": 0.0005629076640847941, "loss": 3.3291, "step": 51485 }, { "epoch": 3.498437287674956, "grad_norm": 1.2159844636917114, "learning_rate": 0.0005628651990759614, "loss": 3.4955, "step": 51490 }, { "epoch": 3.4987770077456175, "grad_norm": 1.2728731632232666, "learning_rate": 0.0005628227340671287, "loss": 3.4224, "step": 51495 }, { "epoch": 3.499116727816279, "grad_norm": 1.0381646156311035, "learning_rate": 0.000562780269058296, "loss": 3.3176, "step": 51500 }, { "epoch": 3.499456447886941, "grad_norm": 1.5515226125717163, "learning_rate": 0.0005627378040494633, "loss": 3.4755, "step": 51505 }, { "epoch": 3.499796167957603, "grad_norm": 1.0840413570404053, "learning_rate": 0.0005626953390406305, "loss": 3.6757, "step": 51510 }, { "epoch": 3.5001358880282645, "grad_norm": 1.7389161586761475, "learning_rate": 0.0005626528740317977, "loss": 3.384, "step": 51515 }, { "epoch": 3.5004756080989265, "grad_norm": 1.1414613723754883, "learning_rate": 0.0005626104090229651, "loss": 3.1438, "step": 51520 }, { "epoch": 3.500815328169588, "grad_norm": 1.2581994533538818, "learning_rate": 0.0005625679440141323, "loss": 3.3118, "step": 51525 }, { "epoch": 3.50115504824025, "grad_norm": 1.3592219352722168, "learning_rate": 0.0005625254790052996, "loss": 3.5822, "step": 51530 }, { "epoch": 3.501494768310912, "grad_norm": 1.2752317190170288, "learning_rate": 0.000562483013996467, "loss": 3.2414, "step": 51535 }, { "epoch": 3.5018344883815735, "grad_norm": 1.2894424200057983, "learning_rate": 0.0005624405489876342, "loss": 3.4415, "step": 51540 }, { "epoch": 3.502174208452235, "grad_norm": 1.5527524948120117, 
"learning_rate": 0.0005623980839788014, "loss": 3.3674, "step": 51545 }, { "epoch": 3.5025139285228972, "grad_norm": 1.3033198118209839, "learning_rate": 0.0005623556189699688, "loss": 3.4755, "step": 51550 }, { "epoch": 3.502853648593559, "grad_norm": 1.3498786687850952, "learning_rate": 0.000562313153961136, "loss": 3.5458, "step": 51555 }, { "epoch": 3.5031933686642205, "grad_norm": 1.0779155492782593, "learning_rate": 0.0005622706889523032, "loss": 3.3011, "step": 51560 }, { "epoch": 3.5035330887348826, "grad_norm": 1.1152108907699585, "learning_rate": 0.0005622282239434707, "loss": 3.6157, "step": 51565 }, { "epoch": 3.503872808805544, "grad_norm": 1.1704795360565186, "learning_rate": 0.0005621857589346379, "loss": 3.1907, "step": 51570 }, { "epoch": 3.504212528876206, "grad_norm": 1.4650264978408813, "learning_rate": 0.0005621432939258051, "loss": 3.7886, "step": 51575 }, { "epoch": 3.504552248946868, "grad_norm": 1.2998512983322144, "learning_rate": 0.0005621008289169724, "loss": 3.4483, "step": 51580 }, { "epoch": 3.5048919690175295, "grad_norm": 1.142147421836853, "learning_rate": 0.0005620583639081397, "loss": 3.4214, "step": 51585 }, { "epoch": 3.505231689088191, "grad_norm": 1.2645809650421143, "learning_rate": 0.0005620158988993069, "loss": 3.3335, "step": 51590 }, { "epoch": 3.5055714091588532, "grad_norm": 1.4005563259124756, "learning_rate": 0.0005619734338904742, "loss": 3.6851, "step": 51595 }, { "epoch": 3.505911129229515, "grad_norm": 1.4273673295974731, "learning_rate": 0.0005619309688816416, "loss": 3.2019, "step": 51600 }, { "epoch": 3.5062508493001765, "grad_norm": 1.186275601387024, "learning_rate": 0.0005618885038728088, "loss": 3.0937, "step": 51605 }, { "epoch": 3.5065905693708386, "grad_norm": 1.3509681224822998, "learning_rate": 0.0005618460388639761, "loss": 3.2791, "step": 51610 }, { "epoch": 3.5069302894415, "grad_norm": 1.1960763931274414, "learning_rate": 0.0005618035738551434, "loss": 3.2058, "step": 51615 }, { "epoch": 
3.507270009512162, "grad_norm": 1.1782033443450928, "learning_rate": 0.0005617611088463106, "loss": 3.6649, "step": 51620 }, { "epoch": 3.507609729582824, "grad_norm": 1.6408448219299316, "learning_rate": 0.0005617186438374779, "loss": 3.8601, "step": 51625 }, { "epoch": 3.5079494496534855, "grad_norm": 1.421921968460083, "learning_rate": 0.0005616761788286451, "loss": 3.4526, "step": 51630 }, { "epoch": 3.508289169724147, "grad_norm": 1.2711001634597778, "learning_rate": 0.0005616337138198125, "loss": 3.419, "step": 51635 }, { "epoch": 3.5086288897948092, "grad_norm": 1.3868191242218018, "learning_rate": 0.0005615912488109798, "loss": 3.6165, "step": 51640 }, { "epoch": 3.508968609865471, "grad_norm": 1.5703747272491455, "learning_rate": 0.000561548783802147, "loss": 3.5473, "step": 51645 }, { "epoch": 3.5093083299361325, "grad_norm": 1.3197468519210815, "learning_rate": 0.0005615063187933144, "loss": 3.6602, "step": 51650 }, { "epoch": 3.5096480500067946, "grad_norm": 1.2319027185440063, "learning_rate": 0.0005614638537844816, "loss": 3.3257, "step": 51655 }, { "epoch": 3.509987770077456, "grad_norm": 1.2093971967697144, "learning_rate": 0.0005614213887756488, "loss": 3.4296, "step": 51660 }, { "epoch": 3.510327490148118, "grad_norm": 1.144490361213684, "learning_rate": 0.0005613789237668163, "loss": 3.3529, "step": 51665 }, { "epoch": 3.51066721021878, "grad_norm": 1.234391689300537, "learning_rate": 0.0005613364587579835, "loss": 3.5054, "step": 51670 }, { "epoch": 3.5110069302894416, "grad_norm": 1.2451317310333252, "learning_rate": 0.0005612939937491507, "loss": 3.4748, "step": 51675 }, { "epoch": 3.511346650360103, "grad_norm": 1.2897495031356812, "learning_rate": 0.000561251528740318, "loss": 3.3993, "step": 51680 }, { "epoch": 3.5116863704307653, "grad_norm": 1.1841243505477905, "learning_rate": 0.0005612090637314853, "loss": 3.4968, "step": 51685 }, { "epoch": 3.512026090501427, "grad_norm": 1.4104219675064087, "learning_rate": 0.0005611665987226525, 
"loss": 3.3164, "step": 51690 }, { "epoch": 3.5123658105720885, "grad_norm": 1.1665842533111572, "learning_rate": 0.0005611241337138198, "loss": 3.7061, "step": 51695 }, { "epoch": 3.5127055306427506, "grad_norm": 1.5325278043746948, "learning_rate": 0.0005610816687049872, "loss": 3.6782, "step": 51700 }, { "epoch": 3.5130452507134122, "grad_norm": 1.3339204788208008, "learning_rate": 0.0005610392036961544, "loss": 3.3101, "step": 51705 }, { "epoch": 3.513384970784074, "grad_norm": 1.0942963361740112, "learning_rate": 0.0005609967386873217, "loss": 3.5889, "step": 51710 }, { "epoch": 3.513724690854736, "grad_norm": 1.2581701278686523, "learning_rate": 0.000560954273678489, "loss": 3.2538, "step": 51715 }, { "epoch": 3.5140644109253976, "grad_norm": 1.4277318716049194, "learning_rate": 0.0005609118086696562, "loss": 3.4934, "step": 51720 }, { "epoch": 3.514404130996059, "grad_norm": 1.1309281587600708, "learning_rate": 0.0005608693436608235, "loss": 3.3528, "step": 51725 }, { "epoch": 3.5147438510667213, "grad_norm": 1.3326798677444458, "learning_rate": 0.0005608268786519907, "loss": 3.2134, "step": 51730 }, { "epoch": 3.515083571137383, "grad_norm": 1.2534106969833374, "learning_rate": 0.0005607844136431581, "loss": 3.3791, "step": 51735 }, { "epoch": 3.5154232912080445, "grad_norm": 1.424730896949768, "learning_rate": 0.0005607419486343254, "loss": 3.4924, "step": 51740 }, { "epoch": 3.5157630112787066, "grad_norm": 1.7092126607894897, "learning_rate": 0.0005606994836254926, "loss": 3.6343, "step": 51745 }, { "epoch": 3.5161027313493682, "grad_norm": 1.474116563796997, "learning_rate": 0.0005606570186166599, "loss": 3.8131, "step": 51750 }, { "epoch": 3.51644245142003, "grad_norm": 1.3316144943237305, "learning_rate": 0.0005606145536078272, "loss": 3.4591, "step": 51755 }, { "epoch": 3.516782171490692, "grad_norm": 1.4252396821975708, "learning_rate": 0.0005605720885989944, "loss": 3.4704, "step": 51760 }, { "epoch": 3.5171218915613536, "grad_norm": 
1.0866996049880981, "learning_rate": 0.0005605296235901616, "loss": 3.4052, "step": 51765 }, { "epoch": 3.517461611632015, "grad_norm": 1.4631364345550537, "learning_rate": 0.0005604871585813291, "loss": 3.5355, "step": 51770 }, { "epoch": 3.517801331702677, "grad_norm": 1.7677509784698486, "learning_rate": 0.0005604446935724963, "loss": 3.3638, "step": 51775 }, { "epoch": 3.518141051773339, "grad_norm": 2.576901435852051, "learning_rate": 0.0005604022285636635, "loss": 3.4924, "step": 51780 }, { "epoch": 3.5184807718440005, "grad_norm": 1.6313461065292358, "learning_rate": 0.0005603597635548309, "loss": 3.3147, "step": 51785 }, { "epoch": 3.518820491914662, "grad_norm": 1.3337963819503784, "learning_rate": 0.0005603172985459981, "loss": 3.6409, "step": 51790 }, { "epoch": 3.5191602119853242, "grad_norm": 1.4334640502929688, "learning_rate": 0.0005602748335371653, "loss": 3.2566, "step": 51795 }, { "epoch": 3.519499932055986, "grad_norm": 1.373971700668335, "learning_rate": 0.0005602323685283327, "loss": 3.3579, "step": 51800 }, { "epoch": 3.5198396521266475, "grad_norm": 1.2206840515136719, "learning_rate": 0.0005601899035195, "loss": 3.7143, "step": 51805 }, { "epoch": 3.520179372197309, "grad_norm": 1.1768295764923096, "learning_rate": 0.0005601474385106672, "loss": 3.7187, "step": 51810 }, { "epoch": 3.520519092267971, "grad_norm": 1.272682547569275, "learning_rate": 0.0005601049735018346, "loss": 3.4608, "step": 51815 }, { "epoch": 3.520858812338633, "grad_norm": 1.1654386520385742, "learning_rate": 0.0005600625084930018, "loss": 3.2331, "step": 51820 }, { "epoch": 3.5211985324092945, "grad_norm": 1.2039870023727417, "learning_rate": 0.000560020043484169, "loss": 3.3761, "step": 51825 }, { "epoch": 3.5215382524799566, "grad_norm": 1.2339802980422974, "learning_rate": 0.0005599775784753363, "loss": 3.4794, "step": 51830 }, { "epoch": 3.521877972550618, "grad_norm": 1.1438722610473633, "learning_rate": 0.0005599351134665036, "loss": 3.679, "step": 51835 }, { 
"epoch": 3.52221769262128, "grad_norm": 1.5859216451644897, "learning_rate": 0.0005598926484576709, "loss": 3.2431, "step": 51840 }, { "epoch": 3.522557412691942, "grad_norm": 1.4120233058929443, "learning_rate": 0.0005598501834488382, "loss": 3.3959, "step": 51845 }, { "epoch": 3.5228971327626035, "grad_norm": 1.316983938217163, "learning_rate": 0.0005598077184400055, "loss": 3.4914, "step": 51850 }, { "epoch": 3.523236852833265, "grad_norm": 1.247015118598938, "learning_rate": 0.0005597652534311727, "loss": 3.4073, "step": 51855 }, { "epoch": 3.5235765729039272, "grad_norm": 1.4048781394958496, "learning_rate": 0.00055972278842234, "loss": 3.5881, "step": 51860 }, { "epoch": 3.523916292974589, "grad_norm": 1.24896240234375, "learning_rate": 0.0005596803234135072, "loss": 3.4308, "step": 51865 }, { "epoch": 3.5242560130452505, "grad_norm": 1.1559562683105469, "learning_rate": 0.0005596378584046745, "loss": 3.3577, "step": 51870 }, { "epoch": 3.5245957331159126, "grad_norm": 1.3114835023880005, "learning_rate": 0.0005595953933958419, "loss": 3.5844, "step": 51875 }, { "epoch": 3.524935453186574, "grad_norm": 1.490178108215332, "learning_rate": 0.0005595529283870091, "loss": 3.1825, "step": 51880 }, { "epoch": 3.525275173257236, "grad_norm": 1.0901947021484375, "learning_rate": 0.0005595104633781764, "loss": 3.1607, "step": 51885 }, { "epoch": 3.525614893327898, "grad_norm": 1.1320728063583374, "learning_rate": 0.0005594679983693437, "loss": 3.5711, "step": 51890 }, { "epoch": 3.5259546133985595, "grad_norm": 1.3271678686141968, "learning_rate": 0.0005594255333605109, "loss": 3.432, "step": 51895 }, { "epoch": 3.526294333469221, "grad_norm": 1.3475538492202759, "learning_rate": 0.0005593830683516782, "loss": 3.5258, "step": 51900 }, { "epoch": 3.5266340535398832, "grad_norm": 1.4132989645004272, "learning_rate": 0.0005593406033428455, "loss": 3.5795, "step": 51905 }, { "epoch": 3.526973773610545, "grad_norm": 1.2696293592453003, "learning_rate": 
0.0005592981383340128, "loss": 3.531, "step": 51910 }, { "epoch": 3.5273134936812065, "grad_norm": 1.4758617877960205, "learning_rate": 0.00055925567332518, "loss": 3.5311, "step": 51915 }, { "epoch": 3.5276532137518686, "grad_norm": 1.210360050201416, "learning_rate": 0.0005592132083163474, "loss": 3.2094, "step": 51920 }, { "epoch": 3.52799293382253, "grad_norm": 1.4543178081512451, "learning_rate": 0.0005591707433075146, "loss": 3.5551, "step": 51925 }, { "epoch": 3.528332653893192, "grad_norm": 1.1311150789260864, "learning_rate": 0.0005591282782986818, "loss": 3.6861, "step": 51930 }, { "epoch": 3.528672373963854, "grad_norm": 1.273756504058838, "learning_rate": 0.0005590858132898492, "loss": 3.3603, "step": 51935 }, { "epoch": 3.5290120940345155, "grad_norm": 1.6343456506729126, "learning_rate": 0.0005590433482810164, "loss": 3.3336, "step": 51940 }, { "epoch": 3.529351814105177, "grad_norm": 1.8180999755859375, "learning_rate": 0.0005590008832721837, "loss": 3.3362, "step": 51945 }, { "epoch": 3.5296915341758393, "grad_norm": 1.4903666973114014, "learning_rate": 0.0005589584182633511, "loss": 3.2968, "step": 51950 }, { "epoch": 3.530031254246501, "grad_norm": 1.3416297435760498, "learning_rate": 0.0005589159532545183, "loss": 3.3747, "step": 51955 }, { "epoch": 3.5303709743171625, "grad_norm": 1.746351718902588, "learning_rate": 0.0005588734882456855, "loss": 3.5786, "step": 51960 }, { "epoch": 3.5307106943878246, "grad_norm": 1.4102041721343994, "learning_rate": 0.0005588310232368528, "loss": 3.3393, "step": 51965 }, { "epoch": 3.531050414458486, "grad_norm": 1.1696768999099731, "learning_rate": 0.0005587885582280201, "loss": 3.6632, "step": 51970 }, { "epoch": 3.531390134529148, "grad_norm": 1.2164071798324585, "learning_rate": 0.0005587460932191873, "loss": 3.1383, "step": 51975 }, { "epoch": 3.53172985459981, "grad_norm": 1.2788013219833374, "learning_rate": 0.0005587036282103547, "loss": 3.2004, "step": 51980 }, { "epoch": 3.5320695746704716, 
"grad_norm": 1.2314561605453491, "learning_rate": 0.000558661163201522, "loss": 3.6206, "step": 51985 }, { "epoch": 3.532409294741133, "grad_norm": 1.3091354370117188, "learning_rate": 0.0005586186981926893, "loss": 3.4425, "step": 51990 }, { "epoch": 3.5327490148117953, "grad_norm": 1.5305123329162598, "learning_rate": 0.0005585762331838565, "loss": 3.5557, "step": 51995 }, { "epoch": 3.533088734882457, "grad_norm": 1.6703766584396362, "learning_rate": 0.0005585337681750238, "loss": 3.4853, "step": 52000 }, { "epoch": 3.5334284549531185, "grad_norm": 1.4330989122390747, "learning_rate": 0.0005584913031661911, "loss": 3.7765, "step": 52005 }, { "epoch": 3.5337681750237806, "grad_norm": 1.1854181289672852, "learning_rate": 0.0005584488381573583, "loss": 3.2574, "step": 52010 }, { "epoch": 3.5341078950944422, "grad_norm": 1.3438746929168701, "learning_rate": 0.0005584063731485256, "loss": 3.5493, "step": 52015 }, { "epoch": 3.534447615165104, "grad_norm": 1.3317058086395264, "learning_rate": 0.000558363908139693, "loss": 3.3849, "step": 52020 }, { "epoch": 3.534787335235766, "grad_norm": 1.1556906700134277, "learning_rate": 0.0005583214431308602, "loss": 3.5525, "step": 52025 }, { "epoch": 3.5351270553064276, "grad_norm": 1.196043848991394, "learning_rate": 0.0005582789781220274, "loss": 3.6539, "step": 52030 }, { "epoch": 3.535466775377089, "grad_norm": 1.4469050168991089, "learning_rate": 0.0005582365131131948, "loss": 3.5625, "step": 52035 }, { "epoch": 3.5358064954477513, "grad_norm": 1.5178462266921997, "learning_rate": 0.000558194048104362, "loss": 3.4245, "step": 52040 }, { "epoch": 3.536146215518413, "grad_norm": 1.2470638751983643, "learning_rate": 0.0005581515830955292, "loss": 3.4961, "step": 52045 }, { "epoch": 3.5364859355890745, "grad_norm": 1.3660850524902344, "learning_rate": 0.0005581091180866967, "loss": 3.5415, "step": 52050 }, { "epoch": 3.5368256556597366, "grad_norm": 1.514109492301941, "learning_rate": 0.0005580666530778639, "loss": 3.6326, 
"step": 52055 }, { "epoch": 3.5371653757303982, "grad_norm": 1.4988263845443726, "learning_rate": 0.0005580241880690311, "loss": 3.4252, "step": 52060 }, { "epoch": 3.53750509580106, "grad_norm": 1.4085742235183716, "learning_rate": 0.0005579817230601984, "loss": 3.3692, "step": 52065 }, { "epoch": 3.537844815871722, "grad_norm": 1.4976214170455933, "learning_rate": 0.0005579392580513657, "loss": 3.3011, "step": 52070 }, { "epoch": 3.5381845359423836, "grad_norm": 1.2566847801208496, "learning_rate": 0.0005578967930425329, "loss": 3.4853, "step": 52075 }, { "epoch": 3.538524256013045, "grad_norm": 1.3999297618865967, "learning_rate": 0.0005578543280337002, "loss": 3.5515, "step": 52080 }, { "epoch": 3.5388639760837073, "grad_norm": 1.469187617301941, "learning_rate": 0.0005578118630248676, "loss": 3.2611, "step": 52085 }, { "epoch": 3.539203696154369, "grad_norm": 1.3476394414901733, "learning_rate": 0.0005577693980160348, "loss": 3.3096, "step": 52090 }, { "epoch": 3.5395434162250305, "grad_norm": 1.4181334972381592, "learning_rate": 0.0005577269330072021, "loss": 3.7709, "step": 52095 }, { "epoch": 3.5398831362956926, "grad_norm": 1.2211570739746094, "learning_rate": 0.0005576844679983694, "loss": 3.465, "step": 52100 }, { "epoch": 3.5402228563663543, "grad_norm": 0.9230791926383972, "learning_rate": 0.0005576420029895366, "loss": 3.6572, "step": 52105 }, { "epoch": 3.540562576437016, "grad_norm": 1.1265366077423096, "learning_rate": 0.0005575995379807039, "loss": 3.3023, "step": 52110 }, { "epoch": 3.5409022965076775, "grad_norm": 1.1932579278945923, "learning_rate": 0.0005575570729718711, "loss": 3.7126, "step": 52115 }, { "epoch": 3.5412420165783396, "grad_norm": 1.2696876525878906, "learning_rate": 0.0005575146079630385, "loss": 3.3315, "step": 52120 }, { "epoch": 3.541581736649001, "grad_norm": 1.1784965991973877, "learning_rate": 0.0005574721429542058, "loss": 3.3269, "step": 52125 }, { "epoch": 3.541921456719663, "grad_norm": 1.4191126823425293, 
"learning_rate": 0.000557429677945373, "loss": 3.5796, "step": 52130 }, { "epoch": 3.542261176790325, "grad_norm": 1.498252511024475, "learning_rate": 0.0005573872129365403, "loss": 3.5476, "step": 52135 }, { "epoch": 3.5426008968609866, "grad_norm": 1.321440577507019, "learning_rate": 0.0005573447479277076, "loss": 3.3395, "step": 52140 }, { "epoch": 3.542940616931648, "grad_norm": 8.198083877563477, "learning_rate": 0.0005573022829188748, "loss": 3.4984, "step": 52145 }, { "epoch": 3.54328033700231, "grad_norm": 0.9923506379127502, "learning_rate": 0.000557259817910042, "loss": 3.3894, "step": 52150 }, { "epoch": 3.543620057072972, "grad_norm": 1.42500901222229, "learning_rate": 0.0005572173529012095, "loss": 3.5872, "step": 52155 }, { "epoch": 3.5439597771436335, "grad_norm": 1.5546424388885498, "learning_rate": 0.0005571748878923767, "loss": 3.6662, "step": 52160 }, { "epoch": 3.544299497214295, "grad_norm": 1.2793025970458984, "learning_rate": 0.0005571324228835439, "loss": 3.4569, "step": 52165 }, { "epoch": 3.5446392172849572, "grad_norm": 1.2838793992996216, "learning_rate": 0.0005570899578747113, "loss": 3.3574, "step": 52170 }, { "epoch": 3.544978937355619, "grad_norm": 1.5566637516021729, "learning_rate": 0.0005570474928658785, "loss": 3.2801, "step": 52175 }, { "epoch": 3.5453186574262805, "grad_norm": 1.2867087125778198, "learning_rate": 0.0005570050278570457, "loss": 3.3221, "step": 52180 }, { "epoch": 3.5456583774969426, "grad_norm": 1.138988733291626, "learning_rate": 0.0005569625628482131, "loss": 3.0867, "step": 52185 }, { "epoch": 3.545998097567604, "grad_norm": 1.1285699605941772, "learning_rate": 0.0005569200978393804, "loss": 3.5017, "step": 52190 }, { "epoch": 3.546337817638266, "grad_norm": 1.1503592729568481, "learning_rate": 0.0005568776328305476, "loss": 3.2052, "step": 52195 }, { "epoch": 3.546677537708928, "grad_norm": 2.068497657775879, "learning_rate": 0.000556835167821715, "loss": 3.3573, "step": 52200 }, { "epoch": 
3.5470172577795895, "grad_norm": 1.5356323719024658, "learning_rate": 0.0005567927028128822, "loss": 3.4054, "step": 52205 }, { "epoch": 3.547356977850251, "grad_norm": 1.2949638366699219, "learning_rate": 0.0005567502378040494, "loss": 3.496, "step": 52210 }, { "epoch": 3.5476966979209132, "grad_norm": 3.833127498626709, "learning_rate": 0.0005567077727952167, "loss": 3.5053, "step": 52215 }, { "epoch": 3.548036417991575, "grad_norm": 1.2788708209991455, "learning_rate": 0.000556665307786384, "loss": 3.4645, "step": 52220 }, { "epoch": 3.5483761380622365, "grad_norm": 1.3266264200210571, "learning_rate": 0.0005566228427775513, "loss": 3.4073, "step": 52225 }, { "epoch": 3.5487158581328986, "grad_norm": 1.413780927658081, "learning_rate": 0.0005565803777687186, "loss": 3.5259, "step": 52230 }, { "epoch": 3.54905557820356, "grad_norm": 1.0491658449172974, "learning_rate": 0.0005565379127598859, "loss": 3.5331, "step": 52235 }, { "epoch": 3.549395298274222, "grad_norm": 1.2902560234069824, "learning_rate": 0.0005564954477510531, "loss": 3.499, "step": 52240 }, { "epoch": 3.549735018344884, "grad_norm": 2.4534554481506348, "learning_rate": 0.0005564529827422204, "loss": 3.5693, "step": 52245 }, { "epoch": 3.5500747384155455, "grad_norm": 1.1736680269241333, "learning_rate": 0.0005564105177333876, "loss": 3.4599, "step": 52250 }, { "epoch": 3.550414458486207, "grad_norm": 1.1709054708480835, "learning_rate": 0.0005563680527245549, "loss": 3.4306, "step": 52255 }, { "epoch": 3.5507541785568693, "grad_norm": 1.7651172876358032, "learning_rate": 0.0005563255877157223, "loss": 3.3308, "step": 52260 }, { "epoch": 3.551093898627531, "grad_norm": 1.273714542388916, "learning_rate": 0.0005562831227068895, "loss": 3.4189, "step": 52265 }, { "epoch": 3.5514336186981925, "grad_norm": 1.228276252746582, "learning_rate": 0.0005562406576980568, "loss": 3.3078, "step": 52270 }, { "epoch": 3.5517733387688546, "grad_norm": 1.1048190593719482, "learning_rate": 0.0005561981926892241, 
"loss": 3.642, "step": 52275 }, { "epoch": 3.5521130588395162, "grad_norm": 1.5354138612747192, "learning_rate": 0.0005561557276803913, "loss": 3.54, "step": 52280 }, { "epoch": 3.552452778910178, "grad_norm": 1.5842705965042114, "learning_rate": 0.0005561132626715586, "loss": 3.5728, "step": 52285 }, { "epoch": 3.55279249898084, "grad_norm": 1.2722434997558594, "learning_rate": 0.000556070797662726, "loss": 3.5422, "step": 52290 }, { "epoch": 3.5531322190515016, "grad_norm": 1.4874331951141357, "learning_rate": 0.0005560283326538932, "loss": 3.8455, "step": 52295 }, { "epoch": 3.553471939122163, "grad_norm": 1.265576720237732, "learning_rate": 0.0005559858676450605, "loss": 3.3427, "step": 52300 }, { "epoch": 3.5538116591928253, "grad_norm": 1.2040417194366455, "learning_rate": 0.0005559434026362278, "loss": 3.6429, "step": 52305 }, { "epoch": 3.554151379263487, "grad_norm": 1.2932177782058716, "learning_rate": 0.000555900937627395, "loss": 3.3702, "step": 52310 }, { "epoch": 3.5544910993341485, "grad_norm": 1.0817621946334839, "learning_rate": 0.0005558584726185622, "loss": 3.4322, "step": 52315 }, { "epoch": 3.5548308194048106, "grad_norm": 6.149727821350098, "learning_rate": 0.0005558160076097296, "loss": 3.5475, "step": 52320 }, { "epoch": 3.5551705394754722, "grad_norm": 0.9613606929779053, "learning_rate": 0.0005557735426008969, "loss": 3.546, "step": 52325 }, { "epoch": 3.555510259546134, "grad_norm": 1.471228003501892, "learning_rate": 0.0005557310775920642, "loss": 3.5306, "step": 52330 }, { "epoch": 3.555849979616796, "grad_norm": 1.312610149383545, "learning_rate": 0.0005556886125832315, "loss": 3.6258, "step": 52335 }, { "epoch": 3.5561896996874576, "grad_norm": 1.3131507635116577, "learning_rate": 0.0005556461475743987, "loss": 3.3482, "step": 52340 }, { "epoch": 3.556529419758119, "grad_norm": 1.6956125497817993, "learning_rate": 0.000555603682565566, "loss": 3.1632, "step": 52345 }, { "epoch": 3.5568691398287813, "grad_norm": 1.1657034158706665, 
"learning_rate": 0.0005555612175567333, "loss": 3.3141, "step": 52350 }, { "epoch": 3.557208859899443, "grad_norm": 1.6517177820205688, "learning_rate": 0.0005555187525479005, "loss": 3.4786, "step": 52355 }, { "epoch": 3.5575485799701045, "grad_norm": 1.5612702369689941, "learning_rate": 0.0005554762875390679, "loss": 3.5449, "step": 52360 }, { "epoch": 3.5578883000407666, "grad_norm": 1.344919204711914, "learning_rate": 0.0005554338225302351, "loss": 3.3867, "step": 52365 }, { "epoch": 3.5582280201114282, "grad_norm": 1.384015440940857, "learning_rate": 0.0005553913575214024, "loss": 3.2993, "step": 52370 }, { "epoch": 3.55856774018209, "grad_norm": 1.2655874490737915, "learning_rate": 0.0005553488925125697, "loss": 3.3693, "step": 52375 }, { "epoch": 3.558907460252752, "grad_norm": 1.1815094947814941, "learning_rate": 0.0005553064275037369, "loss": 3.5002, "step": 52380 }, { "epoch": 3.5592471803234136, "grad_norm": 1.0917999744415283, "learning_rate": 0.0005552639624949042, "loss": 3.2824, "step": 52385 }, { "epoch": 3.559586900394075, "grad_norm": 1.574488878250122, "learning_rate": 0.0005552214974860715, "loss": 3.3149, "step": 52390 }, { "epoch": 3.5599266204647373, "grad_norm": 1.8331568241119385, "learning_rate": 0.0005551790324772388, "loss": 3.3439, "step": 52395 }, { "epoch": 3.560266340535399, "grad_norm": 1.3962485790252686, "learning_rate": 0.000555136567468406, "loss": 3.4656, "step": 52400 }, { "epoch": 3.5606060606060606, "grad_norm": 1.1890124082565308, "learning_rate": 0.0005550941024595734, "loss": 3.3771, "step": 52405 }, { "epoch": 3.5609457806767226, "grad_norm": 1.068519949913025, "learning_rate": 0.0005550516374507406, "loss": 3.4984, "step": 52410 }, { "epoch": 3.5612855007473843, "grad_norm": 1.2288280725479126, "learning_rate": 0.0005550091724419078, "loss": 3.5175, "step": 52415 }, { "epoch": 3.561625220818046, "grad_norm": 1.3257346153259277, "learning_rate": 0.0005549667074330752, "loss": 3.5497, "step": 52420 }, { "epoch": 
3.561964940888708, "grad_norm": 1.115970253944397, "learning_rate": 0.0005549242424242424, "loss": 3.5816, "step": 52425 }, { "epoch": 3.5623046609593696, "grad_norm": 1.225216031074524, "learning_rate": 0.0005548817774154097, "loss": 3.2373, "step": 52430 }, { "epoch": 3.5626443810300312, "grad_norm": 1.283158302307129, "learning_rate": 0.0005548393124065771, "loss": 3.745, "step": 52435 }, { "epoch": 3.5629841011006933, "grad_norm": 1.6017526388168335, "learning_rate": 0.0005547968473977443, "loss": 3.2112, "step": 52440 }, { "epoch": 3.563323821171355, "grad_norm": 1.6461137533187866, "learning_rate": 0.0005547543823889115, "loss": 3.4381, "step": 52445 }, { "epoch": 3.5636635412420166, "grad_norm": 1.50209379196167, "learning_rate": 0.0005547119173800789, "loss": 3.3174, "step": 52450 }, { "epoch": 3.564003261312678, "grad_norm": 1.8747693300247192, "learning_rate": 0.0005546694523712461, "loss": 3.4165, "step": 52455 }, { "epoch": 3.5643429813833403, "grad_norm": 1.377206802368164, "learning_rate": 0.0005546269873624133, "loss": 3.6272, "step": 52460 }, { "epoch": 3.564682701454002, "grad_norm": 1.276358962059021, "learning_rate": 0.0005545845223535807, "loss": 3.2718, "step": 52465 }, { "epoch": 3.5650224215246635, "grad_norm": 1.4565610885620117, "learning_rate": 0.000554542057344748, "loss": 3.3315, "step": 52470 }, { "epoch": 3.5653621415953256, "grad_norm": 1.4677073955535889, "learning_rate": 0.0005544995923359152, "loss": 3.4548, "step": 52475 }, { "epoch": 3.5657018616659872, "grad_norm": 1.430275321006775, "learning_rate": 0.0005544571273270825, "loss": 3.5554, "step": 52480 }, { "epoch": 3.566041581736649, "grad_norm": 1.3759030103683472, "learning_rate": 0.0005544146623182498, "loss": 3.4842, "step": 52485 }, { "epoch": 3.5663813018073105, "grad_norm": 1.517987608909607, "learning_rate": 0.000554372197309417, "loss": 3.4953, "step": 52490 }, { "epoch": 3.5667210218779726, "grad_norm": 1.1822500228881836, "learning_rate": 0.0005543297323005843, 
"loss": 3.7198, "step": 52495 }, { "epoch": 3.567060741948634, "grad_norm": 1.1568411588668823, "learning_rate": 0.0005542872672917517, "loss": 3.2951, "step": 52500 }, { "epoch": 3.567400462019296, "grad_norm": 1.1529231071472168, "learning_rate": 0.0005542448022829189, "loss": 3.2984, "step": 52505 }, { "epoch": 3.567740182089958, "grad_norm": 1.0521888732910156, "learning_rate": 0.0005542023372740862, "loss": 3.3483, "step": 52510 }, { "epoch": 3.5680799021606195, "grad_norm": 1.326061725616455, "learning_rate": 0.0005541598722652534, "loss": 3.2211, "step": 52515 }, { "epoch": 3.568419622231281, "grad_norm": 1.3315985202789307, "learning_rate": 0.0005541174072564207, "loss": 3.6562, "step": 52520 }, { "epoch": 3.5687593423019432, "grad_norm": 1.4192569255828857, "learning_rate": 0.000554074942247588, "loss": 3.7806, "step": 52525 }, { "epoch": 3.569099062372605, "grad_norm": 1.23712158203125, "learning_rate": 0.0005540324772387552, "loss": 3.464, "step": 52530 }, { "epoch": 3.5694387824432665, "grad_norm": 1.4614286422729492, "learning_rate": 0.0005539900122299226, "loss": 3.2102, "step": 52535 }, { "epoch": 3.5697785025139286, "grad_norm": 1.4542752504348755, "learning_rate": 0.0005539475472210899, "loss": 3.8257, "step": 52540 }, { "epoch": 3.57011822258459, "grad_norm": 1.3758841753005981, "learning_rate": 0.0005539050822122571, "loss": 3.4046, "step": 52545 }, { "epoch": 3.570457942655252, "grad_norm": 1.3642407655715942, "learning_rate": 0.0005538626172034243, "loss": 3.522, "step": 52550 }, { "epoch": 3.570797662725914, "grad_norm": 1.3552134037017822, "learning_rate": 0.0005538201521945917, "loss": 3.5418, "step": 52555 }, { "epoch": 3.5711373827965756, "grad_norm": 1.3941677808761597, "learning_rate": 0.0005537776871857589, "loss": 3.4116, "step": 52560 }, { "epoch": 3.571477102867237, "grad_norm": 1.2161478996276855, "learning_rate": 0.0005537352221769261, "loss": 3.3371, "step": 52565 }, { "epoch": 3.5718168229378993, "grad_norm": 1.2809001207351685, 
"learning_rate": 0.0005536927571680936, "loss": 3.5351, "step": 52570 }, { "epoch": 3.572156543008561, "grad_norm": 1.5054752826690674, "learning_rate": 0.0005536502921592608, "loss": 3.5717, "step": 52575 }, { "epoch": 3.5724962630792225, "grad_norm": 1.4688621759414673, "learning_rate": 0.000553607827150428, "loss": 3.5462, "step": 52580 }, { "epoch": 3.5728359831498846, "grad_norm": 1.3327860832214355, "learning_rate": 0.0005535653621415954, "loss": 3.7512, "step": 52585 }, { "epoch": 3.5731757032205462, "grad_norm": 1.6273282766342163, "learning_rate": 0.0005535228971327626, "loss": 3.6617, "step": 52590 }, { "epoch": 3.573515423291208, "grad_norm": 1.6916004419326782, "learning_rate": 0.0005534804321239298, "loss": 3.3958, "step": 52595 }, { "epoch": 3.57385514336187, "grad_norm": 1.2191028594970703, "learning_rate": 0.0005534379671150971, "loss": 3.4867, "step": 52600 }, { "epoch": 3.5741948634325316, "grad_norm": 0.941210925579071, "learning_rate": 0.0005533955021062645, "loss": 3.3509, "step": 52605 }, { "epoch": 3.574534583503193, "grad_norm": 2.063910722732544, "learning_rate": 0.0005533530370974317, "loss": 3.6814, "step": 52610 }, { "epoch": 3.5748743035738553, "grad_norm": 2.298301935195923, "learning_rate": 0.000553310572088599, "loss": 3.6048, "step": 52615 }, { "epoch": 3.575214023644517, "grad_norm": 1.185176134109497, "learning_rate": 0.0005532681070797663, "loss": 3.5199, "step": 52620 }, { "epoch": 3.5755537437151785, "grad_norm": 1.4583219289779663, "learning_rate": 0.0005532256420709335, "loss": 3.6422, "step": 52625 }, { "epoch": 3.5758934637858406, "grad_norm": 1.4663642644882202, "learning_rate": 0.0005531831770621008, "loss": 3.666, "step": 52630 }, { "epoch": 3.5762331838565022, "grad_norm": 1.440390706062317, "learning_rate": 0.000553140712053268, "loss": 3.2198, "step": 52635 }, { "epoch": 3.576572903927164, "grad_norm": 1.3750016689300537, "learning_rate": 0.0005530982470444354, "loss": 3.3459, "step": 52640 }, { "epoch": 
3.576912623997826, "grad_norm": 1.2456096410751343, "learning_rate": 0.0005530557820356027, "loss": 3.3728, "step": 52645 }, { "epoch": 3.5772523440684876, "grad_norm": 1.353424072265625, "learning_rate": 0.00055301331702677, "loss": 3.6169, "step": 52650 }, { "epoch": 3.577592064139149, "grad_norm": 1.2958142757415771, "learning_rate": 0.0005529708520179372, "loss": 3.4725, "step": 52655 }, { "epoch": 3.5779317842098113, "grad_norm": 1.3269609212875366, "learning_rate": 0.0005529283870091045, "loss": 3.3523, "step": 52660 }, { "epoch": 3.578271504280473, "grad_norm": 1.1722321510314941, "learning_rate": 0.0005528859220002717, "loss": 3.3882, "step": 52665 }, { "epoch": 3.5786112243511345, "grad_norm": 1.1811025142669678, "learning_rate": 0.0005528434569914391, "loss": 3.4133, "step": 52670 }, { "epoch": 3.5789509444217966, "grad_norm": 1.386216402053833, "learning_rate": 0.0005528009919826064, "loss": 3.5017, "step": 52675 }, { "epoch": 3.5792906644924583, "grad_norm": 1.0186525583267212, "learning_rate": 0.0005527585269737736, "loss": 3.5805, "step": 52680 }, { "epoch": 3.57963038456312, "grad_norm": 1.3914482593536377, "learning_rate": 0.000552716061964941, "loss": 3.5755, "step": 52685 }, { "epoch": 3.579970104633782, "grad_norm": 1.3826454877853394, "learning_rate": 0.0005526735969561082, "loss": 3.6906, "step": 52690 }, { "epoch": 3.5803098247044436, "grad_norm": 1.488879919052124, "learning_rate": 0.0005526311319472754, "loss": 3.3934, "step": 52695 }, { "epoch": 3.580649544775105, "grad_norm": 1.2126312255859375, "learning_rate": 0.0005525886669384427, "loss": 3.7277, "step": 52700 }, { "epoch": 3.5809892648457673, "grad_norm": 1.377860188484192, "learning_rate": 0.00055254620192961, "loss": 3.4644, "step": 52705 }, { "epoch": 3.581328984916429, "grad_norm": 1.3986374139785767, "learning_rate": 0.0005525037369207773, "loss": 3.4579, "step": 52710 }, { "epoch": 3.5816687049870906, "grad_norm": 1.0437735319137573, "learning_rate": 0.0005524612719119446, 
"loss": 3.4896, "step": 52715 }, { "epoch": 3.5820084250577526, "grad_norm": 1.4278281927108765, "learning_rate": 0.0005524188069031119, "loss": 3.5708, "step": 52720 }, { "epoch": 3.5823481451284143, "grad_norm": 1.271157145500183, "learning_rate": 0.0005523763418942791, "loss": 3.5106, "step": 52725 }, { "epoch": 3.582687865199076, "grad_norm": 1.2792174816131592, "learning_rate": 0.0005523338768854464, "loss": 3.3507, "step": 52730 }, { "epoch": 3.583027585269738, "grad_norm": 1.1226806640625, "learning_rate": 0.0005522914118766137, "loss": 3.5137, "step": 52735 }, { "epoch": 3.5833673053403996, "grad_norm": 1.3795008659362793, "learning_rate": 0.0005522489468677809, "loss": 3.3904, "step": 52740 }, { "epoch": 3.5837070254110612, "grad_norm": 1.2357617616653442, "learning_rate": 0.0005522064818589483, "loss": 3.1415, "step": 52745 }, { "epoch": 3.5840467454817233, "grad_norm": 1.3854892253875732, "learning_rate": 0.0005521640168501155, "loss": 3.4439, "step": 52750 }, { "epoch": 3.584386465552385, "grad_norm": 1.3418060541152954, "learning_rate": 0.0005521215518412828, "loss": 3.3687, "step": 52755 }, { "epoch": 3.5847261856230466, "grad_norm": 1.5464144945144653, "learning_rate": 0.0005520790868324501, "loss": 3.6194, "step": 52760 }, { "epoch": 3.5850659056937086, "grad_norm": 1.2974838018417358, "learning_rate": 0.0005520366218236173, "loss": 3.4438, "step": 52765 }, { "epoch": 3.5854056257643703, "grad_norm": 1.1915620565414429, "learning_rate": 0.0005519941568147846, "loss": 3.4816, "step": 52770 }, { "epoch": 3.585745345835032, "grad_norm": 1.249908208847046, "learning_rate": 0.000551951691805952, "loss": 3.4097, "step": 52775 }, { "epoch": 3.586085065905694, "grad_norm": 0.9706671833992004, "learning_rate": 0.0005519092267971192, "loss": 3.4736, "step": 52780 }, { "epoch": 3.5864247859763556, "grad_norm": 1.850521206855774, "learning_rate": 0.0005518667617882865, "loss": 3.1847, "step": 52785 }, { "epoch": 3.5867645060470172, "grad_norm": 
1.1043444871902466, "learning_rate": 0.0005518242967794538, "loss": 3.7153, "step": 52790 }, { "epoch": 3.587104226117679, "grad_norm": 1.5063979625701904, "learning_rate": 0.000551781831770621, "loss": 3.6586, "step": 52795 }, { "epoch": 3.587443946188341, "grad_norm": 1.221779465675354, "learning_rate": 0.0005517393667617882, "loss": 3.7948, "step": 52800 }, { "epoch": 3.5877836662590026, "grad_norm": 1.2794687747955322, "learning_rate": 0.0005516969017529556, "loss": 3.4865, "step": 52805 }, { "epoch": 3.588123386329664, "grad_norm": 1.4491151571273804, "learning_rate": 0.0005516544367441229, "loss": 3.8879, "step": 52810 }, { "epoch": 3.5884631064003263, "grad_norm": 1.2706489562988281, "learning_rate": 0.0005516119717352901, "loss": 3.6087, "step": 52815 }, { "epoch": 3.588802826470988, "grad_norm": 1.558585286140442, "learning_rate": 0.0005515695067264575, "loss": 3.576, "step": 52820 }, { "epoch": 3.5891425465416495, "grad_norm": 1.667608618736267, "learning_rate": 0.0005515270417176247, "loss": 3.5533, "step": 52825 }, { "epoch": 3.589482266612311, "grad_norm": 1.296613097190857, "learning_rate": 0.0005514845767087919, "loss": 3.6626, "step": 52830 }, { "epoch": 3.5898219866829733, "grad_norm": 1.6825695037841797, "learning_rate": 0.0005514421116999593, "loss": 3.4547, "step": 52835 }, { "epoch": 3.590161706753635, "grad_norm": 1.0881998538970947, "learning_rate": 0.0005513996466911265, "loss": 3.6945, "step": 52840 }, { "epoch": 3.5905014268242965, "grad_norm": 1.2647491693496704, "learning_rate": 0.0005513571816822938, "loss": 3.4891, "step": 52845 }, { "epoch": 3.5908411468949586, "grad_norm": 1.2921578884124756, "learning_rate": 0.0005513147166734612, "loss": 3.5222, "step": 52850 }, { "epoch": 3.59118086696562, "grad_norm": 1.7327473163604736, "learning_rate": 0.0005512722516646284, "loss": 3.4519, "step": 52855 }, { "epoch": 3.591520587036282, "grad_norm": 1.1331236362457275, "learning_rate": 0.0005512297866557956, "loss": 3.3645, "step": 52860 }, { 
"epoch": 3.591860307106944, "grad_norm": 1.2386752367019653, "learning_rate": 0.0005511873216469629, "loss": 3.5025, "step": 52865 }, { "epoch": 3.5922000271776056, "grad_norm": 1.4492294788360596, "learning_rate": 0.0005511448566381302, "loss": 3.5396, "step": 52870 }, { "epoch": 3.592539747248267, "grad_norm": 1.2515902519226074, "learning_rate": 0.0005511023916292974, "loss": 3.2258, "step": 52875 }, { "epoch": 3.5928794673189293, "grad_norm": 1.2437024116516113, "learning_rate": 0.0005510599266204648, "loss": 3.4606, "step": 52880 }, { "epoch": 3.593219187389591, "grad_norm": 1.1747878789901733, "learning_rate": 0.0005510174616116321, "loss": 3.511, "step": 52885 }, { "epoch": 3.5935589074602525, "grad_norm": 1.0209323167800903, "learning_rate": 0.0005509749966027993, "loss": 3.4408, "step": 52890 }, { "epoch": 3.5938986275309146, "grad_norm": 1.2512402534484863, "learning_rate": 0.0005509325315939666, "loss": 3.5326, "step": 52895 }, { "epoch": 3.5942383476015762, "grad_norm": 1.1741554737091064, "learning_rate": 0.0005508900665851338, "loss": 3.3314, "step": 52900 }, { "epoch": 3.594578067672238, "grad_norm": 1.422891616821289, "learning_rate": 0.0005508476015763011, "loss": 3.5027, "step": 52905 }, { "epoch": 3.5949177877429, "grad_norm": 1.384940505027771, "learning_rate": 0.0005508051365674684, "loss": 3.5367, "step": 52910 }, { "epoch": 3.5952575078135616, "grad_norm": 1.3618583679199219, "learning_rate": 0.0005507626715586357, "loss": 3.7269, "step": 52915 }, { "epoch": 3.595597227884223, "grad_norm": 1.0453861951828003, "learning_rate": 0.000550720206549803, "loss": 3.5155, "step": 52920 }, { "epoch": 3.5959369479548853, "grad_norm": 1.1177592277526855, "learning_rate": 0.0005506777415409703, "loss": 3.0784, "step": 52925 }, { "epoch": 3.596276668025547, "grad_norm": 1.4702762365341187, "learning_rate": 0.0005506352765321375, "loss": 3.4908, "step": 52930 }, { "epoch": 3.5966163880962085, "grad_norm": 1.0338389873504639, "learning_rate": 
0.0005505928115233047, "loss": 3.3342, "step": 52935 }, { "epoch": 3.5969561081668706, "grad_norm": 1.454897165298462, "learning_rate": 0.0005505503465144721, "loss": 3.327, "step": 52940 }, { "epoch": 3.5972958282375322, "grad_norm": 1.5461400747299194, "learning_rate": 0.0005505078815056393, "loss": 3.3072, "step": 52945 }, { "epoch": 3.597635548308194, "grad_norm": 1.3495917320251465, "learning_rate": 0.0005504654164968066, "loss": 3.5242, "step": 52950 }, { "epoch": 3.597975268378856, "grad_norm": 1.7000997066497803, "learning_rate": 0.000550422951487974, "loss": 3.499, "step": 52955 }, { "epoch": 3.5983149884495176, "grad_norm": 1.207806944847107, "learning_rate": 0.0005503804864791412, "loss": 3.4365, "step": 52960 }, { "epoch": 3.598654708520179, "grad_norm": 1.1588213443756104, "learning_rate": 0.0005503380214703084, "loss": 3.5191, "step": 52965 }, { "epoch": 3.5989944285908413, "grad_norm": 1.3337286710739136, "learning_rate": 0.0005502955564614758, "loss": 3.4634, "step": 52970 }, { "epoch": 3.599334148661503, "grad_norm": 0.9477814435958862, "learning_rate": 0.000550253091452643, "loss": 3.6233, "step": 52975 }, { "epoch": 3.5996738687321646, "grad_norm": 1.172724723815918, "learning_rate": 0.0005502106264438102, "loss": 3.302, "step": 52980 }, { "epoch": 3.6000135888028266, "grad_norm": 1.648568868637085, "learning_rate": 0.0005501681614349777, "loss": 3.2857, "step": 52985 }, { "epoch": 3.6003533088734883, "grad_norm": 1.3780258893966675, "learning_rate": 0.0005501256964261449, "loss": 3.4915, "step": 52990 }, { "epoch": 3.60069302894415, "grad_norm": 1.248826265335083, "learning_rate": 0.0005500832314173121, "loss": 3.7353, "step": 52995 }, { "epoch": 3.601032749014812, "grad_norm": 1.1617155075073242, "learning_rate": 0.0005500407664084794, "loss": 3.4088, "step": 53000 }, { "epoch": 3.6013724690854736, "grad_norm": 1.0869203805923462, "learning_rate": 0.0005499983013996467, "loss": 3.5485, "step": 53005 }, { "epoch": 3.6017121891561352, 
"grad_norm": 1.2235714197158813, "learning_rate": 0.000549955836390814, "loss": 3.3749, "step": 53010 }, { "epoch": 3.6020519092267973, "grad_norm": 1.1601576805114746, "learning_rate": 0.0005499133713819812, "loss": 3.4313, "step": 53015 }, { "epoch": 3.602391629297459, "grad_norm": 1.2803552150726318, "learning_rate": 0.0005498709063731486, "loss": 3.4569, "step": 53020 }, { "epoch": 3.6027313493681206, "grad_norm": 1.622918963432312, "learning_rate": 0.0005498284413643159, "loss": 3.3852, "step": 53025 }, { "epoch": 3.6030710694387826, "grad_norm": 1.3296759128570557, "learning_rate": 0.0005497859763554831, "loss": 3.3496, "step": 53030 }, { "epoch": 3.6034107895094443, "grad_norm": 1.228760004043579, "learning_rate": 0.0005497435113466504, "loss": 3.4224, "step": 53035 }, { "epoch": 3.603750509580106, "grad_norm": 1.3809866905212402, "learning_rate": 0.0005497010463378177, "loss": 3.2709, "step": 53040 }, { "epoch": 3.604090229650768, "grad_norm": 1.61808180809021, "learning_rate": 0.0005496585813289849, "loss": 3.378, "step": 53045 }, { "epoch": 3.6044299497214296, "grad_norm": 1.5495827198028564, "learning_rate": 0.0005496161163201521, "loss": 3.3325, "step": 53050 }, { "epoch": 3.6047696697920912, "grad_norm": 1.0239750146865845, "learning_rate": 0.0005495736513113196, "loss": 3.5896, "step": 53055 }, { "epoch": 3.6051093898627533, "grad_norm": 1.3006856441497803, "learning_rate": 0.0005495311863024868, "loss": 3.5902, "step": 53060 }, { "epoch": 3.605449109933415, "grad_norm": 1.40126633644104, "learning_rate": 0.000549488721293654, "loss": 3.5004, "step": 53065 }, { "epoch": 3.6057888300040766, "grad_norm": 0.9286918640136719, "learning_rate": 0.0005494462562848214, "loss": 3.4555, "step": 53070 }, { "epoch": 3.6061285500747386, "grad_norm": 1.4274256229400635, "learning_rate": 0.0005494037912759886, "loss": 3.3756, "step": 53075 }, { "epoch": 3.6064682701454003, "grad_norm": 1.2657995223999023, "learning_rate": 0.0005493613262671558, "loss": 3.4644, 
"step": 53080 }, { "epoch": 3.606807990216062, "grad_norm": 1.311570644378662, "learning_rate": 0.0005493188612583232, "loss": 3.2243, "step": 53085 }, { "epoch": 3.607147710286724, "grad_norm": 1.3502657413482666, "learning_rate": 0.0005492763962494905, "loss": 3.3789, "step": 53090 }, { "epoch": 3.6074874303573856, "grad_norm": 1.1025516986846924, "learning_rate": 0.0005492339312406577, "loss": 3.2649, "step": 53095 }, { "epoch": 3.6078271504280472, "grad_norm": 1.15450918674469, "learning_rate": 0.000549191466231825, "loss": 3.5206, "step": 53100 }, { "epoch": 3.6081668704987093, "grad_norm": 1.062401533126831, "learning_rate": 0.0005491490012229923, "loss": 3.5305, "step": 53105 }, { "epoch": 3.608506590569371, "grad_norm": 1.2223892211914062, "learning_rate": 0.0005491065362141595, "loss": 3.0977, "step": 53110 }, { "epoch": 3.6088463106400326, "grad_norm": 1.1209524869918823, "learning_rate": 0.0005490640712053268, "loss": 3.5194, "step": 53115 }, { "epoch": 3.6091860307106947, "grad_norm": 1.330102562904358, "learning_rate": 0.0005490216061964941, "loss": 3.5384, "step": 53120 }, { "epoch": 3.6095257507813563, "grad_norm": 1.568116545677185, "learning_rate": 0.0005489791411876614, "loss": 3.6235, "step": 53125 }, { "epoch": 3.609865470852018, "grad_norm": 1.6655246019363403, "learning_rate": 0.0005489366761788287, "loss": 3.619, "step": 53130 }, { "epoch": 3.6102051909226796, "grad_norm": 1.2448006868362427, "learning_rate": 0.000548894211169996, "loss": 3.7021, "step": 53135 }, { "epoch": 3.6105449109933416, "grad_norm": 1.61847722530365, "learning_rate": 0.0005488517461611632, "loss": 3.3202, "step": 53140 }, { "epoch": 3.6108846310640033, "grad_norm": 1.378968358039856, "learning_rate": 0.0005488092811523305, "loss": 3.4117, "step": 53145 }, { "epoch": 3.611224351134665, "grad_norm": 1.3008689880371094, "learning_rate": 0.0005487668161434977, "loss": 3.5327, "step": 53150 }, { "epoch": 3.611564071205327, "grad_norm": 1.1482030153274536, "learning_rate": 
0.000548724351134665, "loss": 3.4948, "step": 53155 }, { "epoch": 3.6119037912759886, "grad_norm": 1.4911420345306396, "learning_rate": 0.0005486818861258324, "loss": 3.4665, "step": 53160 }, { "epoch": 3.6122435113466502, "grad_norm": 1.3578203916549683, "learning_rate": 0.0005486394211169996, "loss": 3.6339, "step": 53165 }, { "epoch": 3.612583231417312, "grad_norm": 1.118603229522705, "learning_rate": 0.0005485969561081669, "loss": 3.3721, "step": 53170 }, { "epoch": 3.612922951487974, "grad_norm": 1.4513522386550903, "learning_rate": 0.0005485544910993342, "loss": 3.3138, "step": 53175 }, { "epoch": 3.6132626715586356, "grad_norm": 1.3012416362762451, "learning_rate": 0.0005485120260905014, "loss": 3.5171, "step": 53180 }, { "epoch": 3.613602391629297, "grad_norm": 1.5875164270401, "learning_rate": 0.0005484695610816686, "loss": 3.4329, "step": 53185 }, { "epoch": 3.6139421116999593, "grad_norm": 1.6096445322036743, "learning_rate": 0.000548427096072836, "loss": 3.4589, "step": 53190 }, { "epoch": 3.614281831770621, "grad_norm": 1.3923542499542236, "learning_rate": 0.0005483846310640033, "loss": 3.4219, "step": 53195 }, { "epoch": 3.6146215518412825, "grad_norm": 1.5667973756790161, "learning_rate": 0.0005483421660551705, "loss": 3.4898, "step": 53200 }, { "epoch": 3.6149612719119446, "grad_norm": 1.2940431833267212, "learning_rate": 0.0005482997010463379, "loss": 3.3355, "step": 53205 }, { "epoch": 3.6153009919826062, "grad_norm": 1.3580210208892822, "learning_rate": 0.0005482572360375051, "loss": 3.649, "step": 53210 }, { "epoch": 3.615640712053268, "grad_norm": 1.6301329135894775, "learning_rate": 0.0005482147710286723, "loss": 3.6269, "step": 53215 }, { "epoch": 3.61598043212393, "grad_norm": 1.1396803855895996, "learning_rate": 0.0005481723060198397, "loss": 3.4591, "step": 53220 }, { "epoch": 3.6163201521945916, "grad_norm": 1.2257499694824219, "learning_rate": 0.0005481298410110069, "loss": 3.2572, "step": 53225 }, { "epoch": 3.616659872265253, 
"grad_norm": 1.1163829565048218, "learning_rate": 0.0005480873760021742, "loss": 3.397, "step": 53230 }, { "epoch": 3.6169995923359153, "grad_norm": 1.2678159475326538, "learning_rate": 0.0005480449109933416, "loss": 3.5105, "step": 53235 }, { "epoch": 3.617339312406577, "grad_norm": 1.600057601928711, "learning_rate": 0.0005480024459845088, "loss": 3.6608, "step": 53240 }, { "epoch": 3.6176790324772385, "grad_norm": 1.5399080514907837, "learning_rate": 0.000547959980975676, "loss": 3.2776, "step": 53245 }, { "epoch": 3.6180187525479006, "grad_norm": 1.4555686712265015, "learning_rate": 0.0005479175159668433, "loss": 3.4974, "step": 53250 }, { "epoch": 3.6183584726185622, "grad_norm": 1.5495870113372803, "learning_rate": 0.0005478750509580106, "loss": 3.5358, "step": 53255 }, { "epoch": 3.618698192689224, "grad_norm": 1.4028065204620361, "learning_rate": 0.0005478325859491778, "loss": 3.2893, "step": 53260 }, { "epoch": 3.619037912759886, "grad_norm": 1.2306034564971924, "learning_rate": 0.0005477901209403452, "loss": 3.3788, "step": 53265 }, { "epoch": 3.6193776328305476, "grad_norm": 1.5225114822387695, "learning_rate": 0.0005477476559315125, "loss": 3.3164, "step": 53270 }, { "epoch": 3.619717352901209, "grad_norm": 1.1676913499832153, "learning_rate": 0.0005477051909226797, "loss": 3.3802, "step": 53275 }, { "epoch": 3.6200570729718713, "grad_norm": 1.342644214630127, "learning_rate": 0.000547662725913847, "loss": 3.6296, "step": 53280 }, { "epoch": 3.620396793042533, "grad_norm": 1.4519226551055908, "learning_rate": 0.0005476202609050142, "loss": 3.4095, "step": 53285 }, { "epoch": 3.6207365131131946, "grad_norm": 1.0662461519241333, "learning_rate": 0.0005475777958961815, "loss": 3.3963, "step": 53290 }, { "epoch": 3.6210762331838566, "grad_norm": 1.4660300016403198, "learning_rate": 0.0005475353308873488, "loss": 3.3601, "step": 53295 }, { "epoch": 3.6214159532545183, "grad_norm": 1.3105705976486206, "learning_rate": 0.0005474928658785161, "loss": 3.3876, 
"step": 53300 }, { "epoch": 3.62175567332518, "grad_norm": 1.1779459714889526, "learning_rate": 0.0005474504008696834, "loss": 3.5505, "step": 53305 }, { "epoch": 3.622095393395842, "grad_norm": 1.4224601984024048, "learning_rate": 0.0005474079358608507, "loss": 3.6359, "step": 53310 }, { "epoch": 3.6224351134665036, "grad_norm": 1.1195476055145264, "learning_rate": 0.0005473654708520179, "loss": 3.3755, "step": 53315 }, { "epoch": 3.6227748335371652, "grad_norm": 1.8420450687408447, "learning_rate": 0.0005473230058431852, "loss": 3.1171, "step": 53320 }, { "epoch": 3.6231145536078273, "grad_norm": 1.0798518657684326, "learning_rate": 0.0005472805408343525, "loss": 3.4069, "step": 53325 }, { "epoch": 3.623454273678489, "grad_norm": 1.3809312582015991, "learning_rate": 0.0005472380758255197, "loss": 3.6207, "step": 53330 }, { "epoch": 3.6237939937491506, "grad_norm": 1.2315199375152588, "learning_rate": 0.000547195610816687, "loss": 3.4328, "step": 53335 }, { "epoch": 3.6241337138198126, "grad_norm": 1.2818289995193481, "learning_rate": 0.0005471531458078544, "loss": 3.615, "step": 53340 }, { "epoch": 3.6244734338904743, "grad_norm": 1.5339040756225586, "learning_rate": 0.0005471106807990216, "loss": 3.2311, "step": 53345 }, { "epoch": 3.624813153961136, "grad_norm": 1.3561630249023438, "learning_rate": 0.0005470682157901889, "loss": 3.264, "step": 53350 }, { "epoch": 3.625152874031798, "grad_norm": 1.0898973941802979, "learning_rate": 0.0005470257507813562, "loss": 3.4024, "step": 53355 }, { "epoch": 3.6254925941024596, "grad_norm": 1.4938673973083496, "learning_rate": 0.0005469832857725234, "loss": 3.6548, "step": 53360 }, { "epoch": 3.6258323141731212, "grad_norm": 1.020064115524292, "learning_rate": 0.0005469408207636908, "loss": 3.3826, "step": 53365 }, { "epoch": 3.6261720342437833, "grad_norm": 1.024623990058899, "learning_rate": 0.0005468983557548581, "loss": 3.5966, "step": 53370 }, { "epoch": 3.626511754314445, "grad_norm": 1.237255334854126, 
"learning_rate": 0.0005468558907460253, "loss": 3.2141, "step": 53375 }, { "epoch": 3.6268514743851066, "grad_norm": 1.5375505685806274, "learning_rate": 0.0005468134257371926, "loss": 3.6575, "step": 53380 }, { "epoch": 3.6271911944557687, "grad_norm": 1.2275457382202148, "learning_rate": 0.0005467709607283598, "loss": 3.5877, "step": 53385 }, { "epoch": 3.6275309145264303, "grad_norm": 1.7217159271240234, "learning_rate": 0.0005467284957195271, "loss": 3.3244, "step": 53390 }, { "epoch": 3.627870634597092, "grad_norm": 1.725554347038269, "learning_rate": 0.0005466860307106944, "loss": 3.3754, "step": 53395 }, { "epoch": 3.628210354667754, "grad_norm": 1.348932147026062, "learning_rate": 0.0005466435657018617, "loss": 3.3438, "step": 53400 }, { "epoch": 3.6285500747384156, "grad_norm": 1.3367271423339844, "learning_rate": 0.000546601100693029, "loss": 3.4016, "step": 53405 }, { "epoch": 3.6288897948090773, "grad_norm": 1.38823401927948, "learning_rate": 0.0005465586356841963, "loss": 3.6924, "step": 53410 }, { "epoch": 3.6292295148797393, "grad_norm": 1.1969809532165527, "learning_rate": 0.0005465161706753635, "loss": 3.346, "step": 53415 }, { "epoch": 3.629569234950401, "grad_norm": 1.1436094045639038, "learning_rate": 0.0005464737056665308, "loss": 3.2936, "step": 53420 }, { "epoch": 3.6299089550210626, "grad_norm": 1.2142119407653809, "learning_rate": 0.0005464312406576981, "loss": 3.5877, "step": 53425 }, { "epoch": 3.6302486750917247, "grad_norm": 1.404188871383667, "learning_rate": 0.0005463887756488653, "loss": 3.3939, "step": 53430 }, { "epoch": 3.6305883951623863, "grad_norm": 1.4360719919204712, "learning_rate": 0.0005463463106400326, "loss": 3.6397, "step": 53435 }, { "epoch": 3.630928115233048, "grad_norm": 1.3743973970413208, "learning_rate": 0.0005463038456312, "loss": 3.5366, "step": 53440 }, { "epoch": 3.63126783530371, "grad_norm": 1.1616629362106323, "learning_rate": 0.0005462613806223672, "loss": 3.3969, "step": 53445 }, { "epoch": 
3.6316075553743716, "grad_norm": 1.0200684070587158, "learning_rate": 0.0005462189156135344, "loss": 3.5151, "step": 53450 }, { "epoch": 3.6319472754450333, "grad_norm": 1.065769910812378, "learning_rate": 0.0005461764506047018, "loss": 3.5392, "step": 53455 }, { "epoch": 3.6322869955156953, "grad_norm": 1.662165641784668, "learning_rate": 0.000546133985595869, "loss": 3.1364, "step": 53460 }, { "epoch": 3.632626715586357, "grad_norm": 1.379327416419983, "learning_rate": 0.0005460915205870362, "loss": 3.4907, "step": 53465 }, { "epoch": 3.6329664356570186, "grad_norm": 1.091216802597046, "learning_rate": 0.0005460490555782037, "loss": 3.3994, "step": 53470 }, { "epoch": 3.6333061557276802, "grad_norm": 1.6442824602127075, "learning_rate": 0.0005460065905693709, "loss": 3.5502, "step": 53475 }, { "epoch": 3.6336458757983423, "grad_norm": 1.4563603401184082, "learning_rate": 0.0005459641255605381, "loss": 3.4986, "step": 53480 }, { "epoch": 3.633985595869004, "grad_norm": 1.2800222635269165, "learning_rate": 0.0005459216605517054, "loss": 3.4983, "step": 53485 }, { "epoch": 3.6343253159396656, "grad_norm": 1.5863971710205078, "learning_rate": 0.0005458791955428727, "loss": 3.5114, "step": 53490 }, { "epoch": 3.6346650360103276, "grad_norm": 1.3682918548583984, "learning_rate": 0.0005458367305340399, "loss": 3.4714, "step": 53495 }, { "epoch": 3.6350047560809893, "grad_norm": 0.9732731580734253, "learning_rate": 0.0005457942655252072, "loss": 3.5508, "step": 53500 }, { "epoch": 3.635344476151651, "grad_norm": 1.3804999589920044, "learning_rate": 0.0005457518005163746, "loss": 3.4264, "step": 53505 }, { "epoch": 3.635684196222313, "grad_norm": 1.088505744934082, "learning_rate": 0.0005457093355075418, "loss": 3.5581, "step": 53510 }, { "epoch": 3.6360239162929746, "grad_norm": 1.072143793106079, "learning_rate": 0.0005456668704987091, "loss": 3.3807, "step": 53515 }, { "epoch": 3.6363636363636362, "grad_norm": 1.1307039260864258, "learning_rate": 0.0005456244054898764, 
"loss": 3.6967, "step": 53520 }, { "epoch": 3.636703356434298, "grad_norm": 1.8021225929260254, "learning_rate": 0.0005455819404810436, "loss": 3.219, "step": 53525 }, { "epoch": 3.63704307650496, "grad_norm": 1.2538456916809082, "learning_rate": 0.0005455394754722109, "loss": 3.7274, "step": 53530 }, { "epoch": 3.6373827965756216, "grad_norm": 1.068774938583374, "learning_rate": 0.0005454970104633781, "loss": 3.7497, "step": 53535 }, { "epoch": 3.637722516646283, "grad_norm": 1.7846884727478027, "learning_rate": 0.0005454545454545455, "loss": 3.5889, "step": 53540 }, { "epoch": 3.6380622367169453, "grad_norm": 1.0168652534484863, "learning_rate": 0.0005454120804457128, "loss": 3.5495, "step": 53545 }, { "epoch": 3.638401956787607, "grad_norm": 1.3548394441604614, "learning_rate": 0.00054536961543688, "loss": 3.599, "step": 53550 }, { "epoch": 3.6387416768582685, "grad_norm": 1.1908776760101318, "learning_rate": 0.0005453271504280473, "loss": 3.7437, "step": 53555 }, { "epoch": 3.6390813969289306, "grad_norm": 1.475528597831726, "learning_rate": 0.0005452846854192146, "loss": 3.2651, "step": 53560 }, { "epoch": 3.6394211169995923, "grad_norm": 1.354955792427063, "learning_rate": 0.0005452422204103818, "loss": 3.4745, "step": 53565 }, { "epoch": 3.639760837070254, "grad_norm": 1.181809902191162, "learning_rate": 0.000545199755401549, "loss": 3.4001, "step": 53570 }, { "epoch": 3.640100557140916, "grad_norm": 1.6109287738800049, "learning_rate": 0.0005451572903927165, "loss": 3.7351, "step": 53575 }, { "epoch": 3.6404402772115776, "grad_norm": 1.2632445096969604, "learning_rate": 0.0005451148253838837, "loss": 3.6299, "step": 53580 }, { "epoch": 3.6407799972822392, "grad_norm": 1.1415106058120728, "learning_rate": 0.0005450723603750509, "loss": 3.342, "step": 53585 }, { "epoch": 3.6411197173529013, "grad_norm": 1.3058922290802002, "learning_rate": 0.0005450298953662183, "loss": 3.6736, "step": 53590 }, { "epoch": 3.641459437423563, "grad_norm": 1.368445873260498, 
"learning_rate": 0.0005449874303573855, "loss": 3.3113, "step": 53595 }, { "epoch": 3.6417991574942246, "grad_norm": 1.6865979433059692, "learning_rate": 0.0005449449653485527, "loss": 3.6229, "step": 53600 }, { "epoch": 3.6421388775648866, "grad_norm": 1.1255096197128296, "learning_rate": 0.0005449025003397201, "loss": 3.4246, "step": 53605 }, { "epoch": 3.6424785976355483, "grad_norm": 1.4066771268844604, "learning_rate": 0.0005448600353308874, "loss": 3.3104, "step": 53610 }, { "epoch": 3.64281831770621, "grad_norm": 1.438997507095337, "learning_rate": 0.0005448175703220546, "loss": 3.5547, "step": 53615 }, { "epoch": 3.643158037776872, "grad_norm": 1.4651395082473755, "learning_rate": 0.000544775105313222, "loss": 3.3963, "step": 53620 }, { "epoch": 3.6434977578475336, "grad_norm": 1.2630820274353027, "learning_rate": 0.0005447326403043892, "loss": 3.6521, "step": 53625 }, { "epoch": 3.6438374779181952, "grad_norm": 1.4543360471725464, "learning_rate": 0.0005446901752955564, "loss": 3.7334, "step": 53630 }, { "epoch": 3.6441771979888573, "grad_norm": 1.303518295288086, "learning_rate": 0.0005446477102867237, "loss": 3.3427, "step": 53635 }, { "epoch": 3.644516918059519, "grad_norm": 1.234944224357605, "learning_rate": 0.000544605245277891, "loss": 3.6395, "step": 53640 }, { "epoch": 3.6448566381301806, "grad_norm": 1.2315196990966797, "learning_rate": 0.0005445627802690583, "loss": 3.2969, "step": 53645 }, { "epoch": 3.6451963582008426, "grad_norm": 1.178971290588379, "learning_rate": 0.0005445203152602256, "loss": 3.5264, "step": 53650 }, { "epoch": 3.6455360782715043, "grad_norm": 1.4962904453277588, "learning_rate": 0.0005444778502513929, "loss": 3.4616, "step": 53655 }, { "epoch": 3.645875798342166, "grad_norm": 1.3914316892623901, "learning_rate": 0.0005444353852425601, "loss": 3.6223, "step": 53660 }, { "epoch": 3.646215518412828, "grad_norm": 1.2069494724273682, "learning_rate": 0.0005443929202337274, "loss": 3.4485, "step": 53665 }, { "epoch": 
3.6465552384834896, "grad_norm": 1.3590781688690186, "learning_rate": 0.0005443504552248946, "loss": 3.5548, "step": 53670 }, { "epoch": 3.6468949585541512, "grad_norm": 1.6879421472549438, "learning_rate": 0.0005443079902160619, "loss": 3.4197, "step": 53675 }, { "epoch": 3.6472346786248133, "grad_norm": 1.2519758939743042, "learning_rate": 0.0005442655252072293, "loss": 3.3263, "step": 53680 }, { "epoch": 3.647574398695475, "grad_norm": 1.1048624515533447, "learning_rate": 0.0005442230601983965, "loss": 3.5512, "step": 53685 }, { "epoch": 3.6479141187661366, "grad_norm": 1.1541681289672852, "learning_rate": 0.0005441805951895639, "loss": 3.5339, "step": 53690 }, { "epoch": 3.6482538388367987, "grad_norm": 1.5051294565200806, "learning_rate": 0.0005441381301807311, "loss": 3.5898, "step": 53695 }, { "epoch": 3.6485935589074603, "grad_norm": 1.4367657899856567, "learning_rate": 0.0005440956651718983, "loss": 3.5062, "step": 53700 }, { "epoch": 3.648933278978122, "grad_norm": 1.7147890329360962, "learning_rate": 0.0005440532001630657, "loss": 3.4581, "step": 53705 }, { "epoch": 3.649272999048784, "grad_norm": 1.4147019386291504, "learning_rate": 0.0005440107351542329, "loss": 3.6846, "step": 53710 }, { "epoch": 3.6496127191194456, "grad_norm": 1.1027156114578247, "learning_rate": 0.0005439682701454002, "loss": 3.6678, "step": 53715 }, { "epoch": 3.6499524391901073, "grad_norm": 1.2196111679077148, "learning_rate": 0.0005439258051365676, "loss": 3.4846, "step": 53720 }, { "epoch": 3.6502921592607693, "grad_norm": 1.1198842525482178, "learning_rate": 0.0005438833401277348, "loss": 3.5323, "step": 53725 }, { "epoch": 3.650631879331431, "grad_norm": 1.485650658607483, "learning_rate": 0.000543840875118902, "loss": 3.4532, "step": 53730 }, { "epoch": 3.6509715994020926, "grad_norm": 1.2510199546813965, "learning_rate": 0.0005437984101100693, "loss": 3.3331, "step": 53735 }, { "epoch": 3.6513113194727547, "grad_norm": 1.2154459953308105, "learning_rate": 
0.0005437559451012366, "loss": 3.3613, "step": 53740 }, { "epoch": 3.6516510395434163, "grad_norm": 1.437968134880066, "learning_rate": 0.0005437134800924038, "loss": 3.2405, "step": 53745 }, { "epoch": 3.651990759614078, "grad_norm": 1.0418214797973633, "learning_rate": 0.0005436710150835712, "loss": 3.4218, "step": 53750 }, { "epoch": 3.65233047968474, "grad_norm": 1.0513510704040527, "learning_rate": 0.0005436285500747385, "loss": 3.5982, "step": 53755 }, { "epoch": 3.6526701997554016, "grad_norm": 1.3614336252212524, "learning_rate": 0.0005435860850659057, "loss": 3.3484, "step": 53760 }, { "epoch": 3.6530099198260633, "grad_norm": 1.3196991682052612, "learning_rate": 0.000543543620057073, "loss": 3.4674, "step": 53765 }, { "epoch": 3.6533496398967253, "grad_norm": 1.427837610244751, "learning_rate": 0.0005435011550482403, "loss": 3.2596, "step": 53770 }, { "epoch": 3.653689359967387, "grad_norm": 1.1274924278259277, "learning_rate": 0.0005434586900394075, "loss": 3.5961, "step": 53775 }, { "epoch": 3.6540290800380486, "grad_norm": 1.310281753540039, "learning_rate": 0.0005434162250305748, "loss": 3.5007, "step": 53780 }, { "epoch": 3.6543688001087107, "grad_norm": 1.1285055875778198, "learning_rate": 0.0005433737600217421, "loss": 3.5466, "step": 53785 }, { "epoch": 3.6547085201793723, "grad_norm": 1.3711143732070923, "learning_rate": 0.0005433312950129094, "loss": 3.3786, "step": 53790 }, { "epoch": 3.655048240250034, "grad_norm": 1.1857149600982666, "learning_rate": 0.0005432888300040767, "loss": 3.4984, "step": 53795 }, { "epoch": 3.655387960320696, "grad_norm": 1.1948198080062866, "learning_rate": 0.0005432463649952439, "loss": 3.4495, "step": 53800 }, { "epoch": 3.6557276803913576, "grad_norm": 1.57228422164917, "learning_rate": 0.0005432038999864112, "loss": 3.6289, "step": 53805 }, { "epoch": 3.6560674004620193, "grad_norm": 1.3597612380981445, "learning_rate": 0.0005431614349775785, "loss": 3.3415, "step": 53810 }, { "epoch": 3.656407120532681, 
"grad_norm": 1.0659183263778687, "learning_rate": 0.0005431189699687457, "loss": 3.5755, "step": 53815 }, { "epoch": 3.656746840603343, "grad_norm": 1.1626521348953247, "learning_rate": 0.000543076504959913, "loss": 3.624, "step": 53820 }, { "epoch": 3.6570865606740046, "grad_norm": 1.453741431236267, "learning_rate": 0.0005430340399510804, "loss": 3.5388, "step": 53825 }, { "epoch": 3.6574262807446662, "grad_norm": 1.2947965860366821, "learning_rate": 0.0005429915749422476, "loss": 3.2154, "step": 53830 }, { "epoch": 3.6577660008153283, "grad_norm": 1.1835510730743408, "learning_rate": 0.0005429491099334148, "loss": 3.0027, "step": 53835 }, { "epoch": 3.65810572088599, "grad_norm": 1.513434648513794, "learning_rate": 0.0005429066449245822, "loss": 3.1966, "step": 53840 }, { "epoch": 3.6584454409566516, "grad_norm": 1.714576005935669, "learning_rate": 0.0005428641799157494, "loss": 3.5008, "step": 53845 }, { "epoch": 3.6587851610273137, "grad_norm": 1.1707746982574463, "learning_rate": 0.0005428217149069166, "loss": 3.4979, "step": 53850 }, { "epoch": 3.6591248810979753, "grad_norm": 1.5700708627700806, "learning_rate": 0.0005427792498980841, "loss": 3.5674, "step": 53855 }, { "epoch": 3.659464601168637, "grad_norm": 1.3721076250076294, "learning_rate": 0.0005427367848892513, "loss": 3.4078, "step": 53860 }, { "epoch": 3.6598043212392986, "grad_norm": 1.1601696014404297, "learning_rate": 0.0005426943198804185, "loss": 3.1239, "step": 53865 }, { "epoch": 3.6601440413099606, "grad_norm": 1.1078572273254395, "learning_rate": 0.0005426518548715859, "loss": 3.4378, "step": 53870 }, { "epoch": 3.6604837613806223, "grad_norm": 1.078911542892456, "learning_rate": 0.0005426093898627531, "loss": 3.4826, "step": 53875 }, { "epoch": 3.660823481451284, "grad_norm": 1.0203286409378052, "learning_rate": 0.0005425669248539203, "loss": 3.4324, "step": 53880 }, { "epoch": 3.661163201521946, "grad_norm": 1.0876822471618652, "learning_rate": 0.0005425244598450876, "loss": 3.1986, 
"step": 53885 }, { "epoch": 3.6615029215926076, "grad_norm": 1.1929078102111816, "learning_rate": 0.000542481994836255, "loss": 3.4652, "step": 53890 }, { "epoch": 3.6618426416632692, "grad_norm": 1.4408190250396729, "learning_rate": 0.0005424395298274222, "loss": 3.6407, "step": 53895 }, { "epoch": 3.6621823617339313, "grad_norm": 1.2252064943313599, "learning_rate": 0.0005423970648185895, "loss": 3.4539, "step": 53900 }, { "epoch": 3.662522081804593, "grad_norm": 0.9933871030807495, "learning_rate": 0.0005423545998097568, "loss": 3.6897, "step": 53905 }, { "epoch": 3.6628618018752546, "grad_norm": 1.3924975395202637, "learning_rate": 0.000542312134800924, "loss": 3.4537, "step": 53910 }, { "epoch": 3.6632015219459166, "grad_norm": 1.5621347427368164, "learning_rate": 0.0005422696697920913, "loss": 3.3504, "step": 53915 }, { "epoch": 3.6635412420165783, "grad_norm": 1.310222864151001, "learning_rate": 0.0005422272047832585, "loss": 3.6137, "step": 53920 }, { "epoch": 3.66388096208724, "grad_norm": 1.2838695049285889, "learning_rate": 0.0005421847397744259, "loss": 3.5749, "step": 53925 }, { "epoch": 3.664220682157902, "grad_norm": 1.0677992105484009, "learning_rate": 0.0005421422747655932, "loss": 3.709, "step": 53930 }, { "epoch": 3.6645604022285636, "grad_norm": 1.2972221374511719, "learning_rate": 0.0005420998097567604, "loss": 3.5097, "step": 53935 }, { "epoch": 3.6649001222992252, "grad_norm": 1.0984959602355957, "learning_rate": 0.0005420573447479277, "loss": 3.5515, "step": 53940 }, { "epoch": 3.6652398423698873, "grad_norm": 1.254115343093872, "learning_rate": 0.000542014879739095, "loss": 3.3599, "step": 53945 }, { "epoch": 3.665579562440549, "grad_norm": 1.3308664560317993, "learning_rate": 0.0005419724147302622, "loss": 3.3666, "step": 53950 }, { "epoch": 3.6659192825112106, "grad_norm": 1.5403821468353271, "learning_rate": 0.0005419299497214295, "loss": 3.7896, "step": 53955 }, { "epoch": 3.6662590025818727, "grad_norm": 1.1821328401565552, 
"learning_rate": 0.0005418874847125969, "loss": 3.4562, "step": 53960 }, { "epoch": 3.6665987226525343, "grad_norm": 1.3348901271820068, "learning_rate": 0.0005418450197037641, "loss": 3.6545, "step": 53965 }, { "epoch": 3.666938442723196, "grad_norm": 1.6161596775054932, "learning_rate": 0.0005418025546949313, "loss": 3.5397, "step": 53970 }, { "epoch": 3.667278162793858, "grad_norm": 1.2580459117889404, "learning_rate": 0.0005417600896860987, "loss": 3.8192, "step": 53975 }, { "epoch": 3.6676178828645196, "grad_norm": 1.0414507389068604, "learning_rate": 0.0005417176246772659, "loss": 3.6491, "step": 53980 }, { "epoch": 3.6679576029351812, "grad_norm": 1.3375827074050903, "learning_rate": 0.0005416751596684331, "loss": 3.5822, "step": 53985 }, { "epoch": 3.6682973230058433, "grad_norm": 1.364935278892517, "learning_rate": 0.0005416326946596006, "loss": 3.1978, "step": 53990 }, { "epoch": 3.668637043076505, "grad_norm": 1.3512375354766846, "learning_rate": 0.0005415902296507678, "loss": 3.5051, "step": 53995 }, { "epoch": 3.6689767631471666, "grad_norm": 1.3566426038742065, "learning_rate": 0.000541547764641935, "loss": 3.5271, "step": 54000 }, { "epoch": 3.6693164832178287, "grad_norm": 1.1666189432144165, "learning_rate": 0.0005415052996331024, "loss": 3.4703, "step": 54005 }, { "epoch": 3.6696562032884903, "grad_norm": 1.007063627243042, "learning_rate": 0.0005414628346242696, "loss": 3.4583, "step": 54010 }, { "epoch": 3.669995923359152, "grad_norm": 1.0736080408096313, "learning_rate": 0.0005414203696154368, "loss": 3.5503, "step": 54015 }, { "epoch": 3.670335643429814, "grad_norm": 1.1077550649642944, "learning_rate": 0.0005413779046066041, "loss": 3.4624, "step": 54020 }, { "epoch": 3.6706753635004756, "grad_norm": 1.377451777458191, "learning_rate": 0.0005413354395977715, "loss": 3.5443, "step": 54025 }, { "epoch": 3.6710150835711373, "grad_norm": 1.146864414215088, "learning_rate": 0.0005412929745889388, "loss": 3.4638, "step": 54030 }, { "epoch": 
3.6713548036417993, "grad_norm": 1.138848900794983, "learning_rate": 0.000541250509580106, "loss": 3.4601, "step": 54035 }, { "epoch": 3.671694523712461, "grad_norm": 1.18059504032135, "learning_rate": 0.0005412080445712733, "loss": 3.6787, "step": 54040 }, { "epoch": 3.6720342437831226, "grad_norm": 1.2796885967254639, "learning_rate": 0.0005411655795624406, "loss": 3.5781, "step": 54045 }, { "epoch": 3.6723739638537847, "grad_norm": 1.0735929012298584, "learning_rate": 0.0005411231145536078, "loss": 3.4781, "step": 54050 }, { "epoch": 3.6727136839244463, "grad_norm": 1.4094702005386353, "learning_rate": 0.000541080649544775, "loss": 3.5254, "step": 54055 }, { "epoch": 3.673053403995108, "grad_norm": 1.4122343063354492, "learning_rate": 0.0005410381845359425, "loss": 3.5963, "step": 54060 }, { "epoch": 3.67339312406577, "grad_norm": 1.2567882537841797, "learning_rate": 0.0005409957195271097, "loss": 3.4226, "step": 54065 }, { "epoch": 3.6737328441364316, "grad_norm": 1.7272270917892456, "learning_rate": 0.000540953254518277, "loss": 3.5726, "step": 54070 }, { "epoch": 3.6740725642070933, "grad_norm": 1.1524471044540405, "learning_rate": 0.0005409107895094443, "loss": 3.7559, "step": 54075 }, { "epoch": 3.6744122842777553, "grad_norm": 1.1611109972000122, "learning_rate": 0.0005408683245006115, "loss": 3.6187, "step": 54080 }, { "epoch": 3.674752004348417, "grad_norm": 1.3958956003189087, "learning_rate": 0.0005408258594917787, "loss": 3.6614, "step": 54085 }, { "epoch": 3.6750917244190786, "grad_norm": 1.093592882156372, "learning_rate": 0.0005407833944829461, "loss": 3.4856, "step": 54090 }, { "epoch": 3.6754314444897407, "grad_norm": 1.257249116897583, "learning_rate": 0.0005407409294741134, "loss": 3.1316, "step": 54095 }, { "epoch": 3.6757711645604023, "grad_norm": 1.2948640584945679, "learning_rate": 0.0005406984644652806, "loss": 3.5849, "step": 54100 }, { "epoch": 3.676110884631064, "grad_norm": 1.3680967092514038, "learning_rate": 0.000540655999456448, 
"loss": 3.3692, "step": 54105 }, { "epoch": 3.676450604701726, "grad_norm": 1.5204440355300903, "learning_rate": 0.0005406135344476152, "loss": 3.4933, "step": 54110 }, { "epoch": 3.6767903247723877, "grad_norm": 1.271954894065857, "learning_rate": 0.0005405710694387824, "loss": 3.4606, "step": 54115 }, { "epoch": 3.6771300448430493, "grad_norm": 1.3855265378952026, "learning_rate": 0.0005405286044299497, "loss": 3.4425, "step": 54120 }, { "epoch": 3.6774697649137114, "grad_norm": 1.3298593759536743, "learning_rate": 0.000540486139421117, "loss": 3.5165, "step": 54125 }, { "epoch": 3.677809484984373, "grad_norm": 1.086240291595459, "learning_rate": 0.0005404436744122843, "loss": 3.3002, "step": 54130 }, { "epoch": 3.6781492050550346, "grad_norm": 1.3034405708312988, "learning_rate": 0.0005404012094034516, "loss": 3.5971, "step": 54135 }, { "epoch": 3.6784889251256967, "grad_norm": 1.5515280961990356, "learning_rate": 0.0005403587443946189, "loss": 3.6079, "step": 54140 }, { "epoch": 3.6788286451963583, "grad_norm": 1.310863733291626, "learning_rate": 0.0005403162793857861, "loss": 3.5563, "step": 54145 }, { "epoch": 3.67916836526702, "grad_norm": 1.2270317077636719, "learning_rate": 0.0005402738143769534, "loss": 3.4928, "step": 54150 }, { "epoch": 3.6795080853376816, "grad_norm": 1.28194260597229, "learning_rate": 0.0005402313493681207, "loss": 3.5054, "step": 54155 }, { "epoch": 3.6798478054083437, "grad_norm": 1.7023768424987793, "learning_rate": 0.0005401888843592879, "loss": 3.4878, "step": 54160 }, { "epoch": 3.6801875254790053, "grad_norm": 1.8205726146697998, "learning_rate": 0.0005401464193504553, "loss": 3.5877, "step": 54165 }, { "epoch": 3.680527245549667, "grad_norm": 1.2522773742675781, "learning_rate": 0.0005401039543416225, "loss": 3.0458, "step": 54170 }, { "epoch": 3.680866965620329, "grad_norm": 1.2217731475830078, "learning_rate": 0.0005400614893327898, "loss": 3.7847, "step": 54175 }, { "epoch": 3.6812066856909906, "grad_norm": 
1.0976401567459106, "learning_rate": 0.0005400190243239571, "loss": 3.2991, "step": 54180 }, { "epoch": 3.6815464057616523, "grad_norm": 1.1885021924972534, "learning_rate": 0.0005399765593151243, "loss": 3.401, "step": 54185 }, { "epoch": 3.6818861258323143, "grad_norm": 1.0978665351867676, "learning_rate": 0.0005399340943062916, "loss": 3.4838, "step": 54190 }, { "epoch": 3.682225845902976, "grad_norm": 1.1484594345092773, "learning_rate": 0.0005398916292974589, "loss": 3.3273, "step": 54195 }, { "epoch": 3.6825655659736376, "grad_norm": 1.393709421157837, "learning_rate": 0.0005398491642886262, "loss": 3.6089, "step": 54200 }, { "epoch": 3.6829052860442992, "grad_norm": 1.2930514812469482, "learning_rate": 0.0005398066992797935, "loss": 3.4351, "step": 54205 }, { "epoch": 3.6832450061149613, "grad_norm": 1.4715572595596313, "learning_rate": 0.0005397642342709608, "loss": 3.581, "step": 54210 }, { "epoch": 3.683584726185623, "grad_norm": 1.2556102275848389, "learning_rate": 0.000539721769262128, "loss": 3.4959, "step": 54215 }, { "epoch": 3.6839244462562846, "grad_norm": 1.1449915170669556, "learning_rate": 0.0005396793042532952, "loss": 3.4049, "step": 54220 }, { "epoch": 3.6842641663269466, "grad_norm": 1.4428759813308716, "learning_rate": 0.0005396368392444626, "loss": 3.3817, "step": 54225 }, { "epoch": 3.6846038863976083, "grad_norm": 1.2762138843536377, "learning_rate": 0.0005395943742356298, "loss": 3.5562, "step": 54230 }, { "epoch": 3.68494360646827, "grad_norm": 1.12649405002594, "learning_rate": 0.0005395519092267971, "loss": 3.5929, "step": 54235 }, { "epoch": 3.685283326538932, "grad_norm": 1.3732354640960693, "learning_rate": 0.0005395094442179645, "loss": 3.2113, "step": 54240 }, { "epoch": 3.6856230466095936, "grad_norm": 1.4543492794036865, "learning_rate": 0.0005394669792091317, "loss": 3.5034, "step": 54245 }, { "epoch": 3.6859627666802552, "grad_norm": 1.387133002281189, "learning_rate": 0.0005394245142002989, "loss": 3.6148, "step": 54250 }, 
{ "epoch": 3.6863024867509173, "grad_norm": 1.4712038040161133, "learning_rate": 0.0005393820491914663, "loss": 3.6419, "step": 54255 }, { "epoch": 3.686642206821579, "grad_norm": 1.0724105834960938, "learning_rate": 0.0005393395841826335, "loss": 3.2817, "step": 54260 }, { "epoch": 3.6869819268922406, "grad_norm": 0.923711359500885, "learning_rate": 0.0005392971191738007, "loss": 3.6593, "step": 54265 }, { "epoch": 3.6873216469629027, "grad_norm": 1.1390917301177979, "learning_rate": 0.0005392546541649682, "loss": 3.4843, "step": 54270 }, { "epoch": 3.6876613670335643, "grad_norm": 1.1778994798660278, "learning_rate": 0.0005392121891561354, "loss": 3.3117, "step": 54275 }, { "epoch": 3.688001087104226, "grad_norm": 0.9545568227767944, "learning_rate": 0.0005391697241473026, "loss": 3.6207, "step": 54280 }, { "epoch": 3.688340807174888, "grad_norm": 1.2005231380462646, "learning_rate": 0.0005391272591384699, "loss": 3.5859, "step": 54285 }, { "epoch": 3.6886805272455496, "grad_norm": 1.0978736877441406, "learning_rate": 0.0005390847941296372, "loss": 3.5532, "step": 54290 }, { "epoch": 3.6890202473162113, "grad_norm": 0.9903157949447632, "learning_rate": 0.0005390423291208044, "loss": 3.3843, "step": 54295 }, { "epoch": 3.6893599673868733, "grad_norm": 1.3674463033676147, "learning_rate": 0.0005389998641119717, "loss": 3.4388, "step": 54300 }, { "epoch": 3.689699687457535, "grad_norm": 1.4195079803466797, "learning_rate": 0.0005389573991031391, "loss": 3.5294, "step": 54305 }, { "epoch": 3.6900394075281966, "grad_norm": 1.2474037408828735, "learning_rate": 0.0005389149340943063, "loss": 3.1351, "step": 54310 }, { "epoch": 3.6903791275988587, "grad_norm": 1.1918094158172607, "learning_rate": 0.0005388724690854736, "loss": 3.5608, "step": 54315 }, { "epoch": 3.6907188476695203, "grad_norm": 1.1767348051071167, "learning_rate": 0.0005388300040766408, "loss": 3.6959, "step": 54320 }, { "epoch": 3.691058567740182, "grad_norm": 1.3385088443756104, "learning_rate": 
0.0005387875390678081, "loss": 3.3753, "step": 54325 }, { "epoch": 3.691398287810844, "grad_norm": 1.0128520727157593, "learning_rate": 0.0005387450740589754, "loss": 3.3084, "step": 54330 }, { "epoch": 3.6917380078815056, "grad_norm": 1.246504306793213, "learning_rate": 0.0005387026090501426, "loss": 3.46, "step": 54335 }, { "epoch": 3.6920777279521673, "grad_norm": 1.1823999881744385, "learning_rate": 0.00053866014404131, "loss": 3.2939, "step": 54340 }, { "epoch": 3.6924174480228293, "grad_norm": 1.1027482748031616, "learning_rate": 0.0005386176790324773, "loss": 3.3718, "step": 54345 }, { "epoch": 3.692757168093491, "grad_norm": 1.0172981023788452, "learning_rate": 0.0005385752140236445, "loss": 3.6827, "step": 54350 }, { "epoch": 3.6930968881641526, "grad_norm": 1.1517654657363892, "learning_rate": 0.0005385327490148117, "loss": 3.5599, "step": 54355 }, { "epoch": 3.6934366082348147, "grad_norm": 1.0548579692840576, "learning_rate": 0.0005384902840059791, "loss": 3.5002, "step": 54360 }, { "epoch": 3.6937763283054763, "grad_norm": 0.9993215203285217, "learning_rate": 0.0005384478189971463, "loss": 3.4167, "step": 54365 }, { "epoch": 3.694116048376138, "grad_norm": 1.1484410762786865, "learning_rate": 0.0005384053539883136, "loss": 3.4102, "step": 54370 }, { "epoch": 3.6944557684468, "grad_norm": 1.3982460498809814, "learning_rate": 0.000538362888979481, "loss": 3.5288, "step": 54375 }, { "epoch": 3.6947954885174616, "grad_norm": 1.3522080183029175, "learning_rate": 0.0005383204239706482, "loss": 3.0759, "step": 54380 }, { "epoch": 3.6951352085881233, "grad_norm": 1.5147595405578613, "learning_rate": 0.0005382779589618155, "loss": 3.3148, "step": 54385 }, { "epoch": 3.6954749286587854, "grad_norm": 1.3683688640594482, "learning_rate": 0.0005382354939529828, "loss": 3.4122, "step": 54390 }, { "epoch": 3.695814648729447, "grad_norm": 1.2272206544876099, "learning_rate": 0.00053819302894415, "loss": 3.2663, "step": 54395 }, { "epoch": 3.6961543688001086, 
"grad_norm": 1.3679858446121216, "learning_rate": 0.0005381505639353173, "loss": 3.3459, "step": 54400 }, { "epoch": 3.6964940888707707, "grad_norm": 1.378643274307251, "learning_rate": 0.0005381080989264845, "loss": 3.6321, "step": 54405 }, { "epoch": 3.6968338089414323, "grad_norm": 1.1966363191604614, "learning_rate": 0.0005380656339176519, "loss": 3.4756, "step": 54410 }, { "epoch": 3.697173529012094, "grad_norm": 1.342343807220459, "learning_rate": 0.0005380231689088192, "loss": 3.4322, "step": 54415 }, { "epoch": 3.697513249082756, "grad_norm": 1.7127143144607544, "learning_rate": 0.0005379807038999864, "loss": 3.5469, "step": 54420 }, { "epoch": 3.6978529691534177, "grad_norm": 1.17060387134552, "learning_rate": 0.0005379382388911537, "loss": 3.3942, "step": 54425 }, { "epoch": 3.6981926892240793, "grad_norm": 1.156783103942871, "learning_rate": 0.000537895773882321, "loss": 3.3173, "step": 54430 }, { "epoch": 3.6985324092947414, "grad_norm": 1.4242461919784546, "learning_rate": 0.0005378533088734882, "loss": 3.4662, "step": 54435 }, { "epoch": 3.698872129365403, "grad_norm": 1.3024734258651733, "learning_rate": 0.0005378108438646555, "loss": 3.5012, "step": 54440 }, { "epoch": 3.6992118494360646, "grad_norm": 1.333575963973999, "learning_rate": 0.0005377683788558229, "loss": 3.4906, "step": 54445 }, { "epoch": 3.6995515695067267, "grad_norm": 1.2657331228256226, "learning_rate": 0.0005377259138469901, "loss": 3.3505, "step": 54450 }, { "epoch": 3.6998912895773883, "grad_norm": 1.4350625276565552, "learning_rate": 0.0005376834488381574, "loss": 3.6635, "step": 54455 }, { "epoch": 3.70023100964805, "grad_norm": 1.2254196405410767, "learning_rate": 0.0005376409838293247, "loss": 3.4282, "step": 54460 }, { "epoch": 3.700570729718712, "grad_norm": 1.2336503267288208, "learning_rate": 0.0005375985188204919, "loss": 3.3752, "step": 54465 }, { "epoch": 3.7009104497893737, "grad_norm": 1.2176079750061035, "learning_rate": 0.0005375560538116591, "loss": 3.448, 
"step": 54470 }, { "epoch": 3.7012501698600353, "grad_norm": 1.087140440940857, "learning_rate": 0.0005375135888028266, "loss": 3.5694, "step": 54475 }, { "epoch": 3.7015898899306974, "grad_norm": 1.3487670421600342, "learning_rate": 0.0005374711237939938, "loss": 3.5489, "step": 54480 }, { "epoch": 3.701929610001359, "grad_norm": 1.2136449813842773, "learning_rate": 0.000537428658785161, "loss": 3.4611, "step": 54485 }, { "epoch": 3.7022693300720206, "grad_norm": 1.1840139627456665, "learning_rate": 0.0005373861937763284, "loss": 3.5295, "step": 54490 }, { "epoch": 3.7026090501426823, "grad_norm": 1.4998348951339722, "learning_rate": 0.0005373437287674956, "loss": 3.5226, "step": 54495 }, { "epoch": 3.7029487702133443, "grad_norm": 1.1479310989379883, "learning_rate": 0.0005373012637586628, "loss": 3.4528, "step": 54500 }, { "epoch": 3.703288490284006, "grad_norm": 1.3275994062423706, "learning_rate": 0.0005372587987498302, "loss": 3.4095, "step": 54505 }, { "epoch": 3.7036282103546676, "grad_norm": 1.705754041671753, "learning_rate": 0.0005372163337409975, "loss": 3.7187, "step": 54510 }, { "epoch": 3.7039679304253297, "grad_norm": 1.1126376390457153, "learning_rate": 0.0005371738687321647, "loss": 3.1744, "step": 54515 }, { "epoch": 3.7043076504959913, "grad_norm": 1.5067511796951294, "learning_rate": 0.000537131403723332, "loss": 3.3659, "step": 54520 }, { "epoch": 3.704647370566653, "grad_norm": 1.1556161642074585, "learning_rate": 0.0005370889387144993, "loss": 3.6289, "step": 54525 }, { "epoch": 3.704987090637315, "grad_norm": 1.1481475830078125, "learning_rate": 0.0005370464737056665, "loss": 3.5127, "step": 54530 }, { "epoch": 3.7053268107079766, "grad_norm": 1.1801061630249023, "learning_rate": 0.0005370040086968338, "loss": 3.4426, "step": 54535 }, { "epoch": 3.7056665307786383, "grad_norm": 1.3177621364593506, "learning_rate": 0.0005369615436880011, "loss": 3.5281, "step": 54540 }, { "epoch": 3.7060062508493, "grad_norm": 1.06463623046875, 
"learning_rate": 0.0005369190786791684, "loss": 3.5673, "step": 54545 }, { "epoch": 3.706345970919962, "grad_norm": 1.3543860912322998, "learning_rate": 0.0005368766136703357, "loss": 3.5557, "step": 54550 }, { "epoch": 3.7066856909906236, "grad_norm": 1.187158226966858, "learning_rate": 0.000536834148661503, "loss": 3.3385, "step": 54555 }, { "epoch": 3.7070254110612852, "grad_norm": 1.624252438545227, "learning_rate": 0.0005367916836526702, "loss": 3.4967, "step": 54560 }, { "epoch": 3.7073651311319473, "grad_norm": 1.5309867858886719, "learning_rate": 0.0005367492186438375, "loss": 3.7095, "step": 54565 }, { "epoch": 3.707704851202609, "grad_norm": 1.5417710542678833, "learning_rate": 0.0005367067536350047, "loss": 3.3363, "step": 54570 }, { "epoch": 3.7080445712732706, "grad_norm": 1.2118334770202637, "learning_rate": 0.000536664288626172, "loss": 3.4372, "step": 54575 }, { "epoch": 3.7083842913439327, "grad_norm": 1.2424678802490234, "learning_rate": 0.0005366218236173394, "loss": 3.5051, "step": 54580 }, { "epoch": 3.7087240114145943, "grad_norm": 1.5260676145553589, "learning_rate": 0.0005365793586085066, "loss": 3.2626, "step": 54585 }, { "epoch": 3.709063731485256, "grad_norm": 1.5232157707214355, "learning_rate": 0.0005365368935996739, "loss": 3.7334, "step": 54590 }, { "epoch": 3.709403451555918, "grad_norm": 1.1684918403625488, "learning_rate": 0.0005364944285908412, "loss": 3.4482, "step": 54595 }, { "epoch": 3.7097431716265796, "grad_norm": 1.4541785717010498, "learning_rate": 0.0005364604565837749, "loss": 3.474, "step": 54600 }, { "epoch": 3.7100828916972413, "grad_norm": 1.2879579067230225, "learning_rate": 0.0005364179915749424, "loss": 3.5043, "step": 54605 }, { "epoch": 3.7104226117679033, "grad_norm": 1.6349272727966309, "learning_rate": 0.0005363755265661096, "loss": 3.2744, "step": 54610 }, { "epoch": 3.710762331838565, "grad_norm": 1.1960492134094238, "learning_rate": 0.0005363330615572768, "loss": 3.1823, "step": 54615 }, { "epoch": 
3.7111020519092266, "grad_norm": 1.071240782737732, "learning_rate": 0.0005362905965484441, "loss": 3.226, "step": 54620 }, { "epoch": 3.7114417719798887, "grad_norm": 1.2335600852966309, "learning_rate": 0.0005362481315396114, "loss": 3.3593, "step": 54625 }, { "epoch": 3.7117814920505503, "grad_norm": 1.6478545665740967, "learning_rate": 0.0005362056665307786, "loss": 3.7714, "step": 54630 }, { "epoch": 3.712121212121212, "grad_norm": 1.111775279045105, "learning_rate": 0.0005361632015219459, "loss": 3.5736, "step": 54635 }, { "epoch": 3.712460932191874, "grad_norm": 1.375191330909729, "learning_rate": 0.0005361207365131133, "loss": 3.4523, "step": 54640 }, { "epoch": 3.7128006522625356, "grad_norm": 1.5111825466156006, "learning_rate": 0.0005360782715042805, "loss": 3.506, "step": 54645 }, { "epoch": 3.7131403723331973, "grad_norm": 1.2577974796295166, "learning_rate": 0.0005360358064954478, "loss": 3.3166, "step": 54650 }, { "epoch": 3.7134800924038593, "grad_norm": 1.7461944818496704, "learning_rate": 0.000535993341486615, "loss": 3.4781, "step": 54655 }, { "epoch": 3.713819812474521, "grad_norm": 1.1604820489883423, "learning_rate": 0.0005359508764777823, "loss": 3.1378, "step": 54660 }, { "epoch": 3.7141595325451826, "grad_norm": 1.280063509941101, "learning_rate": 0.0005359084114689496, "loss": 3.3028, "step": 54665 }, { "epoch": 3.7144992526158447, "grad_norm": 1.319968581199646, "learning_rate": 0.0005358659464601168, "loss": 3.575, "step": 54670 }, { "epoch": 3.7148389726865063, "grad_norm": 1.0377064943313599, "learning_rate": 0.0005358234814512842, "loss": 3.4288, "step": 54675 }, { "epoch": 3.715178692757168, "grad_norm": 1.9179391860961914, "learning_rate": 0.0005357810164424515, "loss": 3.2716, "step": 54680 }, { "epoch": 3.71551841282783, "grad_norm": 1.4596714973449707, "learning_rate": 0.0005357385514336187, "loss": 3.5006, "step": 54685 }, { "epoch": 3.7158581328984917, "grad_norm": 1.4148272275924683, "learning_rate": 0.000535696086424786, 
"loss": 3.6515, "step": 54690 }, { "epoch": 3.7161978529691533, "grad_norm": 1.2594294548034668, "learning_rate": 0.0005356536214159533, "loss": 3.5758, "step": 54695 }, { "epoch": 3.7165375730398154, "grad_norm": 1.2633477449417114, "learning_rate": 0.0005356111564071205, "loss": 3.5687, "step": 54700 }, { "epoch": 3.716877293110477, "grad_norm": 1.6616379022598267, "learning_rate": 0.0005355686913982877, "loss": 3.5293, "step": 54705 }, { "epoch": 3.7172170131811386, "grad_norm": 1.067908763885498, "learning_rate": 0.0005355262263894552, "loss": 3.4839, "step": 54710 }, { "epoch": 3.7175567332518007, "grad_norm": 1.020021677017212, "learning_rate": 0.0005354837613806224, "loss": 3.2703, "step": 54715 }, { "epoch": 3.7178964533224623, "grad_norm": 1.1231125593185425, "learning_rate": 0.0005354412963717896, "loss": 3.4347, "step": 54720 }, { "epoch": 3.718236173393124, "grad_norm": 1.5331255197525024, "learning_rate": 0.000535398831362957, "loss": 3.4511, "step": 54725 }, { "epoch": 3.718575893463786, "grad_norm": 1.1850885152816772, "learning_rate": 0.0005353563663541242, "loss": 3.4828, "step": 54730 }, { "epoch": 3.7189156135344477, "grad_norm": 1.1426830291748047, "learning_rate": 0.0005353139013452914, "loss": 3.21, "step": 54735 }, { "epoch": 3.7192553336051093, "grad_norm": 1.4936163425445557, "learning_rate": 0.0005352714363364589, "loss": 3.5554, "step": 54740 }, { "epoch": 3.7195950536757714, "grad_norm": 1.3646148443222046, "learning_rate": 0.0005352289713276261, "loss": 3.4509, "step": 54745 }, { "epoch": 3.719934773746433, "grad_norm": 1.2483000755310059, "learning_rate": 0.0005351865063187933, "loss": 3.5109, "step": 54750 }, { "epoch": 3.7202744938170946, "grad_norm": 1.0630546808242798, "learning_rate": 0.0005351440413099606, "loss": 3.4666, "step": 54755 }, { "epoch": 3.7206142138877567, "grad_norm": 0.998598039150238, "learning_rate": 0.0005351015763011279, "loss": 3.5599, "step": 54760 }, { "epoch": 3.7209539339584183, "grad_norm": 
0.9919322729110718, "learning_rate": 0.0005350591112922951, "loss": 3.3138, "step": 54765 }, { "epoch": 3.72129365402908, "grad_norm": 1.4964522123336792, "learning_rate": 0.0005350166462834624, "loss": 3.4667, "step": 54770 }, { "epoch": 3.721633374099742, "grad_norm": 1.3883302211761475, "learning_rate": 0.0005349741812746298, "loss": 3.2891, "step": 54775 }, { "epoch": 3.7219730941704037, "grad_norm": 1.437581181526184, "learning_rate": 0.000534931716265797, "loss": 3.6386, "step": 54780 }, { "epoch": 3.7223128142410653, "grad_norm": 1.063865065574646, "learning_rate": 0.0005348892512569643, "loss": 3.2388, "step": 54785 }, { "epoch": 3.7226525343117274, "grad_norm": 1.3524225950241089, "learning_rate": 0.0005348467862481316, "loss": 3.6157, "step": 54790 }, { "epoch": 3.722992254382389, "grad_norm": 2.3713130950927734, "learning_rate": 0.0005348043212392988, "loss": 3.4859, "step": 54795 }, { "epoch": 3.7233319744530506, "grad_norm": 1.221413493156433, "learning_rate": 0.0005347618562304661, "loss": 3.234, "step": 54800 }, { "epoch": 3.7236716945237127, "grad_norm": 1.1725496053695679, "learning_rate": 0.0005347193912216333, "loss": 3.4177, "step": 54805 }, { "epoch": 3.7240114145943743, "grad_norm": 1.3933449983596802, "learning_rate": 0.0005346769262128007, "loss": 3.3601, "step": 54810 }, { "epoch": 3.724351134665036, "grad_norm": 1.1933176517486572, "learning_rate": 0.000534634461203968, "loss": 3.2712, "step": 54815 }, { "epoch": 3.724690854735698, "grad_norm": 1.2507165670394897, "learning_rate": 0.0005345919961951352, "loss": 3.7377, "step": 54820 }, { "epoch": 3.7250305748063597, "grad_norm": 1.5638378858566284, "learning_rate": 0.0005345495311863025, "loss": 3.3124, "step": 54825 }, { "epoch": 3.7253702948770213, "grad_norm": 1.62712562084198, "learning_rate": 0.0005345070661774698, "loss": 3.2888, "step": 54830 }, { "epoch": 3.725710014947683, "grad_norm": 1.2716869115829468, "learning_rate": 0.000534464601168637, "loss": 3.3676, "step": 54835 }, { 
"epoch": 3.726049735018345, "grad_norm": 1.3754819631576538, "learning_rate": 0.0005344221361598042, "loss": 3.4187, "step": 54840 }, { "epoch": 3.7263894550890067, "grad_norm": 1.1075518131256104, "learning_rate": 0.0005343796711509717, "loss": 3.5043, "step": 54845 }, { "epoch": 3.7267291751596683, "grad_norm": 1.1068271398544312, "learning_rate": 0.0005343372061421389, "loss": 3.1252, "step": 54850 }, { "epoch": 3.7270688952303304, "grad_norm": 1.1678311824798584, "learning_rate": 0.0005342947411333061, "loss": 3.4809, "step": 54855 }, { "epoch": 3.727408615300992, "grad_norm": 1.2989866733551025, "learning_rate": 0.0005342522761244735, "loss": 3.5826, "step": 54860 }, { "epoch": 3.7277483353716536, "grad_norm": 1.4133358001708984, "learning_rate": 0.0005342098111156407, "loss": 3.714, "step": 54865 }, { "epoch": 3.7280880554423157, "grad_norm": 1.384534239768982, "learning_rate": 0.0005341673461068079, "loss": 3.4811, "step": 54870 }, { "epoch": 3.7284277755129773, "grad_norm": 1.3009008169174194, "learning_rate": 0.0005341248810979753, "loss": 3.3223, "step": 54875 }, { "epoch": 3.728767495583639, "grad_norm": 1.127121090888977, "learning_rate": 0.0005340824160891426, "loss": 3.3902, "step": 54880 }, { "epoch": 3.7291072156543006, "grad_norm": 1.190335988998413, "learning_rate": 0.0005340399510803098, "loss": 3.4631, "step": 54885 }, { "epoch": 3.7294469357249627, "grad_norm": 1.4336817264556885, "learning_rate": 0.0005339974860714772, "loss": 3.5064, "step": 54890 }, { "epoch": 3.7297866557956243, "grad_norm": 1.7769668102264404, "learning_rate": 0.0005339550210626444, "loss": 3.5788, "step": 54895 }, { "epoch": 3.730126375866286, "grad_norm": 1.6770097017288208, "learning_rate": 0.0005339125560538116, "loss": 3.4279, "step": 54900 }, { "epoch": 3.730466095936948, "grad_norm": 1.2038809061050415, "learning_rate": 0.0005338700910449789, "loss": 3.3211, "step": 54905 }, { "epoch": 3.7308058160076096, "grad_norm": 1.2994518280029297, "learning_rate": 
0.0005338276260361462, "loss": 3.5293, "step": 54910 }, { "epoch": 3.7311455360782713, "grad_norm": 1.5360959768295288, "learning_rate": 0.0005337851610273136, "loss": 3.3644, "step": 54915 }, { "epoch": 3.7314852561489333, "grad_norm": 1.0771785974502563, "learning_rate": 0.0005337426960184808, "loss": 3.3137, "step": 54920 }, { "epoch": 3.731824976219595, "grad_norm": 1.17287278175354, "learning_rate": 0.0005337002310096481, "loss": 3.474, "step": 54925 }, { "epoch": 3.7321646962902566, "grad_norm": 1.4148900508880615, "learning_rate": 0.0005336577660008154, "loss": 3.3885, "step": 54930 }, { "epoch": 3.7325044163609187, "grad_norm": 1.1712702512741089, "learning_rate": 0.0005336153009919826, "loss": 3.592, "step": 54935 }, { "epoch": 3.7328441364315803, "grad_norm": 1.214959979057312, "learning_rate": 0.0005335728359831498, "loss": 3.428, "step": 54940 }, { "epoch": 3.733183856502242, "grad_norm": 1.3162037134170532, "learning_rate": 0.0005335303709743172, "loss": 3.2601, "step": 54945 }, { "epoch": 3.733523576572904, "grad_norm": 1.5086467266082764, "learning_rate": 0.0005334879059654845, "loss": 3.3214, "step": 54950 }, { "epoch": 3.7338632966435656, "grad_norm": 1.295113205909729, "learning_rate": 0.0005334454409566517, "loss": 3.5881, "step": 54955 }, { "epoch": 3.7342030167142273, "grad_norm": 1.1685465574264526, "learning_rate": 0.0005334029759478191, "loss": 3.5061, "step": 54960 }, { "epoch": 3.7345427367848893, "grad_norm": 1.405963659286499, "learning_rate": 0.0005333605109389863, "loss": 3.4186, "step": 54965 }, { "epoch": 3.734882456855551, "grad_norm": 1.1676373481750488, "learning_rate": 0.0005333180459301535, "loss": 3.4282, "step": 54970 }, { "epoch": 3.7352221769262126, "grad_norm": 1.4812273979187012, "learning_rate": 0.0005332755809213209, "loss": 3.6026, "step": 54975 }, { "epoch": 3.7355618969968747, "grad_norm": 1.3256522417068481, "learning_rate": 0.0005332331159124881, "loss": 3.5288, "step": 54980 }, { "epoch": 3.7359016170675363, 
"grad_norm": 1.1643853187561035, "learning_rate": 0.0005331906509036554, "loss": 3.5898, "step": 54985 }, { "epoch": 3.736241337138198, "grad_norm": 1.7482634782791138, "learning_rate": 0.0005331481858948228, "loss": 3.4511, "step": 54990 }, { "epoch": 3.73658105720886, "grad_norm": 1.3092097043991089, "learning_rate": 0.00053310572088599, "loss": 3.6361, "step": 54995 }, { "epoch": 3.7369207772795217, "grad_norm": 1.4419957399368286, "learning_rate": 0.0005330632558771572, "loss": 3.3753, "step": 55000 }, { "epoch": 3.7372604973501833, "grad_norm": 1.152962327003479, "learning_rate": 0.0005330207908683245, "loss": 3.4401, "step": 55005 }, { "epoch": 3.7376002174208454, "grad_norm": 1.4687206745147705, "learning_rate": 0.0005329783258594918, "loss": 3.4265, "step": 55010 }, { "epoch": 3.737939937491507, "grad_norm": 1.1773223876953125, "learning_rate": 0.000532935860850659, "loss": 3.6826, "step": 55015 }, { "epoch": 3.7382796575621686, "grad_norm": 1.2364106178283691, "learning_rate": 0.0005328933958418264, "loss": 3.6757, "step": 55020 }, { "epoch": 3.7386193776328307, "grad_norm": 1.4802449941635132, "learning_rate": 0.0005328509308329937, "loss": 3.3183, "step": 55025 }, { "epoch": 3.7389590977034923, "grad_norm": 1.6294159889221191, "learning_rate": 0.0005328084658241609, "loss": 3.5212, "step": 55030 }, { "epoch": 3.739298817774154, "grad_norm": 1.3885862827301025, "learning_rate": 0.0005327660008153282, "loss": 3.3334, "step": 55035 }, { "epoch": 3.739638537844816, "grad_norm": 1.844020962715149, "learning_rate": 0.0005327235358064954, "loss": 3.4928, "step": 55040 }, { "epoch": 3.7399782579154777, "grad_norm": 1.251900315284729, "learning_rate": 0.0005326810707976627, "loss": 3.0095, "step": 55045 }, { "epoch": 3.7403179779861393, "grad_norm": 1.8130898475646973, "learning_rate": 0.00053263860578883, "loss": 3.2995, "step": 55050 }, { "epoch": 3.7406576980568014, "grad_norm": 2.208378314971924, "learning_rate": 0.0005325961407799973, "loss": 3.5324, "step": 
55055 }, { "epoch": 3.740997418127463, "grad_norm": 1.4499608278274536, "learning_rate": 0.0005325536757711646, "loss": 3.5411, "step": 55060 }, { "epoch": 3.7413371381981246, "grad_norm": 1.255533218383789, "learning_rate": 0.0005325112107623319, "loss": 3.5445, "step": 55065 }, { "epoch": 3.7416768582687867, "grad_norm": 1.076733112335205, "learning_rate": 0.0005324687457534991, "loss": 3.6119, "step": 55070 }, { "epoch": 3.7420165783394483, "grad_norm": 1.0488245487213135, "learning_rate": 0.0005324262807446664, "loss": 3.8281, "step": 55075 }, { "epoch": 3.74235629841011, "grad_norm": 1.1166021823883057, "learning_rate": 0.0005323838157358337, "loss": 3.3297, "step": 55080 }, { "epoch": 3.742696018480772, "grad_norm": 1.294730305671692, "learning_rate": 0.0005323413507270009, "loss": 3.329, "step": 55085 }, { "epoch": 3.7430357385514337, "grad_norm": 1.7036558389663696, "learning_rate": 0.0005322988857181682, "loss": 3.3987, "step": 55090 }, { "epoch": 3.7433754586220953, "grad_norm": 1.1911042928695679, "learning_rate": 0.0005322564207093356, "loss": 3.3796, "step": 55095 }, { "epoch": 3.7437151786927574, "grad_norm": 1.6219934225082397, "learning_rate": 0.0005322139557005028, "loss": 3.4065, "step": 55100 }, { "epoch": 3.744054898763419, "grad_norm": 1.2735512256622314, "learning_rate": 0.00053217149069167, "loss": 3.1955, "step": 55105 }, { "epoch": 3.7443946188340806, "grad_norm": 1.33342707157135, "learning_rate": 0.0005321290256828374, "loss": 3.4055, "step": 55110 }, { "epoch": 3.7447343389047427, "grad_norm": 1.4650192260742188, "learning_rate": 0.0005320865606740046, "loss": 3.6344, "step": 55115 }, { "epoch": 3.7450740589754044, "grad_norm": 1.0848859548568726, "learning_rate": 0.0005320440956651718, "loss": 3.1494, "step": 55120 }, { "epoch": 3.745413779046066, "grad_norm": 1.4242299795150757, "learning_rate": 0.0005320016306563393, "loss": 3.0842, "step": 55125 }, { "epoch": 3.745753499116728, "grad_norm": 1.1990315914154053, "learning_rate": 
0.0005319591656475065, "loss": 3.4623, "step": 55130 }, { "epoch": 3.7460932191873897, "grad_norm": 1.184035062789917, "learning_rate": 0.0005319167006386737, "loss": 3.4677, "step": 55135 }, { "epoch": 3.7464329392580513, "grad_norm": 1.012764573097229, "learning_rate": 0.000531874235629841, "loss": 3.3167, "step": 55140 }, { "epoch": 3.7467726593287134, "grad_norm": 1.3330531120300293, "learning_rate": 0.0005318317706210083, "loss": 3.2484, "step": 55145 }, { "epoch": 3.747112379399375, "grad_norm": 1.0868998765945435, "learning_rate": 0.0005317893056121755, "loss": 3.4229, "step": 55150 }, { "epoch": 3.7474520994700367, "grad_norm": 1.4647338390350342, "learning_rate": 0.0005317468406033428, "loss": 3.4899, "step": 55155 }, { "epoch": 3.7477918195406987, "grad_norm": 1.1573266983032227, "learning_rate": 0.0005317043755945102, "loss": 3.3817, "step": 55160 }, { "epoch": 3.7481315396113604, "grad_norm": 1.359043002128601, "learning_rate": 0.0005316619105856774, "loss": 3.4814, "step": 55165 }, { "epoch": 3.748471259682022, "grad_norm": 1.2214754819869995, "learning_rate": 0.0005316194455768447, "loss": 3.2701, "step": 55170 }, { "epoch": 3.7488109797526836, "grad_norm": 1.4558831453323364, "learning_rate": 0.000531576980568012, "loss": 3.4198, "step": 55175 }, { "epoch": 3.7491506998233457, "grad_norm": 1.4969494342803955, "learning_rate": 0.0005315345155591792, "loss": 3.5173, "step": 55180 }, { "epoch": 3.7494904198940073, "grad_norm": 1.2277247905731201, "learning_rate": 0.0005314920505503465, "loss": 3.2961, "step": 55185 }, { "epoch": 3.749830139964669, "grad_norm": 1.1906319856643677, "learning_rate": 0.0005314495855415137, "loss": 3.6485, "step": 55190 }, { "epoch": 3.750169860035331, "grad_norm": 1.2924582958221436, "learning_rate": 0.0005314071205326811, "loss": 3.1302, "step": 55195 }, { "epoch": 3.7505095801059927, "grad_norm": 1.4542152881622314, "learning_rate": 0.0005313646555238484, "loss": 3.2239, "step": 55200 }, { "epoch": 3.7508493001766543, 
"grad_norm": 1.2284489870071411, "learning_rate": 0.0005313221905150156, "loss": 3.5723, "step": 55205 }, { "epoch": 3.7511890202473164, "grad_norm": 1.6061837673187256, "learning_rate": 0.0005312797255061829, "loss": 3.4431, "step": 55210 }, { "epoch": 3.751528740317978, "grad_norm": 2.378718376159668, "learning_rate": 0.0005312372604973502, "loss": 3.3994, "step": 55215 }, { "epoch": 3.7518684603886396, "grad_norm": 1.1520243883132935, "learning_rate": 0.0005311947954885174, "loss": 3.285, "step": 55220 }, { "epoch": 3.7522081804593013, "grad_norm": 1.3451436758041382, "learning_rate": 0.0005311523304796846, "loss": 3.4155, "step": 55225 }, { "epoch": 3.7525479005299633, "grad_norm": 1.1500130891799927, "learning_rate": 0.0005311098654708521, "loss": 3.6615, "step": 55230 }, { "epoch": 3.752887620600625, "grad_norm": 1.2577450275421143, "learning_rate": 0.0005310674004620193, "loss": 3.4522, "step": 55235 }, { "epoch": 3.7532273406712866, "grad_norm": 1.1034009456634521, "learning_rate": 0.0005310249354531865, "loss": 3.2139, "step": 55240 }, { "epoch": 3.7535670607419487, "grad_norm": 1.574044942855835, "learning_rate": 0.0005309824704443539, "loss": 3.7809, "step": 55245 }, { "epoch": 3.7539067808126103, "grad_norm": 1.2382673025131226, "learning_rate": 0.0005309400054355211, "loss": 3.6357, "step": 55250 }, { "epoch": 3.754246500883272, "grad_norm": 0.9499742388725281, "learning_rate": 0.0005308975404266884, "loss": 3.3258, "step": 55255 }, { "epoch": 3.754586220953934, "grad_norm": 1.2905277013778687, "learning_rate": 0.0005308550754178557, "loss": 3.5143, "step": 55260 }, { "epoch": 3.7549259410245956, "grad_norm": 1.2458422183990479, "learning_rate": 0.000530812610409023, "loss": 3.5841, "step": 55265 }, { "epoch": 3.7552656610952573, "grad_norm": 1.166147232055664, "learning_rate": 0.0005307701454001903, "loss": 3.4927, "step": 55270 }, { "epoch": 3.7556053811659194, "grad_norm": 1.159149408340454, "learning_rate": 0.0005307276803913576, "loss": 3.4649, 
"step": 55275 }, { "epoch": 3.755945101236581, "grad_norm": 1.4453235864639282, "learning_rate": 0.0005306852153825248, "loss": 3.4096, "step": 55280 }, { "epoch": 3.7562848213072426, "grad_norm": 1.3475756645202637, "learning_rate": 0.0005306427503736921, "loss": 3.4601, "step": 55285 }, { "epoch": 3.7566245413779047, "grad_norm": 1.325024127960205, "learning_rate": 0.0005306002853648593, "loss": 3.4608, "step": 55290 }, { "epoch": 3.7569642614485663, "grad_norm": 1.49109947681427, "learning_rate": 0.0005305578203560266, "loss": 3.4222, "step": 55295 }, { "epoch": 3.757303981519228, "grad_norm": 1.4746536016464233, "learning_rate": 0.000530515355347194, "loss": 3.5593, "step": 55300 }, { "epoch": 3.75764370158989, "grad_norm": 1.3020620346069336, "learning_rate": 0.0005304728903383612, "loss": 3.4126, "step": 55305 }, { "epoch": 3.7579834216605517, "grad_norm": 1.5187034606933594, "learning_rate": 0.0005304304253295285, "loss": 3.6139, "step": 55310 }, { "epoch": 3.7583231417312133, "grad_norm": 1.4304871559143066, "learning_rate": 0.0005303879603206958, "loss": 3.6437, "step": 55315 }, { "epoch": 3.7586628618018754, "grad_norm": 1.2358850240707397, "learning_rate": 0.0005303539883136296, "loss": 3.3382, "step": 55320 }, { "epoch": 3.759002581872537, "grad_norm": 0.992156982421875, "learning_rate": 0.0005303115233047968, "loss": 3.621, "step": 55325 }, { "epoch": 3.7593423019431986, "grad_norm": 1.546239972114563, "learning_rate": 0.0005302690582959642, "loss": 3.422, "step": 55330 }, { "epoch": 3.7596820220138607, "grad_norm": 1.093130350112915, "learning_rate": 0.0005302265932871314, "loss": 3.3603, "step": 55335 }, { "epoch": 3.7600217420845223, "grad_norm": 1.0639818906784058, "learning_rate": 0.0005301841282782986, "loss": 3.5686, "step": 55340 }, { "epoch": 3.760361462155184, "grad_norm": 1.3950748443603516, "learning_rate": 0.000530141663269466, "loss": 3.5534, "step": 55345 }, { "epoch": 3.760701182225846, "grad_norm": 1.2972266674041748, "learning_rate": 
0.0005300991982606332, "loss": 3.1914, "step": 55350 }, { "epoch": 3.7610409022965077, "grad_norm": 1.2480902671813965, "learning_rate": 0.0005300567332518005, "loss": 3.7495, "step": 55355 }, { "epoch": 3.7613806223671693, "grad_norm": 1.208603858947754, "learning_rate": 0.0005300142682429679, "loss": 3.6807, "step": 55360 }, { "epoch": 3.7617203424378314, "grad_norm": 1.184572458267212, "learning_rate": 0.0005299718032341351, "loss": 3.488, "step": 55365 }, { "epoch": 3.762060062508493, "grad_norm": 1.4646131992340088, "learning_rate": 0.0005299293382253023, "loss": 3.1591, "step": 55370 }, { "epoch": 3.7623997825791546, "grad_norm": 1.264340877532959, "learning_rate": 0.0005298868732164696, "loss": 3.495, "step": 55375 }, { "epoch": 3.7627395026498167, "grad_norm": 1.2536941766738892, "learning_rate": 0.0005298444082076369, "loss": 3.5358, "step": 55380 }, { "epoch": 3.7630792227204783, "grad_norm": 1.1338611841201782, "learning_rate": 0.0005298019431988041, "loss": 3.2723, "step": 55385 }, { "epoch": 3.76341894279114, "grad_norm": 1.3103828430175781, "learning_rate": 0.0005297594781899715, "loss": 3.4227, "step": 55390 }, { "epoch": 3.763758662861802, "grad_norm": 1.4166237115859985, "learning_rate": 0.0005297170131811388, "loss": 4.0136, "step": 55395 }, { "epoch": 3.7640983829324637, "grad_norm": 1.2331925630569458, "learning_rate": 0.000529674548172306, "loss": 3.4318, "step": 55400 }, { "epoch": 3.7644381030031253, "grad_norm": 1.1505217552185059, "learning_rate": 0.0005296320831634733, "loss": 3.2716, "step": 55405 }, { "epoch": 3.7647778230737874, "grad_norm": 1.0462549924850464, "learning_rate": 0.0005295896181546406, "loss": 3.4419, "step": 55410 }, { "epoch": 3.765117543144449, "grad_norm": 1.0619604587554932, "learning_rate": 0.0005295471531458078, "loss": 3.4098, "step": 55415 }, { "epoch": 3.7654572632151107, "grad_norm": 1.764477252960205, "learning_rate": 0.0005295046881369751, "loss": 3.64, "step": 55420 }, { "epoch": 3.7657969832857727, 
"grad_norm": 0.9875699281692505, "learning_rate": 0.0005294622231281424, "loss": 3.5925, "step": 55425 }, { "epoch": 3.7661367033564344, "grad_norm": 1.3623360395431519, "learning_rate": 0.0005294197581193097, "loss": 3.3759, "step": 55430 }, { "epoch": 3.766476423427096, "grad_norm": 1.4915297031402588, "learning_rate": 0.000529377293110477, "loss": 3.6314, "step": 55435 }, { "epoch": 3.766816143497758, "grad_norm": 1.1716039180755615, "learning_rate": 0.0005293348281016442, "loss": 3.3973, "step": 55440 }, { "epoch": 3.7671558635684197, "grad_norm": 1.3225868940353394, "learning_rate": 0.0005292923630928115, "loss": 3.3906, "step": 55445 }, { "epoch": 3.7674955836390813, "grad_norm": 1.512862205505371, "learning_rate": 0.0005292498980839788, "loss": 3.6511, "step": 55450 }, { "epoch": 3.7678353037097434, "grad_norm": 1.3022514581680298, "learning_rate": 0.000529207433075146, "loss": 3.6248, "step": 55455 }, { "epoch": 3.768175023780405, "grad_norm": 1.5963104963302612, "learning_rate": 0.0005291649680663135, "loss": 3.3824, "step": 55460 }, { "epoch": 3.7685147438510667, "grad_norm": 1.283712387084961, "learning_rate": 0.0005291225030574807, "loss": 3.2666, "step": 55465 }, { "epoch": 3.7688544639217287, "grad_norm": 1.3822869062423706, "learning_rate": 0.0005290800380486479, "loss": 3.2397, "step": 55470 }, { "epoch": 3.7691941839923904, "grad_norm": 1.7530205249786377, "learning_rate": 0.0005290375730398152, "loss": 3.3716, "step": 55475 }, { "epoch": 3.769533904063052, "grad_norm": 1.2020502090454102, "learning_rate": 0.0005289951080309825, "loss": 3.4415, "step": 55480 }, { "epoch": 3.769873624133714, "grad_norm": 1.116250991821289, "learning_rate": 0.0005289526430221497, "loss": 3.4604, "step": 55485 }, { "epoch": 3.7702133442043757, "grad_norm": 1.0503687858581543, "learning_rate": 0.0005289101780133171, "loss": 3.4796, "step": 55490 }, { "epoch": 3.7705530642750373, "grad_norm": 1.40874445438385, "learning_rate": 0.0005288677130044844, "loss": 3.5244, 
"step": 55495 }, { "epoch": 3.7708927843456994, "grad_norm": 1.3745183944702148, "learning_rate": 0.0005288252479956516, "loss": 3.5603, "step": 55500 }, { "epoch": 3.771232504416361, "grad_norm": 1.3991165161132812, "learning_rate": 0.0005287827829868189, "loss": 3.3454, "step": 55505 }, { "epoch": 3.7715722244870227, "grad_norm": 1.3157180547714233, "learning_rate": 0.0005287403179779862, "loss": 3.2616, "step": 55510 }, { "epoch": 3.7719119445576843, "grad_norm": 1.3338934183120728, "learning_rate": 0.0005286978529691534, "loss": 3.6998, "step": 55515 }, { "epoch": 3.7722516646283464, "grad_norm": 0.9977795481681824, "learning_rate": 0.0005286553879603207, "loss": 3.6552, "step": 55520 }, { "epoch": 3.772591384699008, "grad_norm": 1.408599615097046, "learning_rate": 0.000528612922951488, "loss": 3.5739, "step": 55525 }, { "epoch": 3.7729311047696696, "grad_norm": 1.2213793992996216, "learning_rate": 0.0005285704579426553, "loss": 3.2176, "step": 55530 }, { "epoch": 3.7732708248403317, "grad_norm": 1.18412184715271, "learning_rate": 0.0005285279929338226, "loss": 3.3139, "step": 55535 }, { "epoch": 3.7736105449109933, "grad_norm": 1.0510857105255127, "learning_rate": 0.0005284855279249898, "loss": 3.3928, "step": 55540 }, { "epoch": 3.773950264981655, "grad_norm": 1.4245855808258057, "learning_rate": 0.0005284430629161571, "loss": 3.5043, "step": 55545 }, { "epoch": 3.774289985052317, "grad_norm": 1.405638575553894, "learning_rate": 0.0005284005979073244, "loss": 3.5476, "step": 55550 }, { "epoch": 3.7746297051229787, "grad_norm": 1.355863094329834, "learning_rate": 0.0005283581328984916, "loss": 3.4122, "step": 55555 }, { "epoch": 3.7749694251936403, "grad_norm": 1.8584681749343872, "learning_rate": 0.000528315667889659, "loss": 3.4024, "step": 55560 }, { "epoch": 3.775309145264302, "grad_norm": 1.2824134826660156, "learning_rate": 0.0005282732028808263, "loss": 3.1745, "step": 55565 }, { "epoch": 3.775648865334964, "grad_norm": 1.3370203971862793, 
"learning_rate": 0.0005282307378719935, "loss": 3.4307, "step": 55570 }, { "epoch": 3.7759885854056257, "grad_norm": 1.3132376670837402, "learning_rate": 0.0005281882728631607, "loss": 3.818, "step": 55575 }, { "epoch": 3.7763283054762873, "grad_norm": 1.2204747200012207, "learning_rate": 0.0005281458078543281, "loss": 3.3937, "step": 55580 }, { "epoch": 3.7766680255469494, "grad_norm": 1.5191383361816406, "learning_rate": 0.0005281033428454953, "loss": 3.4967, "step": 55585 }, { "epoch": 3.777007745617611, "grad_norm": 1.3905590772628784, "learning_rate": 0.0005280608778366625, "loss": 3.5486, "step": 55590 }, { "epoch": 3.7773474656882726, "grad_norm": 1.5857813358306885, "learning_rate": 0.00052801841282783, "loss": 3.5448, "step": 55595 }, { "epoch": 3.7776871857589347, "grad_norm": 1.2245889902114868, "learning_rate": 0.0005279759478189972, "loss": 3.5806, "step": 55600 }, { "epoch": 3.7780269058295963, "grad_norm": 1.3170026540756226, "learning_rate": 0.0005279334828101644, "loss": 3.3653, "step": 55605 }, { "epoch": 3.778366625900258, "grad_norm": 1.1982495784759521, "learning_rate": 0.0005278910178013318, "loss": 3.4825, "step": 55610 }, { "epoch": 3.77870634597092, "grad_norm": 0.9496110677719116, "learning_rate": 0.000527848552792499, "loss": 3.5494, "step": 55615 }, { "epoch": 3.7790460660415817, "grad_norm": 1.2671728134155273, "learning_rate": 0.0005278060877836662, "loss": 3.4483, "step": 55620 }, { "epoch": 3.7793857861122433, "grad_norm": 1.1306982040405273, "learning_rate": 0.0005277636227748335, "loss": 3.5576, "step": 55625 }, { "epoch": 3.7797255061829054, "grad_norm": 1.3213989734649658, "learning_rate": 0.0005277211577660009, "loss": 3.7133, "step": 55630 }, { "epoch": 3.780065226253567, "grad_norm": 1.0998059511184692, "learning_rate": 0.0005276786927571681, "loss": 3.6463, "step": 55635 }, { "epoch": 3.7804049463242286, "grad_norm": 1.2787327766418457, "learning_rate": 0.0005276362277483354, "loss": 3.7338, "step": 55640 }, { "epoch": 
3.7807446663948907, "grad_norm": 1.0229341983795166, "learning_rate": 0.0005275937627395027, "loss": 3.4656, "step": 55645 }, { "epoch": 3.7810843864655523, "grad_norm": 1.3430695533752441, "learning_rate": 0.0005275512977306699, "loss": 3.4171, "step": 55650 }, { "epoch": 3.781424106536214, "grad_norm": 1.127907156944275, "learning_rate": 0.0005275088327218372, "loss": 3.573, "step": 55655 }, { "epoch": 3.781763826606876, "grad_norm": 1.2682760953903198, "learning_rate": 0.0005274663677130044, "loss": 3.4664, "step": 55660 }, { "epoch": 3.7821035466775377, "grad_norm": 1.789066195487976, "learning_rate": 0.0005274239027041718, "loss": 3.3416, "step": 55665 }, { "epoch": 3.7824432667481993, "grad_norm": 1.1043038368225098, "learning_rate": 0.0005273814376953391, "loss": 3.6286, "step": 55670 }, { "epoch": 3.7827829868188614, "grad_norm": 1.3772722482681274, "learning_rate": 0.0005273389726865063, "loss": 3.3993, "step": 55675 }, { "epoch": 3.783122706889523, "grad_norm": 1.1258134841918945, "learning_rate": 0.0005272965076776736, "loss": 3.4212, "step": 55680 }, { "epoch": 3.7834624269601846, "grad_norm": 1.4416747093200684, "learning_rate": 0.0005272540426688409, "loss": 3.4892, "step": 55685 }, { "epoch": 3.7838021470308467, "grad_norm": 1.6423838138580322, "learning_rate": 0.0005272115776600081, "loss": 3.2959, "step": 55690 }, { "epoch": 3.7841418671015083, "grad_norm": 1.4898779392242432, "learning_rate": 0.0005271691126511754, "loss": 3.7875, "step": 55695 }, { "epoch": 3.78448158717217, "grad_norm": 1.2553874254226685, "learning_rate": 0.0005271266476423428, "loss": 3.3185, "step": 55700 }, { "epoch": 3.784821307242832, "grad_norm": 1.490675926208496, "learning_rate": 0.00052708418263351, "loss": 3.4006, "step": 55705 }, { "epoch": 3.7851610273134937, "grad_norm": 1.0459998846054077, "learning_rate": 0.0005270417176246772, "loss": 3.3595, "step": 55710 }, { "epoch": 3.7855007473841553, "grad_norm": 1.3955090045928955, "learning_rate": 0.0005269992526158446, 
"loss": 3.392, "step": 55715 }, { "epoch": 3.7858404674548174, "grad_norm": 1.4957166910171509, "learning_rate": 0.0005269567876070118, "loss": 3.4592, "step": 55720 }, { "epoch": 3.786180187525479, "grad_norm": 1.3373417854309082, "learning_rate": 0.000526914322598179, "loss": 3.2971, "step": 55725 }, { "epoch": 3.7865199075961407, "grad_norm": 1.4492748975753784, "learning_rate": 0.0005268718575893464, "loss": 3.6502, "step": 55730 }, { "epoch": 3.7868596276668027, "grad_norm": 1.2942211627960205, "learning_rate": 0.0005268293925805137, "loss": 3.3725, "step": 55735 }, { "epoch": 3.7871993477374644, "grad_norm": 2.093897581100464, "learning_rate": 0.0005267869275716809, "loss": 3.663, "step": 55740 }, { "epoch": 3.787539067808126, "grad_norm": 1.055133581161499, "learning_rate": 0.0005267444625628483, "loss": 3.5517, "step": 55745 }, { "epoch": 3.787878787878788, "grad_norm": 1.303173542022705, "learning_rate": 0.0005267019975540155, "loss": 3.4285, "step": 55750 }, { "epoch": 3.7882185079494497, "grad_norm": 1.2144614458084106, "learning_rate": 0.0005266595325451827, "loss": 3.6597, "step": 55755 }, { "epoch": 3.7885582280201113, "grad_norm": 1.15386164188385, "learning_rate": 0.00052661706753635, "loss": 3.4332, "step": 55760 }, { "epoch": 3.7888979480907734, "grad_norm": 1.1191232204437256, "learning_rate": 0.0005265746025275173, "loss": 3.3902, "step": 55765 }, { "epoch": 3.789237668161435, "grad_norm": 1.5006487369537354, "learning_rate": 0.0005265321375186846, "loss": 3.6527, "step": 55770 }, { "epoch": 3.7895773882320967, "grad_norm": 1.2951186895370483, "learning_rate": 0.0005264896725098519, "loss": 3.455, "step": 55775 }, { "epoch": 3.7899171083027587, "grad_norm": 1.5075329542160034, "learning_rate": 0.0005264472075010192, "loss": 3.5756, "step": 55780 }, { "epoch": 3.7902568283734204, "grad_norm": 1.234939455986023, "learning_rate": 0.0005264047424921864, "loss": 3.7962, "step": 55785 }, { "epoch": 3.790596548444082, "grad_norm": 1.4821395874023438, 
"learning_rate": 0.0005263622774833537, "loss": 3.4504, "step": 55790 }, { "epoch": 3.790936268514744, "grad_norm": 1.2319669723510742, "learning_rate": 0.000526319812474521, "loss": 3.4662, "step": 55795 }, { "epoch": 3.7912759885854057, "grad_norm": 1.5469849109649658, "learning_rate": 0.0005262773474656883, "loss": 3.262, "step": 55800 }, { "epoch": 3.7916157086560673, "grad_norm": 2.111184597015381, "learning_rate": 0.0005262348824568556, "loss": 3.4085, "step": 55805 }, { "epoch": 3.7919554287267294, "grad_norm": 1.166341781616211, "learning_rate": 0.0005261924174480229, "loss": 3.4021, "step": 55810 }, { "epoch": 3.792295148797391, "grad_norm": 1.18702232837677, "learning_rate": 0.0005261499524391902, "loss": 3.315, "step": 55815 }, { "epoch": 3.7926348688680527, "grad_norm": 1.1888678073883057, "learning_rate": 0.0005261074874303574, "loss": 3.7509, "step": 55820 }, { "epoch": 3.7929745889387148, "grad_norm": 1.3905147314071655, "learning_rate": 0.0005260650224215246, "loss": 3.4212, "step": 55825 }, { "epoch": 3.7933143090093764, "grad_norm": 1.183010458946228, "learning_rate": 0.000526022557412692, "loss": 3.6052, "step": 55830 }, { "epoch": 3.793654029080038, "grad_norm": 1.700023889541626, "learning_rate": 0.0005259800924038592, "loss": 3.3545, "step": 55835 }, { "epoch": 3.7939937491507, "grad_norm": 1.5122472047805786, "learning_rate": 0.0005259376273950265, "loss": 3.4402, "step": 55840 }, { "epoch": 3.7943334692213617, "grad_norm": 1.1595910787582397, "learning_rate": 0.0005258951623861939, "loss": 3.4698, "step": 55845 }, { "epoch": 3.7946731892920234, "grad_norm": 1.5094618797302246, "learning_rate": 0.0005258526973773611, "loss": 3.257, "step": 55850 }, { "epoch": 3.795012909362685, "grad_norm": 1.5072803497314453, "learning_rate": 0.0005258102323685283, "loss": 3.315, "step": 55855 }, { "epoch": 3.795352629433347, "grad_norm": 1.2246522903442383, "learning_rate": 0.0005257677673596957, "loss": 3.4911, "step": 55860 }, { "epoch": 
3.7956923495040087, "grad_norm": 1.6474922895431519, "learning_rate": 0.0005257253023508629, "loss": 3.4436, "step": 55865 }, { "epoch": 3.7960320695746703, "grad_norm": 1.3425235748291016, "learning_rate": 0.0005256828373420301, "loss": 3.571, "step": 55870 }, { "epoch": 3.7963717896453324, "grad_norm": 1.4498834609985352, "learning_rate": 0.0005256403723331975, "loss": 3.3254, "step": 55875 }, { "epoch": 3.796711509715994, "grad_norm": 1.1578857898712158, "learning_rate": 0.0005255979073243648, "loss": 3.6067, "step": 55880 }, { "epoch": 3.7970512297866557, "grad_norm": 1.18682062625885, "learning_rate": 0.000525555442315532, "loss": 3.6247, "step": 55885 }, { "epoch": 3.7973909498573177, "grad_norm": 1.312233567237854, "learning_rate": 0.0005255129773066993, "loss": 3.6051, "step": 55890 }, { "epoch": 3.7977306699279794, "grad_norm": 1.2892340421676636, "learning_rate": 0.0005254705122978666, "loss": 3.4059, "step": 55895 }, { "epoch": 3.798070389998641, "grad_norm": 1.2138326168060303, "learning_rate": 0.0005254280472890338, "loss": 3.6062, "step": 55900 }, { "epoch": 3.7984101100693026, "grad_norm": 1.7183014154434204, "learning_rate": 0.0005253855822802011, "loss": 3.5877, "step": 55905 }, { "epoch": 3.7987498301399647, "grad_norm": 1.1869616508483887, "learning_rate": 0.0005253431172713685, "loss": 3.0037, "step": 55910 }, { "epoch": 3.7990895502106263, "grad_norm": 1.0017253160476685, "learning_rate": 0.0005253006522625357, "loss": 3.3567, "step": 55915 }, { "epoch": 3.799429270281288, "grad_norm": 1.5774670839309692, "learning_rate": 0.000525258187253703, "loss": 3.5241, "step": 55920 }, { "epoch": 3.79976899035195, "grad_norm": 1.2381706237792969, "learning_rate": 0.0005252157222448702, "loss": 3.6222, "step": 55925 }, { "epoch": 3.8001087104226117, "grad_norm": 1.271519422531128, "learning_rate": 0.0005251732572360375, "loss": 3.5588, "step": 55930 }, { "epoch": 3.8004484304932733, "grad_norm": 1.05498206615448, "learning_rate": 0.0005251307922272048, 
"loss": 3.6604, "step": 55935 }, { "epoch": 3.8007881505639354, "grad_norm": 1.0917586088180542, "learning_rate": 0.000525088327218372, "loss": 3.2298, "step": 55940 }, { "epoch": 3.801127870634597, "grad_norm": 1.4729564189910889, "learning_rate": 0.0005250458622095394, "loss": 3.5453, "step": 55945 }, { "epoch": 3.8014675907052586, "grad_norm": 1.415036916732788, "learning_rate": 0.0005250033972007067, "loss": 3.4856, "step": 55950 }, { "epoch": 3.8018073107759207, "grad_norm": 1.1923418045043945, "learning_rate": 0.0005249609321918739, "loss": 3.1556, "step": 55955 }, { "epoch": 3.8021470308465823, "grad_norm": 1.2698414325714111, "learning_rate": 0.0005249184671830411, "loss": 3.5504, "step": 55960 }, { "epoch": 3.802486750917244, "grad_norm": 1.16848886013031, "learning_rate": 0.0005248760021742085, "loss": 3.4179, "step": 55965 }, { "epoch": 3.802826470987906, "grad_norm": 1.3679604530334473, "learning_rate": 0.0005248335371653757, "loss": 3.6064, "step": 55970 }, { "epoch": 3.8031661910585677, "grad_norm": 1.491543173789978, "learning_rate": 0.0005247910721565429, "loss": 3.5626, "step": 55975 }, { "epoch": 3.8035059111292293, "grad_norm": 1.1586425304412842, "learning_rate": 0.0005247486071477104, "loss": 3.3417, "step": 55980 }, { "epoch": 3.8038456311998914, "grad_norm": 1.1829060316085815, "learning_rate": 0.0005247061421388776, "loss": 3.6014, "step": 55985 }, { "epoch": 3.804185351270553, "grad_norm": 1.3624879121780396, "learning_rate": 0.0005246636771300448, "loss": 3.4357, "step": 55990 }, { "epoch": 3.8045250713412146, "grad_norm": 1.1506824493408203, "learning_rate": 0.0005246212121212122, "loss": 3.4237, "step": 55995 }, { "epoch": 3.8048647914118767, "grad_norm": 1.413704514503479, "learning_rate": 0.0005245787471123794, "loss": 3.2935, "step": 56000 }, { "epoch": 3.8052045114825384, "grad_norm": 1.3028916120529175, "learning_rate": 0.0005245362821035466, "loss": 3.2544, "step": 56005 }, { "epoch": 3.8055442315532, "grad_norm": 
1.7371996641159058, "learning_rate": 0.000524493817094714, "loss": 3.3498, "step": 56010 }, { "epoch": 3.805883951623862, "grad_norm": 1.062072515487671, "learning_rate": 0.0005244513520858813, "loss": 3.3914, "step": 56015 }, { "epoch": 3.8062236716945237, "grad_norm": 1.1335428953170776, "learning_rate": 0.0005244088870770485, "loss": 3.4995, "step": 56020 }, { "epoch": 3.8065633917651853, "grad_norm": 1.4972363710403442, "learning_rate": 0.0005243664220682158, "loss": 3.4018, "step": 56025 }, { "epoch": 3.8069031118358474, "grad_norm": 1.083500623703003, "learning_rate": 0.0005243239570593831, "loss": 3.5586, "step": 56030 }, { "epoch": 3.807242831906509, "grad_norm": 1.2811760902404785, "learning_rate": 0.0005242814920505503, "loss": 3.3195, "step": 56035 }, { "epoch": 3.8075825519771707, "grad_norm": 1.0333576202392578, "learning_rate": 0.0005242390270417176, "loss": 3.5325, "step": 56040 }, { "epoch": 3.8079222720478327, "grad_norm": 1.1472734212875366, "learning_rate": 0.0005241965620328849, "loss": 3.331, "step": 56045 }, { "epoch": 3.8082619921184944, "grad_norm": 1.6437071561813354, "learning_rate": 0.0005241540970240522, "loss": 3.5267, "step": 56050 }, { "epoch": 3.808601712189156, "grad_norm": 1.393266201019287, "learning_rate": 0.0005241116320152195, "loss": 3.6215, "step": 56055 }, { "epoch": 3.808941432259818, "grad_norm": 1.200576663017273, "learning_rate": 0.0005240691670063867, "loss": 3.3519, "step": 56060 }, { "epoch": 3.8092811523304797, "grad_norm": 1.202340006828308, "learning_rate": 0.000524026701997554, "loss": 3.5958, "step": 56065 }, { "epoch": 3.8096208724011413, "grad_norm": 1.3550196886062622, "learning_rate": 0.0005239842369887213, "loss": 3.5312, "step": 56070 }, { "epoch": 3.8099605924718034, "grad_norm": 1.475365400314331, "learning_rate": 0.0005239417719798885, "loss": 3.6491, "step": 56075 }, { "epoch": 3.810300312542465, "grad_norm": 1.6355401277542114, "learning_rate": 0.0005238993069710558, "loss": 3.5651, "step": 56080 }, { 
"epoch": 3.8106400326131267, "grad_norm": 1.3761101961135864, "learning_rate": 0.0005238568419622232, "loss": 3.4898, "step": 56085 }, { "epoch": 3.8109797526837887, "grad_norm": 1.5621408224105835, "learning_rate": 0.0005238143769533904, "loss": 3.37, "step": 56090 }, { "epoch": 3.8113194727544504, "grad_norm": 1.745833396911621, "learning_rate": 0.0005237719119445577, "loss": 3.5459, "step": 56095 }, { "epoch": 3.811659192825112, "grad_norm": 1.5097402334213257, "learning_rate": 0.000523729446935725, "loss": 3.5612, "step": 56100 }, { "epoch": 3.811998912895774, "grad_norm": 1.2811611890792847, "learning_rate": 0.0005236869819268922, "loss": 3.6078, "step": 56105 }, { "epoch": 3.8123386329664357, "grad_norm": 1.3479241132736206, "learning_rate": 0.0005236445169180594, "loss": 3.6316, "step": 56110 }, { "epoch": 3.8126783530370973, "grad_norm": 1.2642971277236938, "learning_rate": 0.0005236020519092269, "loss": 3.7005, "step": 56115 }, { "epoch": 3.8130180731077594, "grad_norm": 1.2326304912567139, "learning_rate": 0.0005235595869003941, "loss": 3.4799, "step": 56120 }, { "epoch": 3.813357793178421, "grad_norm": 1.1639419794082642, "learning_rate": 0.0005235171218915613, "loss": 3.6019, "step": 56125 }, { "epoch": 3.8136975132490827, "grad_norm": 1.3659671545028687, "learning_rate": 0.0005234746568827287, "loss": 3.4064, "step": 56130 }, { "epoch": 3.8140372333197448, "grad_norm": 1.2919983863830566, "learning_rate": 0.0005234321918738959, "loss": 3.303, "step": 56135 }, { "epoch": 3.8143769533904064, "grad_norm": 1.550523042678833, "learning_rate": 0.0005233897268650632, "loss": 3.3228, "step": 56140 }, { "epoch": 3.814716673461068, "grad_norm": 1.46627938747406, "learning_rate": 0.0005233472618562305, "loss": 3.4435, "step": 56145 }, { "epoch": 3.81505639353173, "grad_norm": 1.2303589582443237, "learning_rate": 0.0005233047968473978, "loss": 3.6354, "step": 56150 }, { "epoch": 3.8153961136023917, "grad_norm": 1.6851270198822021, "learning_rate": 
0.0005232623318385651, "loss": 3.5569, "step": 56155 }, { "epoch": 3.8157358336730534, "grad_norm": 1.4801503419876099, "learning_rate": 0.0005232198668297323, "loss": 3.3276, "step": 56160 }, { "epoch": 3.8160755537437154, "grad_norm": 1.6780167818069458, "learning_rate": 0.0005231774018208996, "loss": 3.5636, "step": 56165 }, { "epoch": 3.816415273814377, "grad_norm": 1.3546730279922485, "learning_rate": 0.0005231349368120669, "loss": 3.4242, "step": 56170 }, { "epoch": 3.8167549938850387, "grad_norm": 1.0119553804397583, "learning_rate": 0.0005230924718032341, "loss": 3.3404, "step": 56175 }, { "epoch": 3.8170947139557008, "grad_norm": 1.1751189231872559, "learning_rate": 0.0005230500067944014, "loss": 3.5861, "step": 56180 }, { "epoch": 3.8174344340263624, "grad_norm": 1.0979554653167725, "learning_rate": 0.0005230075417855688, "loss": 3.5802, "step": 56185 }, { "epoch": 3.817774154097024, "grad_norm": 1.2353185415267944, "learning_rate": 0.000522965076776736, "loss": 3.5928, "step": 56190 }, { "epoch": 3.8181138741676857, "grad_norm": 1.320906162261963, "learning_rate": 0.0005229226117679033, "loss": 3.7229, "step": 56195 }, { "epoch": 3.8184535942383477, "grad_norm": 1.2905350923538208, "learning_rate": 0.0005228801467590706, "loss": 3.6375, "step": 56200 }, { "epoch": 3.8187933143090094, "grad_norm": 1.2737146615982056, "learning_rate": 0.0005228376817502378, "loss": 3.1365, "step": 56205 }, { "epoch": 3.819133034379671, "grad_norm": 1.2074346542358398, "learning_rate": 0.000522795216741405, "loss": 3.5409, "step": 56210 }, { "epoch": 3.819472754450333, "grad_norm": 1.2674155235290527, "learning_rate": 0.0005227527517325724, "loss": 3.472, "step": 56215 }, { "epoch": 3.8198124745209947, "grad_norm": 1.4289541244506836, "learning_rate": 0.0005227102867237397, "loss": 3.6902, "step": 56220 }, { "epoch": 3.8201521945916563, "grad_norm": 1.3445277214050293, "learning_rate": 0.0005226678217149069, "loss": 3.38, "step": 56225 }, { "epoch": 3.8204919146623184, 
"grad_norm": 1.886610507965088, "learning_rate": 0.0005226253567060743, "loss": 3.2822, "step": 56230 }, { "epoch": 3.82083163473298, "grad_norm": 1.2015947103500366, "learning_rate": 0.0005225828916972415, "loss": 3.5891, "step": 56235 }, { "epoch": 3.8211713548036417, "grad_norm": 1.1775695085525513, "learning_rate": 0.0005225404266884087, "loss": 3.5665, "step": 56240 }, { "epoch": 3.8215110748743033, "grad_norm": 1.3073350191116333, "learning_rate": 0.0005224979616795761, "loss": 3.3758, "step": 56245 }, { "epoch": 3.8218507949449654, "grad_norm": 1.4645723104476929, "learning_rate": 0.0005224554966707433, "loss": 3.3716, "step": 56250 }, { "epoch": 3.822190515015627, "grad_norm": 1.394485354423523, "learning_rate": 0.0005224130316619106, "loss": 3.6659, "step": 56255 }, { "epoch": 3.8225302350862886, "grad_norm": 1.4033851623535156, "learning_rate": 0.000522370566653078, "loss": 3.3261, "step": 56260 }, { "epoch": 3.8228699551569507, "grad_norm": 1.0252556800842285, "learning_rate": 0.0005223281016442452, "loss": 3.4095, "step": 56265 }, { "epoch": 3.8232096752276123, "grad_norm": 1.0531706809997559, "learning_rate": 0.0005222856366354124, "loss": 3.172, "step": 56270 }, { "epoch": 3.823549395298274, "grad_norm": 1.2574254274368286, "learning_rate": 0.0005222431716265797, "loss": 3.4638, "step": 56275 }, { "epoch": 3.823889115368936, "grad_norm": 1.4765470027923584, "learning_rate": 0.000522200706617747, "loss": 3.5492, "step": 56280 }, { "epoch": 3.8242288354395977, "grad_norm": 1.7118637561798096, "learning_rate": 0.0005221582416089142, "loss": 3.6014, "step": 56285 }, { "epoch": 3.8245685555102593, "grad_norm": 1.2735369205474854, "learning_rate": 0.0005221157766000816, "loss": 3.6563, "step": 56290 }, { "epoch": 3.8249082755809214, "grad_norm": 1.1857516765594482, "learning_rate": 0.0005220733115912489, "loss": 3.5991, "step": 56295 }, { "epoch": 3.825247995651583, "grad_norm": 1.099484920501709, "learning_rate": 0.0005220308465824161, "loss": 3.5339, 
"step": 56300 }, { "epoch": 3.8255877157222447, "grad_norm": 1.201436161994934, "learning_rate": 0.0005219883815735834, "loss": 3.4766, "step": 56305 }, { "epoch": 3.8259274357929067, "grad_norm": 2.14532732963562, "learning_rate": 0.0005219459165647506, "loss": 3.635, "step": 56310 }, { "epoch": 3.8262671558635684, "grad_norm": 1.072402000427246, "learning_rate": 0.0005219034515559179, "loss": 3.5791, "step": 56315 }, { "epoch": 3.82660687593423, "grad_norm": 1.1435338258743286, "learning_rate": 0.0005218609865470852, "loss": 3.1307, "step": 56320 }, { "epoch": 3.826946596004892, "grad_norm": 1.2696328163146973, "learning_rate": 0.0005218185215382525, "loss": 3.4445, "step": 56325 }, { "epoch": 3.8272863160755537, "grad_norm": 1.0727128982543945, "learning_rate": 0.0005217760565294198, "loss": 3.3371, "step": 56330 }, { "epoch": 3.8276260361462153, "grad_norm": 1.3787190914154053, "learning_rate": 0.0005217335915205871, "loss": 3.6098, "step": 56335 }, { "epoch": 3.8279657562168774, "grad_norm": 0.9918201565742493, "learning_rate": 0.0005216911265117543, "loss": 3.5702, "step": 56340 }, { "epoch": 3.828305476287539, "grad_norm": 1.2189700603485107, "learning_rate": 0.0005216486615029215, "loss": 3.4878, "step": 56345 }, { "epoch": 3.8286451963582007, "grad_norm": 1.1286797523498535, "learning_rate": 0.0005216061964940889, "loss": 3.6708, "step": 56350 }, { "epoch": 3.8289849164288627, "grad_norm": 1.6055446863174438, "learning_rate": 0.0005215637314852561, "loss": 3.1283, "step": 56355 }, { "epoch": 3.8293246364995244, "grad_norm": 1.4850668907165527, "learning_rate": 0.0005215212664764234, "loss": 3.4325, "step": 56360 }, { "epoch": 3.829664356570186, "grad_norm": 1.265531063079834, "learning_rate": 0.0005214788014675908, "loss": 3.6001, "step": 56365 }, { "epoch": 3.830004076640848, "grad_norm": 1.1051247119903564, "learning_rate": 0.000521436336458758, "loss": 3.4366, "step": 56370 }, { "epoch": 3.8303437967115097, "grad_norm": 1.6564767360687256, 
"learning_rate": 0.0005213938714499252, "loss": 3.3979, "step": 56375 }, { "epoch": 3.8306835167821713, "grad_norm": 1.7466131448745728, "learning_rate": 0.0005213514064410926, "loss": 3.3642, "step": 56380 }, { "epoch": 3.8310232368528334, "grad_norm": 1.2370685338974, "learning_rate": 0.0005213089414322598, "loss": 3.3356, "step": 56385 }, { "epoch": 3.831362956923495, "grad_norm": 1.4905661344528198, "learning_rate": 0.000521266476423427, "loss": 3.3135, "step": 56390 }, { "epoch": 3.8317026769941567, "grad_norm": 4.245719909667969, "learning_rate": 0.0005212240114145945, "loss": 3.5392, "step": 56395 }, { "epoch": 3.8320423970648188, "grad_norm": 1.0588719844818115, "learning_rate": 0.0005211815464057617, "loss": 3.5408, "step": 56400 }, { "epoch": 3.8323821171354804, "grad_norm": 1.3333187103271484, "learning_rate": 0.0005211390813969289, "loss": 3.5856, "step": 56405 }, { "epoch": 3.832721837206142, "grad_norm": 1.1194404363632202, "learning_rate": 0.0005210966163880962, "loss": 3.5929, "step": 56410 }, { "epoch": 3.833061557276804, "grad_norm": 1.2093838453292847, "learning_rate": 0.0005210541513792635, "loss": 3.3007, "step": 56415 }, { "epoch": 3.8334012773474657, "grad_norm": 1.2141777276992798, "learning_rate": 0.0005210116863704307, "loss": 3.3132, "step": 56420 }, { "epoch": 3.8337409974181273, "grad_norm": 2.5632922649383545, "learning_rate": 0.000520969221361598, "loss": 3.2811, "step": 56425 }, { "epoch": 3.8340807174887894, "grad_norm": 1.4469470977783203, "learning_rate": 0.0005209267563527654, "loss": 3.3572, "step": 56430 }, { "epoch": 3.834420437559451, "grad_norm": 1.4107486009597778, "learning_rate": 0.0005208842913439326, "loss": 3.403, "step": 56435 }, { "epoch": 3.8347601576301127, "grad_norm": 1.4818141460418701, "learning_rate": 0.0005208418263350999, "loss": 3.2779, "step": 56440 }, { "epoch": 3.8350998777007748, "grad_norm": 1.3444139957427979, "learning_rate": 0.0005207993613262672, "loss": 3.4662, "step": 56445 }, { "epoch": 
3.8354395977714364, "grad_norm": 1.219883680343628, "learning_rate": 0.0005207568963174344, "loss": 3.4514, "step": 56450 }, { "epoch": 3.835779317842098, "grad_norm": 1.0930931568145752, "learning_rate": 0.0005207144313086017, "loss": 3.5986, "step": 56455 }, { "epoch": 3.83611903791276, "grad_norm": 1.1701610088348389, "learning_rate": 0.0005206719662997689, "loss": 3.5547, "step": 56460 }, { "epoch": 3.8364587579834217, "grad_norm": 1.1921569108963013, "learning_rate": 0.0005206295012909363, "loss": 3.4125, "step": 56465 }, { "epoch": 3.8367984780540834, "grad_norm": 1.1140400171279907, "learning_rate": 0.0005205870362821036, "loss": 3.4346, "step": 56470 }, { "epoch": 3.8371381981247454, "grad_norm": 1.245551347732544, "learning_rate": 0.0005205445712732708, "loss": 3.495, "step": 56475 }, { "epoch": 3.837477918195407, "grad_norm": 1.558412790298462, "learning_rate": 0.0005205021062644382, "loss": 3.4732, "step": 56480 }, { "epoch": 3.8378176382660687, "grad_norm": 1.5129307508468628, "learning_rate": 0.0005204596412556054, "loss": 3.7166, "step": 56485 }, { "epoch": 3.8381573583367308, "grad_norm": 1.8902519941329956, "learning_rate": 0.0005204171762467726, "loss": 3.423, "step": 56490 }, { "epoch": 3.8384970784073924, "grad_norm": 1.1557068824768066, "learning_rate": 0.00052037471123794, "loss": 3.7632, "step": 56495 }, { "epoch": 3.838836798478054, "grad_norm": 3.8550429344177246, "learning_rate": 0.0005203322462291073, "loss": 3.3297, "step": 56500 }, { "epoch": 3.839176518548716, "grad_norm": 1.1939438581466675, "learning_rate": 0.0005202897812202745, "loss": 3.4483, "step": 56505 }, { "epoch": 3.8395162386193777, "grad_norm": 1.3412214517593384, "learning_rate": 0.0005202473162114418, "loss": 3.6547, "step": 56510 }, { "epoch": 3.8398559586900394, "grad_norm": 1.3365230560302734, "learning_rate": 0.0005202048512026091, "loss": 3.5545, "step": 56515 }, { "epoch": 3.8401956787607014, "grad_norm": 2.13258957862854, "learning_rate": 0.0005201623861937763, 
"loss": 3.5114, "step": 56520 }, { "epoch": 3.840535398831363, "grad_norm": 1.0278483629226685, "learning_rate": 0.0005201199211849436, "loss": 3.2737, "step": 56525 }, { "epoch": 3.8408751189020247, "grad_norm": 1.317821741104126, "learning_rate": 0.0005200774561761109, "loss": 3.4784, "step": 56530 }, { "epoch": 3.8412148389726863, "grad_norm": 1.7662075757980347, "learning_rate": 0.0005200349911672782, "loss": 3.3807, "step": 56535 }, { "epoch": 3.8415545590433484, "grad_norm": 1.8385988473892212, "learning_rate": 0.0005199925261584455, "loss": 3.7149, "step": 56540 }, { "epoch": 3.84189427911401, "grad_norm": 1.5858529806137085, "learning_rate": 0.0005199500611496128, "loss": 3.5768, "step": 56545 }, { "epoch": 3.8422339991846717, "grad_norm": 1.0652114152908325, "learning_rate": 0.00051990759614078, "loss": 3.5401, "step": 56550 }, { "epoch": 3.8425737192553338, "grad_norm": 1.4353001117706299, "learning_rate": 0.0005198651311319473, "loss": 3.6396, "step": 56555 }, { "epoch": 3.8429134393259954, "grad_norm": 1.6171698570251465, "learning_rate": 0.0005198226661231145, "loss": 3.6163, "step": 56560 }, { "epoch": 3.843253159396657, "grad_norm": 1.573922038078308, "learning_rate": 0.0005197802011142818, "loss": 3.3387, "step": 56565 }, { "epoch": 3.843592879467319, "grad_norm": 1.2323757410049438, "learning_rate": 0.0005197377361054492, "loss": 3.4169, "step": 56570 }, { "epoch": 3.8439325995379807, "grad_norm": 1.3413327932357788, "learning_rate": 0.0005196952710966164, "loss": 3.3606, "step": 56575 }, { "epoch": 3.8442723196086424, "grad_norm": 1.0280389785766602, "learning_rate": 0.0005196528060877837, "loss": 3.5248, "step": 56580 }, { "epoch": 3.844612039679304, "grad_norm": 1.5584681034088135, "learning_rate": 0.000519610341078951, "loss": 3.7213, "step": 56585 }, { "epoch": 3.844951759749966, "grad_norm": 1.116068959236145, "learning_rate": 0.0005195678760701182, "loss": 3.2882, "step": 56590 }, { "epoch": 3.8452914798206277, "grad_norm": 
1.0639464855194092, "learning_rate": 0.0005195254110612854, "loss": 3.3599, "step": 56595 }, { "epoch": 3.8456311998912893, "grad_norm": 1.3434399366378784, "learning_rate": 0.0005194829460524528, "loss": 3.5874, "step": 56600 }, { "epoch": 3.8459709199619514, "grad_norm": 1.2162764072418213, "learning_rate": 0.0005194404810436201, "loss": 3.2982, "step": 56605 }, { "epoch": 3.846310640032613, "grad_norm": 1.0451875925064087, "learning_rate": 0.0005193980160347873, "loss": 3.3168, "step": 56610 }, { "epoch": 3.8466503601032747, "grad_norm": 1.7543866634368896, "learning_rate": 0.0005193555510259547, "loss": 3.7302, "step": 56615 }, { "epoch": 3.8469900801739367, "grad_norm": 1.2486519813537598, "learning_rate": 0.0005193130860171219, "loss": 3.2959, "step": 56620 }, { "epoch": 3.8473298002445984, "grad_norm": 1.1733572483062744, "learning_rate": 0.0005192706210082891, "loss": 3.3243, "step": 56625 }, { "epoch": 3.84766952031526, "grad_norm": 1.4820722341537476, "learning_rate": 0.0005192281559994565, "loss": 3.5091, "step": 56630 }, { "epoch": 3.848009240385922, "grad_norm": 1.3554917573928833, "learning_rate": 0.0005191856909906237, "loss": 3.6574, "step": 56635 }, { "epoch": 3.8483489604565837, "grad_norm": 1.1087887287139893, "learning_rate": 0.000519143225981791, "loss": 3.4616, "step": 56640 }, { "epoch": 3.8486886805272453, "grad_norm": 1.0762596130371094, "learning_rate": 0.0005191007609729584, "loss": 3.5235, "step": 56645 }, { "epoch": 3.8490284005979074, "grad_norm": 1.0654090642929077, "learning_rate": 0.0005190582959641256, "loss": 3.4518, "step": 56650 }, { "epoch": 3.849368120668569, "grad_norm": 1.159363031387329, "learning_rate": 0.0005190158309552928, "loss": 3.439, "step": 56655 }, { "epoch": 3.8497078407392307, "grad_norm": 1.1513664722442627, "learning_rate": 0.0005189733659464601, "loss": 3.5339, "step": 56660 }, { "epoch": 3.8500475608098927, "grad_norm": 1.405322551727295, "learning_rate": 0.0005189309009376274, "loss": 3.3314, "step": 56665 
}, { "epoch": 3.8503872808805544, "grad_norm": 1.135754942893982, "learning_rate": 0.0005188884359287946, "loss": 3.2602, "step": 56670 }, { "epoch": 3.850727000951216, "grad_norm": 1.1557506322860718, "learning_rate": 0.000518845970919962, "loss": 3.6413, "step": 56675 }, { "epoch": 3.851066721021878, "grad_norm": 1.739172101020813, "learning_rate": 0.0005188035059111293, "loss": 3.3618, "step": 56680 }, { "epoch": 3.8514064410925397, "grad_norm": 1.2748218774795532, "learning_rate": 0.0005187610409022965, "loss": 3.2126, "step": 56685 }, { "epoch": 3.8517461611632013, "grad_norm": 1.4035271406173706, "learning_rate": 0.0005187185758934638, "loss": 3.4673, "step": 56690 }, { "epoch": 3.8520858812338634, "grad_norm": 1.5925090312957764, "learning_rate": 0.000518676110884631, "loss": 3.6435, "step": 56695 }, { "epoch": 3.852425601304525, "grad_norm": 1.2901179790496826, "learning_rate": 0.0005186336458757983, "loss": 3.2958, "step": 56700 }, { "epoch": 3.8527653213751867, "grad_norm": 1.269170880317688, "learning_rate": 0.0005185911808669657, "loss": 3.2255, "step": 56705 }, { "epoch": 3.8531050414458488, "grad_norm": 1.1638540029525757, "learning_rate": 0.0005185487158581329, "loss": 3.4585, "step": 56710 }, { "epoch": 3.8534447615165104, "grad_norm": 1.4976670742034912, "learning_rate": 0.0005185062508493002, "loss": 3.4346, "step": 56715 }, { "epoch": 3.853784481587172, "grad_norm": 1.033111810684204, "learning_rate": 0.0005184637858404675, "loss": 3.4088, "step": 56720 }, { "epoch": 3.854124201657834, "grad_norm": 1.326583743095398, "learning_rate": 0.0005184213208316347, "loss": 3.4274, "step": 56725 }, { "epoch": 3.8544639217284957, "grad_norm": 1.1626888513565063, "learning_rate": 0.000518378855822802, "loss": 3.4978, "step": 56730 }, { "epoch": 3.8548036417991574, "grad_norm": 1.1182289123535156, "learning_rate": 0.0005183363908139693, "loss": 3.4009, "step": 56735 }, { "epoch": 3.8551433618698194, "grad_norm": 1.3568856716156006, "learning_rate": 
0.0005182939258051366, "loss": 3.4893, "step": 56740 }, { "epoch": 3.855483081940481, "grad_norm": 1.4142485857009888, "learning_rate": 0.0005182514607963038, "loss": 3.2527, "step": 56745 }, { "epoch": 3.8558228020111427, "grad_norm": 1.0533579587936401, "learning_rate": 0.0005182089957874712, "loss": 3.4704, "step": 56750 }, { "epoch": 3.8561625220818048, "grad_norm": 1.1849175691604614, "learning_rate": 0.0005181665307786384, "loss": 3.4288, "step": 56755 }, { "epoch": 3.8565022421524664, "grad_norm": 1.1176091432571411, "learning_rate": 0.0005181240657698056, "loss": 3.5633, "step": 56760 }, { "epoch": 3.856841962223128, "grad_norm": 1.1305031776428223, "learning_rate": 0.000518081600760973, "loss": 3.3096, "step": 56765 }, { "epoch": 3.85718168229379, "grad_norm": 1.5539084672927856, "learning_rate": 0.0005180391357521402, "loss": 3.6939, "step": 56770 }, { "epoch": 3.8575214023644517, "grad_norm": 0.9839348196983337, "learning_rate": 0.0005179966707433075, "loss": 3.4871, "step": 56775 }, { "epoch": 3.8578611224351134, "grad_norm": 1.3524945974349976, "learning_rate": 0.0005179542057344749, "loss": 3.4018, "step": 56780 }, { "epoch": 3.8582008425057754, "grad_norm": 1.1241824626922607, "learning_rate": 0.0005179117407256421, "loss": 3.4257, "step": 56785 }, { "epoch": 3.858540562576437, "grad_norm": 1.2314790487289429, "learning_rate": 0.0005178692757168093, "loss": 3.3407, "step": 56790 }, { "epoch": 3.8588802826470987, "grad_norm": 1.0156726837158203, "learning_rate": 0.0005178268107079766, "loss": 3.5889, "step": 56795 }, { "epoch": 3.8592200027177608, "grad_norm": 1.3104817867279053, "learning_rate": 0.0005177843456991439, "loss": 3.3267, "step": 56800 }, { "epoch": 3.8595597227884224, "grad_norm": 1.2811987400054932, "learning_rate": 0.0005177418806903111, "loss": 3.3685, "step": 56805 }, { "epoch": 3.859899442859084, "grad_norm": 1.1243484020233154, "learning_rate": 0.0005176994156814785, "loss": 3.5136, "step": 56810 }, { "epoch": 3.860239162929746, 
"grad_norm": 1.4831072092056274, "learning_rate": 0.0005176569506726458, "loss": 3.3826, "step": 56815 }, { "epoch": 3.8605788830004077, "grad_norm": 1.2008451223373413, "learning_rate": 0.0005176144856638131, "loss": 3.5582, "step": 56820 }, { "epoch": 3.8609186030710694, "grad_norm": 1.177404761314392, "learning_rate": 0.0005175720206549803, "loss": 3.5035, "step": 56825 }, { "epoch": 3.8612583231417315, "grad_norm": 1.1068685054779053, "learning_rate": 0.0005175295556461476, "loss": 3.3576, "step": 56830 }, { "epoch": 3.861598043212393, "grad_norm": 1.1943957805633545, "learning_rate": 0.0005174870906373149, "loss": 3.2574, "step": 56835 }, { "epoch": 3.8619377632830547, "grad_norm": 1.1635156869888306, "learning_rate": 0.0005174446256284821, "loss": 3.2979, "step": 56840 }, { "epoch": 3.862277483353717, "grad_norm": 1.3454036712646484, "learning_rate": 0.0005174021606196494, "loss": 3.3585, "step": 56845 }, { "epoch": 3.8626172034243784, "grad_norm": 1.3039820194244385, "learning_rate": 0.0005173596956108168, "loss": 3.2074, "step": 56850 }, { "epoch": 3.86295692349504, "grad_norm": 1.3390840291976929, "learning_rate": 0.000517317230601984, "loss": 3.6141, "step": 56855 }, { "epoch": 3.863296643565702, "grad_norm": 1.0144072771072388, "learning_rate": 0.0005172747655931512, "loss": 3.1764, "step": 56860 }, { "epoch": 3.8636363636363638, "grad_norm": 1.6762423515319824, "learning_rate": 0.0005172323005843186, "loss": 3.1508, "step": 56865 }, { "epoch": 3.8639760837070254, "grad_norm": 1.1886364221572876, "learning_rate": 0.0005171898355754858, "loss": 3.441, "step": 56870 }, { "epoch": 3.864315803777687, "grad_norm": 1.275747537612915, "learning_rate": 0.000517147370566653, "loss": 3.7178, "step": 56875 }, { "epoch": 3.864655523848349, "grad_norm": 1.4877545833587646, "learning_rate": 0.0005171049055578205, "loss": 3.6244, "step": 56880 }, { "epoch": 3.8649952439190107, "grad_norm": 1.4902242422103882, "learning_rate": 0.0005170624405489877, "loss": 3.3797, 
"step": 56885 }, { "epoch": 3.8653349639896724, "grad_norm": 1.331760287284851, "learning_rate": 0.0005170199755401549, "loss": 3.5202, "step": 56890 }, { "epoch": 3.8656746840603344, "grad_norm": 1.2193037271499634, "learning_rate": 0.0005169775105313222, "loss": 3.4284, "step": 56895 }, { "epoch": 3.866014404130996, "grad_norm": 1.3729292154312134, "learning_rate": 0.0005169350455224895, "loss": 3.4565, "step": 56900 }, { "epoch": 3.8663541242016577, "grad_norm": 1.040187120437622, "learning_rate": 0.0005168925805136567, "loss": 3.4584, "step": 56905 }, { "epoch": 3.8666938442723198, "grad_norm": 1.4101266860961914, "learning_rate": 0.000516850115504824, "loss": 3.689, "step": 56910 }, { "epoch": 3.8670335643429814, "grad_norm": 1.4059813022613525, "learning_rate": 0.0005168076504959914, "loss": 3.4381, "step": 56915 }, { "epoch": 3.867373284413643, "grad_norm": 1.195910930633545, "learning_rate": 0.0005167651854871586, "loss": 3.5071, "step": 56920 }, { "epoch": 3.8677130044843047, "grad_norm": 1.1897757053375244, "learning_rate": 0.0005167227204783259, "loss": 3.3638, "step": 56925 }, { "epoch": 3.8680527245549667, "grad_norm": 1.3877373933792114, "learning_rate": 0.0005166802554694932, "loss": 3.4956, "step": 56930 }, { "epoch": 3.8683924446256284, "grad_norm": 1.3464782238006592, "learning_rate": 0.0005166377904606604, "loss": 3.2686, "step": 56935 }, { "epoch": 3.86873216469629, "grad_norm": 1.5986777544021606, "learning_rate": 0.0005165953254518277, "loss": 3.4803, "step": 56940 }, { "epoch": 3.869071884766952, "grad_norm": 1.4058352708816528, "learning_rate": 0.0005165528604429949, "loss": 3.7161, "step": 56945 }, { "epoch": 3.8694116048376137, "grad_norm": 1.34333336353302, "learning_rate": 0.0005165103954341623, "loss": 3.3645, "step": 56950 }, { "epoch": 3.8697513249082753, "grad_norm": 1.227325201034546, "learning_rate": 0.0005164679304253296, "loss": 3.4375, "step": 56955 }, { "epoch": 3.8700910449789374, "grad_norm": 1.4762736558914185, 
"learning_rate": 0.0005164254654164968, "loss": 3.6424, "step": 56960 }, { "epoch": 3.870430765049599, "grad_norm": 1.223029613494873, "learning_rate": 0.0005163830004076641, "loss": 3.6052, "step": 56965 }, { "epoch": 3.8707704851202607, "grad_norm": 1.2442278861999512, "learning_rate": 0.0005163405353988314, "loss": 3.5138, "step": 56970 }, { "epoch": 3.8711102051909227, "grad_norm": 1.5605148077011108, "learning_rate": 0.0005162980703899986, "loss": 3.3491, "step": 56975 }, { "epoch": 3.8714499252615844, "grad_norm": 1.0802264213562012, "learning_rate": 0.0005162556053811658, "loss": 3.4746, "step": 56980 }, { "epoch": 3.871789645332246, "grad_norm": 1.2591538429260254, "learning_rate": 0.0005162131403723333, "loss": 3.0283, "step": 56985 }, { "epoch": 3.872129365402908, "grad_norm": 2.161363363265991, "learning_rate": 0.0005161706753635005, "loss": 3.5019, "step": 56990 }, { "epoch": 3.8724690854735697, "grad_norm": 1.325161337852478, "learning_rate": 0.0005161282103546677, "loss": 3.4426, "step": 56995 }, { "epoch": 3.8728088055442313, "grad_norm": 1.0317928791046143, "learning_rate": 0.0005160857453458351, "loss": 3.4736, "step": 57000 }, { "epoch": 3.8731485256148934, "grad_norm": 1.011711597442627, "learning_rate": 0.0005160432803370023, "loss": 3.3821, "step": 57005 }, { "epoch": 3.873488245685555, "grad_norm": 1.2839807271957397, "learning_rate": 0.0005160008153281695, "loss": 3.419, "step": 57010 }, { "epoch": 3.8738279657562167, "grad_norm": 1.305898904800415, "learning_rate": 0.0005159583503193369, "loss": 3.3746, "step": 57015 }, { "epoch": 3.8741676858268788, "grad_norm": 1.2087957859039307, "learning_rate": 0.0005159158853105042, "loss": 3.5289, "step": 57020 }, { "epoch": 3.8745074058975404, "grad_norm": 1.3720890283584595, "learning_rate": 0.0005158734203016714, "loss": 3.4274, "step": 57025 }, { "epoch": 3.874847125968202, "grad_norm": 1.441369652748108, "learning_rate": 0.0005158309552928388, "loss": 3.2782, "step": 57030 }, { "epoch": 
3.875186846038864, "grad_norm": 1.4322377443313599, "learning_rate": 0.000515788490284006, "loss": 3.5054, "step": 57035 }, { "epoch": 3.8755265661095257, "grad_norm": 1.1106239557266235, "learning_rate": 0.0005157460252751732, "loss": 3.2786, "step": 57040 }, { "epoch": 3.8758662861801874, "grad_norm": 1.437016487121582, "learning_rate": 0.0005157035602663405, "loss": 3.3718, "step": 57045 }, { "epoch": 3.8762060062508494, "grad_norm": 1.5558979511260986, "learning_rate": 0.0005156610952575078, "loss": 3.4147, "step": 57050 }, { "epoch": 3.876545726321511, "grad_norm": 1.3791011571884155, "learning_rate": 0.0005156186302486751, "loss": 3.2615, "step": 57055 }, { "epoch": 3.8768854463921727, "grad_norm": 1.1008156538009644, "learning_rate": 0.0005155761652398424, "loss": 3.4903, "step": 57060 }, { "epoch": 3.8772251664628348, "grad_norm": 1.4547640085220337, "learning_rate": 0.0005155337002310097, "loss": 3.1244, "step": 57065 }, { "epoch": 3.8775648865334964, "grad_norm": 1.2078863382339478, "learning_rate": 0.0005154912352221769, "loss": 3.2558, "step": 57070 }, { "epoch": 3.877904606604158, "grad_norm": 1.174866795539856, "learning_rate": 0.0005154487702133442, "loss": 3.6622, "step": 57075 }, { "epoch": 3.87824432667482, "grad_norm": 1.3238657712936401, "learning_rate": 0.0005154063052045114, "loss": 3.3782, "step": 57080 }, { "epoch": 3.8785840467454817, "grad_norm": 1.2777565717697144, "learning_rate": 0.0005153638401956787, "loss": 3.8283, "step": 57085 }, { "epoch": 3.8789237668161434, "grad_norm": 1.5160882472991943, "learning_rate": 0.0005153213751868461, "loss": 3.5883, "step": 57090 }, { "epoch": 3.8792634868868054, "grad_norm": 1.4596309661865234, "learning_rate": 0.0005152789101780133, "loss": 3.4215, "step": 57095 }, { "epoch": 3.879603206957467, "grad_norm": 1.754852294921875, "learning_rate": 0.0005152364451691806, "loss": 3.4212, "step": 57100 }, { "epoch": 3.8799429270281287, "grad_norm": 1.218193769454956, "learning_rate": 0.0005151939801603479, 
"loss": 3.4847, "step": 57105 }, { "epoch": 3.880282647098791, "grad_norm": 1.1453787088394165, "learning_rate": 0.0005151515151515151, "loss": 3.4805, "step": 57110 }, { "epoch": 3.8806223671694524, "grad_norm": 1.0433073043823242, "learning_rate": 0.0005151090501426824, "loss": 3.5087, "step": 57115 }, { "epoch": 3.880962087240114, "grad_norm": 1.388364553451538, "learning_rate": 0.0005150665851338497, "loss": 3.337, "step": 57120 }, { "epoch": 3.881301807310776, "grad_norm": 1.1103698015213013, "learning_rate": 0.000515024120125017, "loss": 3.4076, "step": 57125 }, { "epoch": 3.8816415273814378, "grad_norm": 1.155482530593872, "learning_rate": 0.0005149816551161842, "loss": 3.6018, "step": 57130 }, { "epoch": 3.8819812474520994, "grad_norm": 1.3850653171539307, "learning_rate": 0.0005149391901073516, "loss": 3.389, "step": 57135 }, { "epoch": 3.8823209675227615, "grad_norm": 1.2090718746185303, "learning_rate": 0.0005148967250985188, "loss": 3.4803, "step": 57140 }, { "epoch": 3.882660687593423, "grad_norm": 1.0832372903823853, "learning_rate": 0.000514854260089686, "loss": 3.2831, "step": 57145 }, { "epoch": 3.8830004076640847, "grad_norm": 1.219952940940857, "learning_rate": 0.0005148117950808534, "loss": 3.3285, "step": 57150 }, { "epoch": 3.883340127734747, "grad_norm": 1.3551033735275269, "learning_rate": 0.0005147693300720206, "loss": 3.3315, "step": 57155 }, { "epoch": 3.8836798478054084, "grad_norm": 1.15576171875, "learning_rate": 0.000514726865063188, "loss": 3.8171, "step": 57160 }, { "epoch": 3.88401956787607, "grad_norm": 1.2512917518615723, "learning_rate": 0.0005146844000543553, "loss": 3.562, "step": 57165 }, { "epoch": 3.884359287946732, "grad_norm": 1.1160871982574463, "learning_rate": 0.0005146419350455225, "loss": 3.4612, "step": 57170 }, { "epoch": 3.8846990080173938, "grad_norm": 1.2888096570968628, "learning_rate": 0.0005145994700366898, "loss": 3.3245, "step": 57175 }, { "epoch": 3.8850387280880554, "grad_norm": 1.3334596157073975, 
"learning_rate": 0.000514557005027857, "loss": 3.2705, "step": 57180 }, { "epoch": 3.8853784481587175, "grad_norm": 1.1913093328475952, "learning_rate": 0.0005145145400190243, "loss": 3.4686, "step": 57185 }, { "epoch": 3.885718168229379, "grad_norm": 1.2098722457885742, "learning_rate": 0.0005144720750101916, "loss": 3.4919, "step": 57190 }, { "epoch": 3.8860578883000407, "grad_norm": 1.020784616470337, "learning_rate": 0.0005144296100013589, "loss": 3.5362, "step": 57195 }, { "epoch": 3.886397608370703, "grad_norm": 1.079302430152893, "learning_rate": 0.0005143871449925262, "loss": 3.2171, "step": 57200 }, { "epoch": 3.8867373284413644, "grad_norm": 1.2423851490020752, "learning_rate": 0.0005143446799836935, "loss": 3.5078, "step": 57205 }, { "epoch": 3.887077048512026, "grad_norm": 1.1127021312713623, "learning_rate": 0.0005143022149748607, "loss": 3.5504, "step": 57210 }, { "epoch": 3.887416768582688, "grad_norm": 2.0135371685028076, "learning_rate": 0.000514259749966028, "loss": 3.6283, "step": 57215 }, { "epoch": 3.8877564886533498, "grad_norm": 1.1991134881973267, "learning_rate": 0.0005142172849571953, "loss": 3.4912, "step": 57220 }, { "epoch": 3.8880962087240114, "grad_norm": 1.370331048965454, "learning_rate": 0.0005141748199483625, "loss": 3.3475, "step": 57225 }, { "epoch": 3.888435928794673, "grad_norm": 1.4228708744049072, "learning_rate": 0.0005141323549395299, "loss": 3.3318, "step": 57230 }, { "epoch": 3.888775648865335, "grad_norm": 1.3985464572906494, "learning_rate": 0.0005140898899306972, "loss": 3.5265, "step": 57235 }, { "epoch": 3.8891153689359967, "grad_norm": 0.9907740354537964, "learning_rate": 0.0005140474249218644, "loss": 3.2376, "step": 57240 }, { "epoch": 3.8894550890066584, "grad_norm": 1.3048311471939087, "learning_rate": 0.0005140049599130316, "loss": 3.3965, "step": 57245 }, { "epoch": 3.8897948090773204, "grad_norm": 1.0308877229690552, "learning_rate": 0.000513962494904199, "loss": 3.2887, "step": 57250 }, { "epoch": 
3.890134529147982, "grad_norm": 1.1416298151016235, "learning_rate": 0.0005139200298953662, "loss": 3.4454, "step": 57255 }, { "epoch": 3.8904742492186437, "grad_norm": 1.9708185195922852, "learning_rate": 0.0005138775648865334, "loss": 3.5123, "step": 57260 }, { "epoch": 3.8908139692893053, "grad_norm": 1.3108707666397095, "learning_rate": 0.0005138350998777009, "loss": 3.5556, "step": 57265 }, { "epoch": 3.8911536893599674, "grad_norm": 1.458841323852539, "learning_rate": 0.0005137926348688681, "loss": 3.3223, "step": 57270 }, { "epoch": 3.891493409430629, "grad_norm": 1.5432360172271729, "learning_rate": 0.0005137501698600353, "loss": 3.4204, "step": 57275 }, { "epoch": 3.8918331295012907, "grad_norm": 1.2472114562988281, "learning_rate": 0.0005137077048512027, "loss": 3.3011, "step": 57280 }, { "epoch": 3.8921728495719528, "grad_norm": 1.2438244819641113, "learning_rate": 0.0005136652398423699, "loss": 3.5305, "step": 57285 }, { "epoch": 3.8925125696426144, "grad_norm": 1.2556219100952148, "learning_rate": 0.0005136227748335371, "loss": 3.5658, "step": 57290 }, { "epoch": 3.892852289713276, "grad_norm": 1.1525273323059082, "learning_rate": 0.0005135803098247045, "loss": 3.527, "step": 57295 }, { "epoch": 3.893192009783938, "grad_norm": 1.445064902305603, "learning_rate": 0.0005135378448158718, "loss": 3.4528, "step": 57300 }, { "epoch": 3.8935317298545997, "grad_norm": 1.2254494428634644, "learning_rate": 0.000513495379807039, "loss": 3.6612, "step": 57305 }, { "epoch": 3.8938714499252614, "grad_norm": 1.3975750207901, "learning_rate": 0.0005134529147982063, "loss": 3.3928, "step": 57310 }, { "epoch": 3.8942111699959234, "grad_norm": 1.4948196411132812, "learning_rate": 0.0005134104497893736, "loss": 3.5318, "step": 57315 }, { "epoch": 3.894550890066585, "grad_norm": 1.7430167198181152, "learning_rate": 0.0005133679847805408, "loss": 3.6228, "step": 57320 }, { "epoch": 3.8948906101372467, "grad_norm": 1.1959922313690186, "learning_rate": 0.0005133255197717081, 
"loss": 3.511, "step": 57325 }, { "epoch": 3.8952303302079088, "grad_norm": 1.083156704902649, "learning_rate": 0.0005132830547628755, "loss": 3.4438, "step": 57330 }, { "epoch": 3.8955700502785704, "grad_norm": 1.4022963047027588, "learning_rate": 0.0005132405897540427, "loss": 3.4707, "step": 57335 }, { "epoch": 3.895909770349232, "grad_norm": 1.2448230981826782, "learning_rate": 0.00051319812474521, "loss": 3.3031, "step": 57340 }, { "epoch": 3.896249490419894, "grad_norm": 1.563812494277954, "learning_rate": 0.0005131556597363772, "loss": 3.5991, "step": 57345 }, { "epoch": 3.8965892104905557, "grad_norm": 0.8569964170455933, "learning_rate": 0.0005131131947275445, "loss": 3.7742, "step": 57350 }, { "epoch": 3.8969289305612174, "grad_norm": 1.153371810913086, "learning_rate": 0.0005130707297187118, "loss": 3.5511, "step": 57355 }, { "epoch": 3.8972686506318794, "grad_norm": 1.3636444807052612, "learning_rate": 0.000513028264709879, "loss": 3.5416, "step": 57360 }, { "epoch": 3.897608370702541, "grad_norm": 1.1515156030654907, "learning_rate": 0.0005129857997010464, "loss": 3.5363, "step": 57365 }, { "epoch": 3.8979480907732027, "grad_norm": 1.078934907913208, "learning_rate": 0.0005129433346922137, "loss": 3.5949, "step": 57370 }, { "epoch": 3.8982878108438648, "grad_norm": 1.517702341079712, "learning_rate": 0.0005129008696833809, "loss": 3.6446, "step": 57375 }, { "epoch": 3.8986275309145264, "grad_norm": 1.4493494033813477, "learning_rate": 0.0005128584046745481, "loss": 3.716, "step": 57380 }, { "epoch": 3.898967250985188, "grad_norm": 1.016249656677246, "learning_rate": 0.0005128159396657155, "loss": 3.4036, "step": 57385 }, { "epoch": 3.89930697105585, "grad_norm": 1.228051781654358, "learning_rate": 0.0005127734746568827, "loss": 3.5371, "step": 57390 }, { "epoch": 3.8996466911265117, "grad_norm": 1.5030077695846558, "learning_rate": 0.0005127310096480499, "loss": 3.4027, "step": 57395 }, { "epoch": 3.8999864111971734, "grad_norm": 1.5001903772354126, 
"learning_rate": 0.0005126885446392174, "loss": 3.2561, "step": 57400 }, { "epoch": 3.9003261312678354, "grad_norm": 1.031963586807251, "learning_rate": 0.0005126460796303846, "loss": 3.2872, "step": 57405 }, { "epoch": 3.900665851338497, "grad_norm": 1.171830654144287, "learning_rate": 0.0005126036146215518, "loss": 3.3528, "step": 57410 }, { "epoch": 3.9010055714091587, "grad_norm": 1.2733004093170166, "learning_rate": 0.0005125611496127192, "loss": 3.2523, "step": 57415 }, { "epoch": 3.901345291479821, "grad_norm": 1.3546301126480103, "learning_rate": 0.0005125186846038864, "loss": 3.463, "step": 57420 }, { "epoch": 3.9016850115504824, "grad_norm": 1.2928730249404907, "learning_rate": 0.0005124762195950536, "loss": 3.4836, "step": 57425 }, { "epoch": 3.902024731621144, "grad_norm": 1.1040536165237427, "learning_rate": 0.000512433754586221, "loss": 3.4814, "step": 57430 }, { "epoch": 3.902364451691806, "grad_norm": 1.4976261854171753, "learning_rate": 0.0005123912895773883, "loss": 3.3805, "step": 57435 }, { "epoch": 3.9027041717624678, "grad_norm": 1.031265139579773, "learning_rate": 0.0005123488245685555, "loss": 3.5669, "step": 57440 }, { "epoch": 3.9030438918331294, "grad_norm": 1.4620722532272339, "learning_rate": 0.0005123063595597228, "loss": 3.1701, "step": 57445 }, { "epoch": 3.9033836119037915, "grad_norm": 1.070650577545166, "learning_rate": 0.0005122638945508901, "loss": 3.2983, "step": 57450 }, { "epoch": 3.903723331974453, "grad_norm": 1.0992660522460938, "learning_rate": 0.0005122214295420573, "loss": 3.6946, "step": 57455 }, { "epoch": 3.9040630520451147, "grad_norm": 1.0796571969985962, "learning_rate": 0.0005121789645332246, "loss": 3.4751, "step": 57460 }, { "epoch": 3.904402772115777, "grad_norm": 1.6172171831130981, "learning_rate": 0.0005121364995243919, "loss": 3.5097, "step": 57465 }, { "epoch": 3.9047424921864384, "grad_norm": 1.336769700050354, "learning_rate": 0.0005120940345155592, "loss": 3.4143, "step": 57470 }, { "epoch": 
3.9050822122571, "grad_norm": 1.1555744409561157, "learning_rate": 0.0005120515695067265, "loss": 3.3324, "step": 57475 }, { "epoch": 3.905421932327762, "grad_norm": 1.3815085887908936, "learning_rate": 0.0005120091044978937, "loss": 3.4486, "step": 57480 }, { "epoch": 3.9057616523984238, "grad_norm": 1.4707376956939697, "learning_rate": 0.000511966639489061, "loss": 3.3476, "step": 57485 }, { "epoch": 3.9061013724690854, "grad_norm": 1.462825894355774, "learning_rate": 0.0005119241744802283, "loss": 3.0182, "step": 57490 }, { "epoch": 3.9064410925397475, "grad_norm": 1.5270249843597412, "learning_rate": 0.0005118817094713955, "loss": 3.4779, "step": 57495 }, { "epoch": 3.906780812610409, "grad_norm": 1.135646939277649, "learning_rate": 0.0005118392444625629, "loss": 3.5578, "step": 57500 }, { "epoch": 3.9071205326810707, "grad_norm": 1.0956964492797852, "learning_rate": 0.0005117967794537302, "loss": 3.5592, "step": 57505 }, { "epoch": 3.907460252751733, "grad_norm": 1.357802391052246, "learning_rate": 0.0005117543144448974, "loss": 3.2828, "step": 57510 }, { "epoch": 3.9077999728223944, "grad_norm": 1.4692234992980957, "learning_rate": 0.0005117118494360648, "loss": 3.5357, "step": 57515 }, { "epoch": 3.908139692893056, "grad_norm": 1.2040351629257202, "learning_rate": 0.000511669384427232, "loss": 3.5075, "step": 57520 }, { "epoch": 3.908479412963718, "grad_norm": 1.7384182214736938, "learning_rate": 0.0005116269194183992, "loss": 3.207, "step": 57525 }, { "epoch": 3.90881913303438, "grad_norm": 1.4147090911865234, "learning_rate": 0.0005115844544095665, "loss": 3.3745, "step": 57530 }, { "epoch": 3.9091588531050414, "grad_norm": 1.4206297397613525, "learning_rate": 0.0005115419894007338, "loss": 3.5026, "step": 57535 }, { "epoch": 3.9094985731757035, "grad_norm": 1.2510374784469604, "learning_rate": 0.0005114995243919011, "loss": 3.393, "step": 57540 }, { "epoch": 3.909838293246365, "grad_norm": 1.063643217086792, "learning_rate": 0.0005114570593830684, "loss": 
3.5604, "step": 57545 }, { "epoch": 3.9101780133170267, "grad_norm": 1.3533214330673218, "learning_rate": 0.0005114145943742357, "loss": 3.425, "step": 57550 }, { "epoch": 3.910517733387689, "grad_norm": 1.6717604398727417, "learning_rate": 0.0005113721293654029, "loss": 3.3862, "step": 57555 }, { "epoch": 3.9108574534583505, "grad_norm": 1.152646541595459, "learning_rate": 0.0005113296643565702, "loss": 3.5445, "step": 57560 }, { "epoch": 3.911197173529012, "grad_norm": 1.10090172290802, "learning_rate": 0.0005112871993477375, "loss": 3.4874, "step": 57565 }, { "epoch": 3.9115368935996737, "grad_norm": 1.1045583486557007, "learning_rate": 0.0005112447343389047, "loss": 3.4237, "step": 57570 }, { "epoch": 3.911876613670336, "grad_norm": 1.321698784828186, "learning_rate": 0.0005112022693300721, "loss": 3.4221, "step": 57575 }, { "epoch": 3.9122163337409974, "grad_norm": 1.0997462272644043, "learning_rate": 0.0005111598043212393, "loss": 3.4713, "step": 57580 }, { "epoch": 3.912556053811659, "grad_norm": 1.1992223262786865, "learning_rate": 0.0005111173393124066, "loss": 3.4427, "step": 57585 }, { "epoch": 3.912895773882321, "grad_norm": 1.3675806522369385, "learning_rate": 0.0005110748743035739, "loss": 3.6742, "step": 57590 }, { "epoch": 3.9132354939529828, "grad_norm": 1.422320008277893, "learning_rate": 0.0005110324092947411, "loss": 3.2906, "step": 57595 }, { "epoch": 3.9135752140236444, "grad_norm": 1.6976925134658813, "learning_rate": 0.0005109899442859084, "loss": 3.4712, "step": 57600 }, { "epoch": 3.913914934094306, "grad_norm": 1.280439853668213, "learning_rate": 0.0005109474792770757, "loss": 3.5208, "step": 57605 }, { "epoch": 3.914254654164968, "grad_norm": 1.27042555809021, "learning_rate": 0.000510905014268243, "loss": 3.4048, "step": 57610 }, { "epoch": 3.9145943742356297, "grad_norm": 1.5168225765228271, "learning_rate": 0.0005108625492594103, "loss": 3.4188, "step": 57615 }, { "epoch": 3.9149340943062914, "grad_norm": 1.215883731842041, 
"learning_rate": 0.0005108200842505776, "loss": 3.5076, "step": 57620 }, { "epoch": 3.9152738143769534, "grad_norm": 1.128188133239746, "learning_rate": 0.0005107776192417448, "loss": 3.7093, "step": 57625 }, { "epoch": 3.915613534447615, "grad_norm": 1.7878519296646118, "learning_rate": 0.000510735154232912, "loss": 3.3563, "step": 57630 }, { "epoch": 3.9159532545182767, "grad_norm": 1.126452922821045, "learning_rate": 0.0005106926892240794, "loss": 3.6299, "step": 57635 }, { "epoch": 3.9162929745889388, "grad_norm": 1.4248906373977661, "learning_rate": 0.0005106502242152466, "loss": 3.5266, "step": 57640 }, { "epoch": 3.9166326946596004, "grad_norm": 1.1459490060806274, "learning_rate": 0.0005106077592064139, "loss": 3.6855, "step": 57645 }, { "epoch": 3.916972414730262, "grad_norm": 1.0088552236557007, "learning_rate": 0.0005105652941975813, "loss": 3.343, "step": 57650 }, { "epoch": 3.917312134800924, "grad_norm": 1.2584830522537231, "learning_rate": 0.0005105228291887485, "loss": 3.3084, "step": 57655 }, { "epoch": 3.9176518548715857, "grad_norm": 1.4113099575042725, "learning_rate": 0.0005104803641799157, "loss": 3.2289, "step": 57660 }, { "epoch": 3.9179915749422474, "grad_norm": 0.8866149187088013, "learning_rate": 0.0005104378991710831, "loss": 3.5733, "step": 57665 }, { "epoch": 3.9183312950129094, "grad_norm": 1.5115270614624023, "learning_rate": 0.0005103954341622503, "loss": 3.3827, "step": 57670 }, { "epoch": 3.918671015083571, "grad_norm": 1.058584213256836, "learning_rate": 0.0005103529691534175, "loss": 3.6132, "step": 57675 }, { "epoch": 3.9190107351542327, "grad_norm": 1.0766714811325073, "learning_rate": 0.000510310504144585, "loss": 3.4421, "step": 57680 }, { "epoch": 3.919350455224895, "grad_norm": 1.184497356414795, "learning_rate": 0.0005102680391357522, "loss": 3.4581, "step": 57685 }, { "epoch": 3.9196901752955564, "grad_norm": 0.9867731928825378, "learning_rate": 0.0005102255741269194, "loss": 3.4419, "step": 57690 }, { "epoch": 
3.920029895366218, "grad_norm": 1.0912799835205078, "learning_rate": 0.0005101831091180867, "loss": 3.7917, "step": 57695 }, { "epoch": 3.92036961543688, "grad_norm": 1.0972230434417725, "learning_rate": 0.000510140644109254, "loss": 3.6893, "step": 57700 }, { "epoch": 3.9207093355075417, "grad_norm": 1.0025765895843506, "learning_rate": 0.0005100981791004212, "loss": 3.6131, "step": 57705 }, { "epoch": 3.9210490555782034, "grad_norm": 1.486323595046997, "learning_rate": 0.0005100557140915885, "loss": 3.4175, "step": 57710 }, { "epoch": 3.9213887756488655, "grad_norm": 1.484474539756775, "learning_rate": 0.0005100132490827559, "loss": 3.3396, "step": 57715 }, { "epoch": 3.921728495719527, "grad_norm": 1.474371075630188, "learning_rate": 0.0005099707840739231, "loss": 3.1657, "step": 57720 }, { "epoch": 3.9220682157901887, "grad_norm": 1.2145439386367798, "learning_rate": 0.0005099283190650904, "loss": 3.3808, "step": 57725 }, { "epoch": 3.922407935860851, "grad_norm": 1.1134717464447021, "learning_rate": 0.0005098858540562576, "loss": 3.5102, "step": 57730 }, { "epoch": 3.9227476559315124, "grad_norm": 1.093674659729004, "learning_rate": 0.0005098433890474249, "loss": 3.4728, "step": 57735 }, { "epoch": 3.923087376002174, "grad_norm": 1.4348641633987427, "learning_rate": 0.0005098009240385922, "loss": 3.2852, "step": 57740 }, { "epoch": 3.923427096072836, "grad_norm": 1.2969441413879395, "learning_rate": 0.0005097584590297594, "loss": 3.4015, "step": 57745 }, { "epoch": 3.9237668161434978, "grad_norm": 1.4084833860397339, "learning_rate": 0.0005097159940209268, "loss": 3.5804, "step": 57750 }, { "epoch": 3.9241065362141594, "grad_norm": 1.733555793762207, "learning_rate": 0.0005096735290120941, "loss": 3.4525, "step": 57755 }, { "epoch": 3.9244462562848215, "grad_norm": 1.083262324333191, "learning_rate": 0.0005096310640032613, "loss": 3.4612, "step": 57760 }, { "epoch": 3.924785976355483, "grad_norm": 0.9931144118309021, "learning_rate": 0.0005095885989944285, 
"loss": 3.4988, "step": 57765 }, { "epoch": 3.9251256964261447, "grad_norm": 1.2723246812820435, "learning_rate": 0.0005095461339855959, "loss": 3.2882, "step": 57770 }, { "epoch": 3.925465416496807, "grad_norm": 1.105676531791687, "learning_rate": 0.0005095036689767631, "loss": 3.4634, "step": 57775 }, { "epoch": 3.9258051365674684, "grad_norm": 1.0959391593933105, "learning_rate": 0.0005094612039679303, "loss": 3.2605, "step": 57780 }, { "epoch": 3.92614485663813, "grad_norm": 1.691889762878418, "learning_rate": 0.0005094187389590978, "loss": 3.5336, "step": 57785 }, { "epoch": 3.926484576708792, "grad_norm": 1.1653978824615479, "learning_rate": 0.000509376273950265, "loss": 3.4803, "step": 57790 }, { "epoch": 3.9268242967794538, "grad_norm": 1.7175374031066895, "learning_rate": 0.0005093338089414322, "loss": 3.2253, "step": 57795 }, { "epoch": 3.9271640168501154, "grad_norm": 1.3659802675247192, "learning_rate": 0.0005092913439325996, "loss": 3.4965, "step": 57800 }, { "epoch": 3.9275037369207775, "grad_norm": 1.1985008716583252, "learning_rate": 0.0005092488789237668, "loss": 3.6084, "step": 57805 }, { "epoch": 3.927843456991439, "grad_norm": 1.4035637378692627, "learning_rate": 0.000509206413914934, "loss": 3.3491, "step": 57810 }, { "epoch": 3.9281831770621007, "grad_norm": 1.5037057399749756, "learning_rate": 0.0005091639489061015, "loss": 3.4081, "step": 57815 }, { "epoch": 3.928522897132763, "grad_norm": 1.319808006286621, "learning_rate": 0.0005091214838972687, "loss": 3.302, "step": 57820 }, { "epoch": 3.9288626172034244, "grad_norm": 1.6210854053497314, "learning_rate": 0.0005090790188884359, "loss": 3.5946, "step": 57825 }, { "epoch": 3.929202337274086, "grad_norm": 1.1060327291488647, "learning_rate": 0.0005090365538796032, "loss": 3.4966, "step": 57830 }, { "epoch": 3.929542057344748, "grad_norm": 1.3678981065750122, "learning_rate": 0.0005089940888707705, "loss": 3.2348, "step": 57835 }, { "epoch": 3.92988177741541, "grad_norm": 1.2365353107452393, 
"learning_rate": 0.0005089516238619378, "loss": 3.2552, "step": 57840 }, { "epoch": 3.9302214974860714, "grad_norm": 1.257978916168213, "learning_rate": 0.000508909158853105, "loss": 3.5, "step": 57845 }, { "epoch": 3.9305612175567335, "grad_norm": 1.2267963886260986, "learning_rate": 0.0005088666938442724, "loss": 3.4209, "step": 57850 }, { "epoch": 3.930900937627395, "grad_norm": 1.0290578603744507, "learning_rate": 0.0005088242288354397, "loss": 3.5109, "step": 57855 }, { "epoch": 3.9312406576980568, "grad_norm": 1.324654459953308, "learning_rate": 0.0005087817638266069, "loss": 3.5427, "step": 57860 }, { "epoch": 3.931580377768719, "grad_norm": 1.4027360677719116, "learning_rate": 0.0005087392988177741, "loss": 3.3635, "step": 57865 }, { "epoch": 3.9319200978393805, "grad_norm": 1.2410119771957397, "learning_rate": 0.0005086968338089415, "loss": 3.2557, "step": 57870 }, { "epoch": 3.932259817910042, "grad_norm": 1.213700294494629, "learning_rate": 0.0005086543688001087, "loss": 3.5772, "step": 57875 }, { "epoch": 3.932599537980704, "grad_norm": 1.1198458671569824, "learning_rate": 0.0005086119037912759, "loss": 3.4004, "step": 57880 }, { "epoch": 3.932939258051366, "grad_norm": 1.453545093536377, "learning_rate": 0.0005085694387824434, "loss": 3.6075, "step": 57885 }, { "epoch": 3.9332789781220274, "grad_norm": 1.0034778118133545, "learning_rate": 0.0005085269737736106, "loss": 3.3994, "step": 57890 }, { "epoch": 3.9336186981926895, "grad_norm": 1.142853856086731, "learning_rate": 0.0005084845087647778, "loss": 3.3987, "step": 57895 }, { "epoch": 3.933958418263351, "grad_norm": 1.0295981168746948, "learning_rate": 0.0005084420437559452, "loss": 3.3034, "step": 57900 }, { "epoch": 3.9342981383340128, "grad_norm": 1.4385881423950195, "learning_rate": 0.0005083995787471124, "loss": 3.5559, "step": 57905 }, { "epoch": 3.9346378584046744, "grad_norm": 1.1722224950790405, "learning_rate": 0.0005083571137382796, "loss": 3.6364, "step": 57910 }, { "epoch": 
3.9349775784753365, "grad_norm": 1.154023289680481, "learning_rate": 0.000508314648729447, "loss": 3.5318, "step": 57915 }, { "epoch": 3.935317298545998, "grad_norm": 1.6375247240066528, "learning_rate": 0.0005082721837206143, "loss": 3.1384, "step": 57920 }, { "epoch": 3.9356570186166597, "grad_norm": 1.744925856590271, "learning_rate": 0.0005082297187117815, "loss": 3.5247, "step": 57925 }, { "epoch": 3.935996738687322, "grad_norm": 1.47929048538208, "learning_rate": 0.0005081872537029488, "loss": 3.227, "step": 57930 }, { "epoch": 3.9363364587579834, "grad_norm": 1.30722975730896, "learning_rate": 0.0005081447886941161, "loss": 3.5586, "step": 57935 }, { "epoch": 3.936676178828645, "grad_norm": 1.7680031061172485, "learning_rate": 0.0005081023236852833, "loss": 3.4026, "step": 57940 }, { "epoch": 3.9370158988993067, "grad_norm": 1.0634396076202393, "learning_rate": 0.0005080598586764506, "loss": 3.0747, "step": 57945 }, { "epoch": 3.9373556189699688, "grad_norm": 1.691177248954773, "learning_rate": 0.0005080173936676179, "loss": 3.4112, "step": 57950 }, { "epoch": 3.9376953390406304, "grad_norm": 1.1027517318725586, "learning_rate": 0.0005079749286587852, "loss": 3.4675, "step": 57955 }, { "epoch": 3.938035059111292, "grad_norm": 1.3593806028366089, "learning_rate": 0.0005079324636499525, "loss": 3.2284, "step": 57960 }, { "epoch": 3.938374779181954, "grad_norm": 1.2310762405395508, "learning_rate": 0.0005078899986411198, "loss": 3.4815, "step": 57965 }, { "epoch": 3.9387144992526157, "grad_norm": 1.742502212524414, "learning_rate": 0.000507847533632287, "loss": 3.6846, "step": 57970 }, { "epoch": 3.9390542193232774, "grad_norm": 1.1861212253570557, "learning_rate": 0.0005078050686234543, "loss": 3.4193, "step": 57975 }, { "epoch": 3.9393939393939394, "grad_norm": 1.219614863395691, "learning_rate": 0.0005077626036146215, "loss": 3.2433, "step": 57980 }, { "epoch": 3.939733659464601, "grad_norm": 1.2312915325164795, "learning_rate": 0.0005077201386057888, 
"loss": 3.5148, "step": 57985 }, { "epoch": 3.9400733795352627, "grad_norm": 1.3634209632873535, "learning_rate": 0.0005076776735969562, "loss": 3.4611, "step": 57990 }, { "epoch": 3.940413099605925, "grad_norm": 1.111198902130127, "learning_rate": 0.0005076352085881234, "loss": 3.2896, "step": 57995 }, { "epoch": 3.9407528196765864, "grad_norm": 1.6776278018951416, "learning_rate": 0.0005075927435792907, "loss": 3.3047, "step": 58000 }, { "epoch": 3.941092539747248, "grad_norm": 1.3695539236068726, "learning_rate": 0.000507550278570458, "loss": 3.1263, "step": 58005 }, { "epoch": 3.94143225981791, "grad_norm": 1.4163681268692017, "learning_rate": 0.0005075078135616252, "loss": 3.5275, "step": 58010 }, { "epoch": 3.9417719798885718, "grad_norm": 1.3691104650497437, "learning_rate": 0.0005074653485527924, "loss": 3.2854, "step": 58015 }, { "epoch": 3.9421116999592334, "grad_norm": 1.1802030801773071, "learning_rate": 0.0005074228835439598, "loss": 3.5337, "step": 58020 }, { "epoch": 3.9424514200298955, "grad_norm": 1.3007043600082397, "learning_rate": 0.0005073804185351271, "loss": 3.4181, "step": 58025 }, { "epoch": 3.942791140100557, "grad_norm": 1.3219099044799805, "learning_rate": 0.0005073379535262943, "loss": 3.336, "step": 58030 }, { "epoch": 3.9431308601712187, "grad_norm": 1.9308346509933472, "learning_rate": 0.0005072954885174617, "loss": 3.3827, "step": 58035 }, { "epoch": 3.943470580241881, "grad_norm": 1.2363107204437256, "learning_rate": 0.0005072530235086289, "loss": 3.424, "step": 58040 }, { "epoch": 3.9438103003125424, "grad_norm": 1.1867384910583496, "learning_rate": 0.0005072105584997961, "loss": 3.3685, "step": 58045 }, { "epoch": 3.944150020383204, "grad_norm": 1.2843573093414307, "learning_rate": 0.0005071680934909635, "loss": 3.3793, "step": 58050 }, { "epoch": 3.944489740453866, "grad_norm": 1.1732641458511353, "learning_rate": 0.0005071256284821307, "loss": 3.5998, "step": 58055 }, { "epoch": 3.9448294605245278, "grad_norm": 
1.2732505798339844, "learning_rate": 0.000507083163473298, "loss": 3.5089, "step": 58060 }, { "epoch": 3.9451691805951894, "grad_norm": 1.404557228088379, "learning_rate": 0.0005070406984644654, "loss": 3.3526, "step": 58065 }, { "epoch": 3.9455089006658515, "grad_norm": 1.1301488876342773, "learning_rate": 0.0005069982334556326, "loss": 3.5313, "step": 58070 }, { "epoch": 3.945848620736513, "grad_norm": 1.128710389137268, "learning_rate": 0.0005069557684467998, "loss": 3.6095, "step": 58075 }, { "epoch": 3.9461883408071747, "grad_norm": 1.1188734769821167, "learning_rate": 0.0005069133034379671, "loss": 3.504, "step": 58080 }, { "epoch": 3.946528060877837, "grad_norm": 1.1851917505264282, "learning_rate": 0.0005068708384291344, "loss": 3.5157, "step": 58085 }, { "epoch": 3.9468677809484984, "grad_norm": 1.366887092590332, "learning_rate": 0.0005068283734203016, "loss": 3.4254, "step": 58090 }, { "epoch": 3.94720750101916, "grad_norm": 1.1093155145645142, "learning_rate": 0.000506785908411469, "loss": 3.4206, "step": 58095 }, { "epoch": 3.947547221089822, "grad_norm": 1.233168125152588, "learning_rate": 0.0005067434434026363, "loss": 3.6604, "step": 58100 }, { "epoch": 3.9478869411604838, "grad_norm": 1.9782177209854126, "learning_rate": 0.0005067009783938035, "loss": 3.6348, "step": 58105 }, { "epoch": 3.9482266612311454, "grad_norm": 1.6372365951538086, "learning_rate": 0.0005066585133849708, "loss": 3.2327, "step": 58110 }, { "epoch": 3.9485663813018075, "grad_norm": 1.4111549854278564, "learning_rate": 0.000506616048376138, "loss": 3.5325, "step": 58115 }, { "epoch": 3.948906101372469, "grad_norm": 1.3142485618591309, "learning_rate": 0.0005065735833673053, "loss": 3.2735, "step": 58120 }, { "epoch": 3.9492458214431307, "grad_norm": 1.249993085861206, "learning_rate": 0.0005065311183584726, "loss": 3.2903, "step": 58125 }, { "epoch": 3.949585541513793, "grad_norm": 1.6114157438278198, "learning_rate": 0.0005064886533496399, "loss": 3.8286, "step": 58130 }, { 
"epoch": 3.9499252615844545, "grad_norm": 1.1784001588821411, "learning_rate": 0.0005064461883408072, "loss": 3.5019, "step": 58135 }, { "epoch": 3.950264981655116, "grad_norm": 1.3783658742904663, "learning_rate": 0.0005064037233319745, "loss": 3.4644, "step": 58140 }, { "epoch": 3.950604701725778, "grad_norm": 1.2878293991088867, "learning_rate": 0.0005063612583231417, "loss": 3.5817, "step": 58145 }, { "epoch": 3.95094442179644, "grad_norm": 1.2525675296783447, "learning_rate": 0.000506318793314309, "loss": 3.5162, "step": 58150 }, { "epoch": 3.9512841418671014, "grad_norm": 1.1520750522613525, "learning_rate": 0.0005062763283054763, "loss": 3.4385, "step": 58155 }, { "epoch": 3.9516238619377635, "grad_norm": 1.2792311906814575, "learning_rate": 0.0005062338632966435, "loss": 3.6833, "step": 58160 }, { "epoch": 3.951963582008425, "grad_norm": 1.2816580533981323, "learning_rate": 0.0005061913982878108, "loss": 3.7815, "step": 58165 }, { "epoch": 3.9523033020790868, "grad_norm": 1.383563756942749, "learning_rate": 0.0005061489332789782, "loss": 3.3149, "step": 58170 }, { "epoch": 3.952643022149749, "grad_norm": 1.431697964668274, "learning_rate": 0.0005061064682701454, "loss": 3.4028, "step": 58175 }, { "epoch": 3.9529827422204105, "grad_norm": 1.131801724433899, "learning_rate": 0.0005060640032613127, "loss": 3.316, "step": 58180 }, { "epoch": 3.953322462291072, "grad_norm": 1.3443807363510132, "learning_rate": 0.00050602153825248, "loss": 3.4646, "step": 58185 }, { "epoch": 3.953662182361734, "grad_norm": 1.1499563455581665, "learning_rate": 0.0005059790732436472, "loss": 3.4223, "step": 58190 }, { "epoch": 3.954001902432396, "grad_norm": 1.8194698095321655, "learning_rate": 0.0005059366082348145, "loss": 3.3722, "step": 58195 }, { "epoch": 3.9543416225030574, "grad_norm": 1.3174033164978027, "learning_rate": 0.0005058941432259819, "loss": 3.4654, "step": 58200 }, { "epoch": 3.9546813425737195, "grad_norm": 1.4209948778152466, "learning_rate": 
0.0005058516782171491, "loss": 3.4924, "step": 58205 }, { "epoch": 3.955021062644381, "grad_norm": 1.450941562652588, "learning_rate": 0.0005058092132083164, "loss": 3.2491, "step": 58210 }, { "epoch": 3.9553607827150428, "grad_norm": 1.2687474489212036, "learning_rate": 0.0005057667481994836, "loss": 3.2121, "step": 58215 }, { "epoch": 3.955700502785705, "grad_norm": 1.3776867389678955, "learning_rate": 0.0005057242831906509, "loss": 3.626, "step": 58220 }, { "epoch": 3.9560402228563665, "grad_norm": 1.1666327714920044, "learning_rate": 0.0005056818181818182, "loss": 3.2788, "step": 58225 }, { "epoch": 3.956379942927028, "grad_norm": 1.3504644632339478, "learning_rate": 0.0005056393531729854, "loss": 3.3119, "step": 58230 }, { "epoch": 3.95671966299769, "grad_norm": 1.1045844554901123, "learning_rate": 0.0005055968881641528, "loss": 3.5988, "step": 58235 }, { "epoch": 3.957059383068352, "grad_norm": 1.190786361694336, "learning_rate": 0.0005055544231553201, "loss": 3.5162, "step": 58240 }, { "epoch": 3.9573991031390134, "grad_norm": 3.8084003925323486, "learning_rate": 0.0005055119581464873, "loss": 3.6588, "step": 58245 }, { "epoch": 3.957738823209675, "grad_norm": 1.2448163032531738, "learning_rate": 0.0005054694931376546, "loss": 3.466, "step": 58250 }, { "epoch": 3.958078543280337, "grad_norm": 1.2925556898117065, "learning_rate": 0.0005054270281288219, "loss": 3.2204, "step": 58255 }, { "epoch": 3.958418263350999, "grad_norm": 1.40848970413208, "learning_rate": 0.0005053845631199891, "loss": 3.331, "step": 58260 }, { "epoch": 3.9587579834216604, "grad_norm": 1.5432405471801758, "learning_rate": 0.0005053420981111563, "loss": 3.5539, "step": 58265 }, { "epoch": 3.9590977034923225, "grad_norm": 1.2263263463974, "learning_rate": 0.0005052996331023238, "loss": 3.4672, "step": 58270 }, { "epoch": 3.959437423562984, "grad_norm": 1.3007911443710327, "learning_rate": 0.000505257168093491, "loss": 3.1826, "step": 58275 }, { "epoch": 3.9597771436336457, "grad_norm": 
1.1375179290771484, "learning_rate": 0.0005052147030846582, "loss": 3.4997, "step": 58280 }, { "epoch": 3.9601168637043074, "grad_norm": 1.259456753730774, "learning_rate": 0.0005051722380758256, "loss": 3.4934, "step": 58285 }, { "epoch": 3.9604565837749695, "grad_norm": 1.1041783094406128, "learning_rate": 0.0005051297730669928, "loss": 3.7707, "step": 58290 }, { "epoch": 3.960796303845631, "grad_norm": 1.8376777172088623, "learning_rate": 0.00050508730805816, "loss": 3.4023, "step": 58295 }, { "epoch": 3.9611360239162927, "grad_norm": 1.8208723068237305, "learning_rate": 0.0005050448430493274, "loss": 3.3767, "step": 58300 }, { "epoch": 3.961475743986955, "grad_norm": 1.312045931816101, "learning_rate": 0.0005050023780404947, "loss": 3.4955, "step": 58305 }, { "epoch": 3.9618154640576164, "grad_norm": 1.3508042097091675, "learning_rate": 0.0005049599130316619, "loss": 3.3293, "step": 58310 }, { "epoch": 3.962155184128278, "grad_norm": 1.2957780361175537, "learning_rate": 0.0005049174480228292, "loss": 3.7223, "step": 58315 }, { "epoch": 3.96249490419894, "grad_norm": 1.3588685989379883, "learning_rate": 0.0005048749830139965, "loss": 3.4163, "step": 58320 }, { "epoch": 3.9628346242696018, "grad_norm": 1.2822149991989136, "learning_rate": 0.0005048325180051637, "loss": 3.3734, "step": 58325 }, { "epoch": 3.9631743443402634, "grad_norm": 1.6205934286117554, "learning_rate": 0.000504790052996331, "loss": 3.3279, "step": 58330 }, { "epoch": 3.9635140644109255, "grad_norm": 1.0386046171188354, "learning_rate": 0.0005047475879874983, "loss": 3.4903, "step": 58335 }, { "epoch": 3.963853784481587, "grad_norm": 1.0982975959777832, "learning_rate": 0.0005047051229786656, "loss": 3.5162, "step": 58340 }, { "epoch": 3.9641935045522487, "grad_norm": 1.6448835134506226, "learning_rate": 0.0005046626579698329, "loss": 3.1811, "step": 58345 }, { "epoch": 3.964533224622911, "grad_norm": 1.018308162689209, "learning_rate": 0.0005046201929610002, "loss": 3.75, "step": 58350 }, { 
"epoch": 3.9648729446935724, "grad_norm": 1.3368960618972778, "learning_rate": 0.0005045777279521674, "loss": 3.1504, "step": 58355 }, { "epoch": 3.965212664764234, "grad_norm": 1.2700762748718262, "learning_rate": 0.0005045352629433347, "loss": 3.3804, "step": 58360 }, { "epoch": 3.965552384834896, "grad_norm": 1.3000644445419312, "learning_rate": 0.0005044927979345019, "loss": 3.5987, "step": 58365 }, { "epoch": 3.9658921049055578, "grad_norm": 1.3436928987503052, "learning_rate": 0.0005044503329256692, "loss": 3.4031, "step": 58370 }, { "epoch": 3.9662318249762194, "grad_norm": 1.2534815073013306, "learning_rate": 0.0005044078679168366, "loss": 3.4176, "step": 58375 }, { "epoch": 3.9665715450468815, "grad_norm": 1.462605357170105, "learning_rate": 0.0005043654029080038, "loss": 3.5182, "step": 58380 }, { "epoch": 3.966911265117543, "grad_norm": 1.3658474683761597, "learning_rate": 0.0005043229378991711, "loss": 3.5579, "step": 58385 }, { "epoch": 3.9672509851882047, "grad_norm": 1.283747911453247, "learning_rate": 0.0005042804728903384, "loss": 3.534, "step": 58390 }, { "epoch": 3.967590705258867, "grad_norm": 1.149407148361206, "learning_rate": 0.0005042380078815056, "loss": 3.5978, "step": 58395 }, { "epoch": 3.9679304253295284, "grad_norm": 1.1865171194076538, "learning_rate": 0.0005041955428726728, "loss": 3.395, "step": 58400 }, { "epoch": 3.96827014540019, "grad_norm": 1.1140702962875366, "learning_rate": 0.0005041530778638403, "loss": 3.3429, "step": 58405 }, { "epoch": 3.968609865470852, "grad_norm": 1.2800066471099854, "learning_rate": 0.0005041106128550075, "loss": 3.365, "step": 58410 }, { "epoch": 3.968949585541514, "grad_norm": 1.5348474979400635, "learning_rate": 0.0005040681478461747, "loss": 3.2591, "step": 58415 }, { "epoch": 3.9692893056121754, "grad_norm": 1.3725731372833252, "learning_rate": 0.0005040256828373421, "loss": 3.4135, "step": 58420 }, { "epoch": 3.9696290256828375, "grad_norm": 1.5080616474151611, "learning_rate": 
0.0005039832178285093, "loss": 3.8057, "step": 58425 }, { "epoch": 3.969968745753499, "grad_norm": 1.5754436254501343, "learning_rate": 0.0005039407528196765, "loss": 3.2522, "step": 58430 }, { "epoch": 3.9703084658241607, "grad_norm": 1.1888182163238525, "learning_rate": 0.0005038982878108439, "loss": 3.2622, "step": 58435 }, { "epoch": 3.970648185894823, "grad_norm": 1.308711290359497, "learning_rate": 0.0005038558228020112, "loss": 3.3415, "step": 58440 }, { "epoch": 3.9709879059654845, "grad_norm": 1.4073115587234497, "learning_rate": 0.0005038133577931784, "loss": 3.3307, "step": 58445 }, { "epoch": 3.971327626036146, "grad_norm": 1.4998974800109863, "learning_rate": 0.0005037708927843458, "loss": 3.0777, "step": 58450 }, { "epoch": 3.971667346106808, "grad_norm": 1.5100141763687134, "learning_rate": 0.000503728427775513, "loss": 3.4701, "step": 58455 }, { "epoch": 3.97200706617747, "grad_norm": 1.342902660369873, "learning_rate": 0.0005036859627666802, "loss": 3.4214, "step": 58460 }, { "epoch": 3.9723467862481314, "grad_norm": 1.4857861995697021, "learning_rate": 0.0005036434977578475, "loss": 3.3507, "step": 58465 }, { "epoch": 3.9726865063187935, "grad_norm": 1.6283875703811646, "learning_rate": 0.0005036010327490148, "loss": 3.2308, "step": 58470 }, { "epoch": 3.973026226389455, "grad_norm": 1.3046696186065674, "learning_rate": 0.0005035585677401821, "loss": 3.3768, "step": 58475 }, { "epoch": 3.9733659464601168, "grad_norm": 1.3793039321899414, "learning_rate": 0.0005035161027313494, "loss": 3.5479, "step": 58480 }, { "epoch": 3.973705666530779, "grad_norm": 1.217058539390564, "learning_rate": 0.0005034736377225167, "loss": 3.1507, "step": 58485 }, { "epoch": 3.9740453866014405, "grad_norm": 1.201220154762268, "learning_rate": 0.0005034311727136839, "loss": 3.618, "step": 58490 }, { "epoch": 3.974385106672102, "grad_norm": 1.313771367073059, "learning_rate": 0.0005033887077048512, "loss": 3.3241, "step": 58495 }, { "epoch": 3.974724826742764, 
"grad_norm": 1.4497169256210327, "learning_rate": 0.0005033462426960184, "loss": 3.3847, "step": 58500 }, { "epoch": 3.975064546813426, "grad_norm": 1.4571030139923096, "learning_rate": 0.0005033037776871857, "loss": 3.2191, "step": 58505 }, { "epoch": 3.9754042668840874, "grad_norm": 1.1567813158035278, "learning_rate": 0.0005032613126783531, "loss": 3.3777, "step": 58510 }, { "epoch": 3.9757439869547495, "grad_norm": 1.1624417304992676, "learning_rate": 0.0005032188476695203, "loss": 3.5788, "step": 58515 }, { "epoch": 3.976083707025411, "grad_norm": 1.6540108919143677, "learning_rate": 0.0005031763826606877, "loss": 3.4706, "step": 58520 }, { "epoch": 3.9764234270960728, "grad_norm": 1.198972225189209, "learning_rate": 0.0005031339176518549, "loss": 3.5618, "step": 58525 }, { "epoch": 3.976763147166735, "grad_norm": 3.080317974090576, "learning_rate": 0.0005030914526430221, "loss": 3.3557, "step": 58530 }, { "epoch": 3.9771028672373965, "grad_norm": 1.7330487966537476, "learning_rate": 0.0005030489876341895, "loss": 3.4195, "step": 58535 }, { "epoch": 3.977442587308058, "grad_norm": 1.303670048713684, "learning_rate": 0.0005030065226253567, "loss": 3.4132, "step": 58540 }, { "epoch": 3.97778230737872, "grad_norm": 1.6482223272323608, "learning_rate": 0.000502964057616524, "loss": 3.3976, "step": 58545 }, { "epoch": 3.978122027449382, "grad_norm": 1.4689959287643433, "learning_rate": 0.0005029215926076914, "loss": 3.6209, "step": 58550 }, { "epoch": 3.9784617475200434, "grad_norm": 1.270085096359253, "learning_rate": 0.0005028791275988586, "loss": 3.686, "step": 58555 }, { "epoch": 3.9788014675907055, "grad_norm": 1.180773377418518, "learning_rate": 0.0005028366625900258, "loss": 3.574, "step": 58560 }, { "epoch": 3.979141187661367, "grad_norm": 0.9840219020843506, "learning_rate": 0.0005027941975811931, "loss": 3.4761, "step": 58565 }, { "epoch": 3.979480907732029, "grad_norm": 1.3193399906158447, "learning_rate": 0.0005027517325723604, "loss": 3.4091, "step": 
58570 }, { "epoch": 3.979820627802691, "grad_norm": 1.444750428199768, "learning_rate": 0.0005027092675635276, "loss": 3.3129, "step": 58575 }, { "epoch": 3.9801603478733525, "grad_norm": 2.1047041416168213, "learning_rate": 0.000502666802554695, "loss": 3.5082, "step": 58580 }, { "epoch": 3.980500067944014, "grad_norm": 1.3229981660842896, "learning_rate": 0.0005026243375458623, "loss": 3.4391, "step": 58585 }, { "epoch": 3.9808397880146758, "grad_norm": 1.244596242904663, "learning_rate": 0.0005025818725370295, "loss": 3.4227, "step": 58590 }, { "epoch": 3.981179508085338, "grad_norm": 1.612308144569397, "learning_rate": 0.0005025394075281968, "loss": 3.4984, "step": 58595 }, { "epoch": 3.9815192281559995, "grad_norm": 1.0935758352279663, "learning_rate": 0.000502496942519364, "loss": 3.4621, "step": 58600 }, { "epoch": 3.981858948226661, "grad_norm": 0.9579334259033203, "learning_rate": 0.0005024544775105313, "loss": 3.4761, "step": 58605 }, { "epoch": 3.982198668297323, "grad_norm": 1.2649601697921753, "learning_rate": 0.0005024120125016986, "loss": 3.4045, "step": 58610 }, { "epoch": 3.982538388367985, "grad_norm": 1.1825486421585083, "learning_rate": 0.0005023695474928659, "loss": 3.355, "step": 58615 }, { "epoch": 3.9828781084386464, "grad_norm": 1.1918545961380005, "learning_rate": 0.0005023270824840332, "loss": 3.5247, "step": 58620 }, { "epoch": 3.983217828509308, "grad_norm": 1.0798283815383911, "learning_rate": 0.0005022846174752005, "loss": 3.3819, "step": 58625 }, { "epoch": 3.98355754857997, "grad_norm": 1.1591742038726807, "learning_rate": 0.0005022421524663677, "loss": 3.4975, "step": 58630 }, { "epoch": 3.9838972686506318, "grad_norm": 1.1430927515029907, "learning_rate": 0.000502199687457535, "loss": 3.2308, "step": 58635 }, { "epoch": 3.9842369887212934, "grad_norm": 1.2621853351593018, "learning_rate": 0.0005021572224487023, "loss": 3.5861, "step": 58640 }, { "epoch": 3.9845767087919555, "grad_norm": 1.3413537740707397, "learning_rate": 
0.0005021147574398695, "loss": 3.4412, "step": 58645 }, { "epoch": 3.984916428862617, "grad_norm": 1.1967003345489502, "learning_rate": 0.0005020722924310369, "loss": 3.6056, "step": 58650 }, { "epoch": 3.9852561489332787, "grad_norm": 1.0755914449691772, "learning_rate": 0.0005020298274222042, "loss": 3.4019, "step": 58655 }, { "epoch": 3.985595869003941, "grad_norm": 1.0919948816299438, "learning_rate": 0.0005019873624133714, "loss": 3.3185, "step": 58660 }, { "epoch": 3.9859355890746024, "grad_norm": 1.2589670419692993, "learning_rate": 0.0005019448974045386, "loss": 3.4903, "step": 58665 }, { "epoch": 3.986275309145264, "grad_norm": 1.5002341270446777, "learning_rate": 0.000501902432395706, "loss": 3.2985, "step": 58670 }, { "epoch": 3.986615029215926, "grad_norm": 1.130164384841919, "learning_rate": 0.0005018599673868732, "loss": 3.5044, "step": 58675 }, { "epoch": 3.9869547492865878, "grad_norm": 1.4100017547607422, "learning_rate": 0.0005018175023780404, "loss": 3.3951, "step": 58680 }, { "epoch": 3.9872944693572494, "grad_norm": 2.1461074352264404, "learning_rate": 0.0005017750373692079, "loss": 3.1895, "step": 58685 }, { "epoch": 3.9876341894279115, "grad_norm": 1.3989723920822144, "learning_rate": 0.0005017325723603751, "loss": 3.4648, "step": 58690 }, { "epoch": 3.987973909498573, "grad_norm": 0.9941594004631042, "learning_rate": 0.0005016901073515423, "loss": 3.6325, "step": 58695 }, { "epoch": 3.9883136295692347, "grad_norm": 1.123989224433899, "learning_rate": 0.0005016476423427097, "loss": 3.2511, "step": 58700 }, { "epoch": 3.988653349639897, "grad_norm": 1.0552947521209717, "learning_rate": 0.0005016051773338769, "loss": 3.5249, "step": 58705 }, { "epoch": 3.9889930697105584, "grad_norm": 1.3156769275665283, "learning_rate": 0.0005015627123250441, "loss": 3.3273, "step": 58710 }, { "epoch": 3.98933278978122, "grad_norm": 1.2050623893737793, "learning_rate": 0.0005015202473162114, "loss": 3.5597, "step": 58715 }, { "epoch": 3.989672509851882, 
"grad_norm": 1.1333942413330078, "learning_rate": 0.0005014777823073788, "loss": 3.7226, "step": 58720 }, { "epoch": 3.990012229922544, "grad_norm": 1.7251074314117432, "learning_rate": 0.000501435317298546, "loss": 3.1486, "step": 58725 }, { "epoch": 3.9903519499932054, "grad_norm": 1.2872885465621948, "learning_rate": 0.0005013928522897133, "loss": 3.2527, "step": 58730 }, { "epoch": 3.9906916700638675, "grad_norm": 1.4176914691925049, "learning_rate": 0.0005013503872808806, "loss": 3.2571, "step": 58735 }, { "epoch": 3.991031390134529, "grad_norm": 0.9726143479347229, "learning_rate": 0.0005013079222720478, "loss": 3.3948, "step": 58740 }, { "epoch": 3.9913711102051908, "grad_norm": 1.1062707901000977, "learning_rate": 0.0005012654572632151, "loss": 3.2936, "step": 58745 }, { "epoch": 3.991710830275853, "grad_norm": 1.8385696411132812, "learning_rate": 0.0005012229922543823, "loss": 3.3061, "step": 58750 }, { "epoch": 3.9920505503465145, "grad_norm": 1.3773032426834106, "learning_rate": 0.0005011805272455497, "loss": 3.4487, "step": 58755 }, { "epoch": 3.992390270417176, "grad_norm": 1.273768424987793, "learning_rate": 0.000501138062236717, "loss": 3.2876, "step": 58760 }, { "epoch": 3.992729990487838, "grad_norm": 1.585395097732544, "learning_rate": 0.0005010955972278842, "loss": 3.6858, "step": 58765 }, { "epoch": 3.9930697105585, "grad_norm": 1.429885745048523, "learning_rate": 0.0005010531322190515, "loss": 3.3991, "step": 58770 }, { "epoch": 3.9934094306291614, "grad_norm": 1.5095915794372559, "learning_rate": 0.0005010106672102188, "loss": 3.1689, "step": 58775 }, { "epoch": 3.9937491506998235, "grad_norm": 1.1291801929473877, "learning_rate": 0.000500968202201386, "loss": 3.1707, "step": 58780 }, { "epoch": 3.994088870770485, "grad_norm": 1.229790449142456, "learning_rate": 0.0005009257371925533, "loss": 3.5287, "step": 58785 }, { "epoch": 3.9944285908411468, "grad_norm": 1.3201533555984497, "learning_rate": 0.0005008832721837207, "loss": 3.2862, "step": 
58790 }, { "epoch": 3.994768310911809, "grad_norm": 1.2218847274780273, "learning_rate": 0.0005008408071748879, "loss": 3.5634, "step": 58795 }, { "epoch": 3.9951080309824705, "grad_norm": 1.4773614406585693, "learning_rate": 0.0005007983421660551, "loss": 3.5514, "step": 58800 }, { "epoch": 3.995447751053132, "grad_norm": 1.7846412658691406, "learning_rate": 0.0005007558771572225, "loss": 3.7561, "step": 58805 }, { "epoch": 3.995787471123794, "grad_norm": 1.100117564201355, "learning_rate": 0.0005007134121483897, "loss": 3.3947, "step": 58810 }, { "epoch": 3.996127191194456, "grad_norm": 1.2669161558151245, "learning_rate": 0.0005006709471395569, "loss": 3.2618, "step": 58815 }, { "epoch": 3.9964669112651174, "grad_norm": 1.534149169921875, "learning_rate": 0.0005006284821307243, "loss": 3.5528, "step": 58820 }, { "epoch": 3.9968066313357795, "grad_norm": 1.1896638870239258, "learning_rate": 0.0005005860171218916, "loss": 3.4212, "step": 58825 }, { "epoch": 3.997146351406441, "grad_norm": 1.1268550157546997, "learning_rate": 0.0005005435521130588, "loss": 3.5882, "step": 58830 }, { "epoch": 3.9974860714771028, "grad_norm": 1.1549456119537354, "learning_rate": 0.0005005010871042262, "loss": 3.2116, "step": 58835 }, { "epoch": 3.997825791547765, "grad_norm": 1.9245718717575073, "learning_rate": 0.0005004586220953934, "loss": 3.4509, "step": 58840 }, { "epoch": 3.9981655116184265, "grad_norm": 0.9226694703102112, "learning_rate": 0.0005004161570865606, "loss": 3.5166, "step": 58845 }, { "epoch": 3.998505231689088, "grad_norm": 1.3786603212356567, "learning_rate": 0.0005003736920777279, "loss": 3.2962, "step": 58850 }, { "epoch": 3.99884495175975, "grad_norm": 0.9986640214920044, "learning_rate": 0.0005003312270688952, "loss": 3.4659, "step": 58855 }, { "epoch": 3.999184671830412, "grad_norm": 1.3372710943222046, "learning_rate": 0.0005002887620600626, "loss": 3.669, "step": 58860 }, { "epoch": 3.9995243919010735, "grad_norm": 1.4348193407058716, "learning_rate": 
0.0005002462970512298, "loss": 3.7245, "step": 58865 }, { "epoch": 3.9998641119717355, "grad_norm": 2.0749220848083496, "learning_rate": 0.0005002038320423971, "loss": 3.3302, "step": 58870 }, { "epoch": 4.0, "eval_bertscore": { "f1": 0.840559351697242, "precision": 0.8463854441717621, "recall": 0.8354937262181129 }, "eval_bleu_4": 0.013060877522707234, "eval_exact_match": 0.0005814516910553348, "eval_loss": 3.4148595333099365, "eval_meteor": 0.08236694140042775, "eval_rouge": { "rouge1": 0.1185769604327812, "rouge2": 0.017075765808795426, "rougeL": 0.10294520097198637, "rougeLsum": 0.10295386384978539 }, "eval_runtime": 1452.4906, "eval_samples_per_second": 7.104, "eval_steps_per_second": 0.888, "step": 58872 }, { "epoch": 4.000203832042397, "grad_norm": 1.2701600790023804, "learning_rate": 0.0005001613670335644, "loss": 3.6202, "step": 58875 }, { "epoch": 4.000543552113059, "grad_norm": 2.4242515563964844, "learning_rate": 0.0005001189020247316, "loss": 3.3944, "step": 58880 }, { "epoch": 4.000883272183721, "grad_norm": 1.6237496137619019, "learning_rate": 0.0005000764370158989, "loss": 3.2556, "step": 58885 }, { "epoch": 4.001222992254382, "grad_norm": 1.2133572101593018, "learning_rate": 0.0005000339720070662, "loss": 3.2115, "step": 58890 }, { "epoch": 4.001562712325044, "grad_norm": 1.270180106163025, "learning_rate": 0.0004999915069982335, "loss": 3.3526, "step": 58895 }, { "epoch": 4.001902432395706, "grad_norm": 1.3246647119522095, "learning_rate": 0.0004999490419894007, "loss": 3.408, "step": 58900 }, { "epoch": 4.002242152466367, "grad_norm": 1.233951210975647, "learning_rate": 0.000499906576980568, "loss": 3.2466, "step": 58905 }, { "epoch": 4.0025818725370295, "grad_norm": 0.9150893092155457, "learning_rate": 0.0004998641119717353, "loss": 3.3062, "step": 58910 }, { "epoch": 4.0029215926076915, "grad_norm": 1.731970191001892, "learning_rate": 0.0004998216469629025, "loss": 3.2278, "step": 58915 }, { "epoch": 4.003261312678353, "grad_norm": 
1.3492673635482788, "learning_rate": 0.0004997791819540699, "loss": 3.4991, "step": 58920 }, { "epoch": 4.003601032749015, "grad_norm": 1.294539213180542, "learning_rate": 0.0004997367169452371, "loss": 3.2621, "step": 58925 }, { "epoch": 4.003940752819677, "grad_norm": 1.524833083152771, "learning_rate": 0.0004996942519364044, "loss": 3.6367, "step": 58930 }, { "epoch": 4.004280472890338, "grad_norm": 1.748426079750061, "learning_rate": 0.0004996517869275717, "loss": 3.2685, "step": 58935 }, { "epoch": 4.004620192961, "grad_norm": 1.0896722078323364, "learning_rate": 0.000499609321918739, "loss": 3.4796, "step": 58940 }, { "epoch": 4.004959913031662, "grad_norm": 1.1788194179534912, "learning_rate": 0.0004995668569099062, "loss": 3.2886, "step": 58945 }, { "epoch": 4.005299633102323, "grad_norm": 1.1128121614456177, "learning_rate": 0.0004995243919010735, "loss": 3.3199, "step": 58950 }, { "epoch": 4.0056393531729855, "grad_norm": 1.4260034561157227, "learning_rate": 0.0004994819268922408, "loss": 3.2587, "step": 58955 }, { "epoch": 4.0059790732436475, "grad_norm": 1.3430981636047363, "learning_rate": 0.0004994394618834081, "loss": 3.5132, "step": 58960 }, { "epoch": 4.006318793314309, "grad_norm": 1.2979985475540161, "learning_rate": 0.0004993969968745753, "loss": 3.1923, "step": 58965 }, { "epoch": 4.006658513384971, "grad_norm": 1.3184341192245483, "learning_rate": 0.0004993545318657427, "loss": 3.2254, "step": 58970 }, { "epoch": 4.006998233455633, "grad_norm": 2.016026258468628, "learning_rate": 0.0004993120668569099, "loss": 3.6025, "step": 58975 }, { "epoch": 4.007337953526294, "grad_norm": 1.3741366863250732, "learning_rate": 0.0004992696018480772, "loss": 3.5005, "step": 58980 }, { "epoch": 4.007677673596956, "grad_norm": 1.447784662246704, "learning_rate": 0.0004992271368392445, "loss": 3.4643, "step": 58985 }, { "epoch": 4.008017393667618, "grad_norm": 1.2210533618927002, "learning_rate": 0.0004991846718304118, "loss": 3.4155, "step": 58990 }, { 
"epoch": 4.008357113738279, "grad_norm": 1.3602967262268066, "learning_rate": 0.000499142206821579, "loss": 3.4275, "step": 58995 }, { "epoch": 4.0086968338089415, "grad_norm": 1.3041743040084839, "learning_rate": 0.0004990997418127463, "loss": 3.5676, "step": 59000 }, { "epoch": 4.009036553879604, "grad_norm": 1.0815773010253906, "learning_rate": 0.0004990572768039136, "loss": 3.3388, "step": 59005 }, { "epoch": 4.009376273950265, "grad_norm": 1.0801774263381958, "learning_rate": 0.0004990148117950808, "loss": 3.2953, "step": 59010 }, { "epoch": 4.009715994020927, "grad_norm": 1.4527500867843628, "learning_rate": 0.0004989723467862481, "loss": 3.156, "step": 59015 }, { "epoch": 4.010055714091589, "grad_norm": 1.0229129791259766, "learning_rate": 0.0004989298817774155, "loss": 3.7619, "step": 59020 }, { "epoch": 4.01039543416225, "grad_norm": 1.7026287317276, "learning_rate": 0.0004988874167685827, "loss": 3.5224, "step": 59025 }, { "epoch": 4.010735154232912, "grad_norm": 1.3388222455978394, "learning_rate": 0.00049884495175975, "loss": 3.331, "step": 59030 }, { "epoch": 4.011074874303574, "grad_norm": NaN, "learning_rate": 0.0004988109797526839, "loss": 3.3533, "step": 59035 }, { "epoch": 4.011414594374235, "grad_norm": 1.350050926208496, "learning_rate": 0.0004987685147438511, "loss": 3.2075, "step": 59040 }, { "epoch": 4.0117543144448975, "grad_norm": 1.2498834133148193, "learning_rate": 0.0004987260497350183, "loss": 3.4757, "step": 59045 }, { "epoch": 4.01209403451556, "grad_norm": 1.2830320596694946, "learning_rate": 0.0004986835847261856, "loss": 3.3829, "step": 59050 }, { "epoch": 4.012433754586221, "grad_norm": 1.7781113386154175, "learning_rate": 0.000498641119717353, "loss": 3.4598, "step": 59055 }, { "epoch": 4.012773474656883, "grad_norm": 1.2806593179702759, "learning_rate": 0.0004985986547085202, "loss": 3.44, "step": 59060 }, { "epoch": 4.013113194727545, "grad_norm": 1.3419325351715088, "learning_rate": 0.0004985561896996874, "loss": 3.515, 
"step": 59065 }, { "epoch": 4.013452914798206, "grad_norm": 1.3315913677215576, "learning_rate": 0.0004985137246908548, "loss": 3.4046, "step": 59070 }, { "epoch": 4.013792634868868, "grad_norm": 1.2164489030838013, "learning_rate": 0.000498471259682022, "loss": 3.2099, "step": 59075 }, { "epoch": 4.01413235493953, "grad_norm": 1.3661752939224243, "learning_rate": 0.0004984287946731893, "loss": 3.4456, "step": 59080 }, { "epoch": 4.014472075010191, "grad_norm": 1.5173393487930298, "learning_rate": 0.0004983863296643565, "loss": 3.4026, "step": 59085 }, { "epoch": 4.0148117950808535, "grad_norm": 1.323933720588684, "learning_rate": 0.0004983438646555239, "loss": 3.4749, "step": 59090 }, { "epoch": 4.015151515151516, "grad_norm": 1.0528572797775269, "learning_rate": 0.0004983013996466911, "loss": 3.469, "step": 59095 }, { "epoch": 4.015491235222177, "grad_norm": 1.2034908533096313, "learning_rate": 0.0004982589346378584, "loss": 3.5612, "step": 59100 }, { "epoch": 4.015830955292839, "grad_norm": 1.3859279155731201, "learning_rate": 0.0004982164696290257, "loss": 3.2909, "step": 59105 }, { "epoch": 4.016170675363501, "grad_norm": 1.2332905530929565, "learning_rate": 0.000498174004620193, "loss": 3.3949, "step": 59110 }, { "epoch": 4.016510395434162, "grad_norm": 2.901749849319458, "learning_rate": 0.0004981315396113602, "loss": 3.1913, "step": 59115 }, { "epoch": 4.016850115504824, "grad_norm": 1.4965641498565674, "learning_rate": 0.0004980890746025275, "loss": 3.3306, "step": 59120 }, { "epoch": 4.017189835575485, "grad_norm": 1.4789756536483765, "learning_rate": 0.0004980466095936948, "loss": 3.1589, "step": 59125 }, { "epoch": 4.017529555646147, "grad_norm": 1.0033265352249146, "learning_rate": 0.0004980041445848621, "loss": 3.5231, "step": 59130 }, { "epoch": 4.0178692757168095, "grad_norm": 1.5400484800338745, "learning_rate": 0.0004979616795760293, "loss": 3.5359, "step": 59135 }, { "epoch": 4.018208995787471, "grad_norm": 1.783261775970459, "learning_rate": 
0.0004979192145671967, "loss": 3.4777, "step": 59140 }, { "epoch": 4.018548715858133, "grad_norm": 1.2545782327651978, "learning_rate": 0.0004978767495583639, "loss": 3.2948, "step": 59145 }, { "epoch": 4.018888435928795, "grad_norm": 1.235006332397461, "learning_rate": 0.0004978342845495311, "loss": 3.3971, "step": 59150 }, { "epoch": 4.019228155999456, "grad_norm": 0.9294551014900208, "learning_rate": 0.0004977918195406985, "loss": 3.6677, "step": 59155 }, { "epoch": 4.019567876070118, "grad_norm": 1.2854467630386353, "learning_rate": 0.0004977493545318658, "loss": 3.1963, "step": 59160 }, { "epoch": 4.01990759614078, "grad_norm": 1.095118522644043, "learning_rate": 0.000497706889523033, "loss": 3.4098, "step": 59165 }, { "epoch": 4.020247316211441, "grad_norm": 1.5473648309707642, "learning_rate": 0.0004976644245142003, "loss": 3.3258, "step": 59170 }, { "epoch": 4.0205870362821035, "grad_norm": 1.2084054946899414, "learning_rate": 0.0004976219595053676, "loss": 3.5375, "step": 59175 }, { "epoch": 4.0209267563527655, "grad_norm": 1.2263740301132202, "learning_rate": 0.0004975794944965349, "loss": 3.2284, "step": 59180 }, { "epoch": 4.021266476423427, "grad_norm": 1.6719568967819214, "learning_rate": 0.0004975370294877021, "loss": 3.5761, "step": 59185 }, { "epoch": 4.021606196494089, "grad_norm": 1.0503709316253662, "learning_rate": 0.0004974945644788695, "loss": 3.7208, "step": 59190 }, { "epoch": 4.021945916564751, "grad_norm": 1.0745242834091187, "learning_rate": 0.0004974520994700367, "loss": 3.4662, "step": 59195 }, { "epoch": 4.022285636635412, "grad_norm": 1.2605763673782349, "learning_rate": 0.0004974096344612039, "loss": 3.4155, "step": 59200 }, { "epoch": 4.022625356706074, "grad_norm": 1.2113661766052246, "learning_rate": 0.0004973671694523713, "loss": 3.3972, "step": 59205 }, { "epoch": 4.022965076776736, "grad_norm": 1.331446886062622, "learning_rate": 0.0004973247044435386, "loss": 3.5005, "step": 59210 }, { "epoch": 4.023304796847397, "grad_norm": 
1.9254969358444214, "learning_rate": 0.0004972822394347058, "loss": 3.548, "step": 59215 }, { "epoch": 4.0236445169180595, "grad_norm": 1.2992879152297974, "learning_rate": 0.000497239774425873, "loss": 3.4275, "step": 59220 }, { "epoch": 4.0239842369887215, "grad_norm": 1.5601673126220703, "learning_rate": 0.0004971973094170404, "loss": 3.4215, "step": 59225 }, { "epoch": 4.024323957059383, "grad_norm": 1.1505193710327148, "learning_rate": 0.0004971548444082076, "loss": 3.6013, "step": 59230 }, { "epoch": 4.024663677130045, "grad_norm": 1.1120202541351318, "learning_rate": 0.000497112379399375, "loss": 3.5329, "step": 59235 }, { "epoch": 4.025003397200707, "grad_norm": 1.1550008058547974, "learning_rate": 0.0004970699143905422, "loss": 3.3825, "step": 59240 }, { "epoch": 4.025343117271368, "grad_norm": 1.36951744556427, "learning_rate": 0.0004970274493817095, "loss": 3.5172, "step": 59245 }, { "epoch": 4.02568283734203, "grad_norm": 1.2501351833343506, "learning_rate": 0.0004969849843728767, "loss": 3.5109, "step": 59250 }, { "epoch": 4.026022557412692, "grad_norm": 1.2793657779693604, "learning_rate": 0.0004969425193640441, "loss": 3.4252, "step": 59255 }, { "epoch": 4.026362277483353, "grad_norm": 1.2863695621490479, "learning_rate": 0.0004969000543552113, "loss": 3.404, "step": 59260 }, { "epoch": 4.0267019975540155, "grad_norm": 1.1380910873413086, "learning_rate": 0.0004968575893463786, "loss": 3.6171, "step": 59265 }, { "epoch": 4.0270417176246776, "grad_norm": 1.2263753414154053, "learning_rate": 0.0004968151243375459, "loss": 3.5507, "step": 59270 }, { "epoch": 4.027381437695339, "grad_norm": 1.742556095123291, "learning_rate": 0.0004967726593287131, "loss": 3.5265, "step": 59275 }, { "epoch": 4.027721157766001, "grad_norm": 1.1226037740707397, "learning_rate": 0.0004967301943198804, "loss": 3.2736, "step": 59280 }, { "epoch": 4.028060877836663, "grad_norm": 2.2483270168304443, "learning_rate": 0.0004966877293110477, "loss": 3.4746, "step": 59285 }, { 
"epoch": 4.028400597907324, "grad_norm": 1.077246904373169, "learning_rate": 0.000496645264302215, "loss": 3.2472, "step": 59290 }, { "epoch": 4.028740317977986, "grad_norm": 1.5389984846115112, "learning_rate": 0.0004966027992933823, "loss": 3.376, "step": 59295 }, { "epoch": 4.029080038048648, "grad_norm": 0.993549644947052, "learning_rate": 0.0004965603342845495, "loss": 3.4214, "step": 59300 }, { "epoch": 4.029419758119309, "grad_norm": 1.163082480430603, "learning_rate": 0.0004965178692757168, "loss": 3.3942, "step": 59305 }, { "epoch": 4.0297594781899715, "grad_norm": 1.3753024339675903, "learning_rate": 0.0004964754042668841, "loss": 3.2448, "step": 59310 }, { "epoch": 4.030099198260634, "grad_norm": 1.1717970371246338, "learning_rate": 0.0004964329392580514, "loss": 3.3408, "step": 59315 }, { "epoch": 4.030438918331295, "grad_norm": 1.1377941370010376, "learning_rate": 0.0004963904742492187, "loss": 3.4633, "step": 59320 }, { "epoch": 4.030778638401957, "grad_norm": 1.1842520236968994, "learning_rate": 0.0004963480092403859, "loss": 3.4246, "step": 59325 }, { "epoch": 4.031118358472619, "grad_norm": 1.013010859489441, "learning_rate": 0.0004963055442315532, "loss": 3.5362, "step": 59330 }, { "epoch": 4.03145807854328, "grad_norm": 1.4899256229400635, "learning_rate": 0.0004962630792227205, "loss": 3.4927, "step": 59335 }, { "epoch": 4.031797798613942, "grad_norm": 1.2089210748672485, "learning_rate": 0.0004962206142138878, "loss": 3.3492, "step": 59340 }, { "epoch": 4.032137518684604, "grad_norm": 1.5373231172561646, "learning_rate": 0.000496178149205055, "loss": 3.3768, "step": 59345 }, { "epoch": 4.032477238755265, "grad_norm": 1.346038818359375, "learning_rate": 0.0004961356841962223, "loss": 3.3798, "step": 59350 }, { "epoch": 4.0328169588259275, "grad_norm": 1.5182995796203613, "learning_rate": 0.0004960932191873896, "loss": 3.4005, "step": 59355 }, { "epoch": 4.03315667889659, "grad_norm": 1.4758386611938477, "learning_rate": 0.0004960507541785569, 
"loss": 3.2656, "step": 59360 }, { "epoch": 4.033496398967251, "grad_norm": 1.2553976774215698, "learning_rate": 0.0004960082891697242, "loss": 3.2974, "step": 59365 }, { "epoch": 4.033836119037913, "grad_norm": 1.268757939338684, "learning_rate": 0.0004959658241608915, "loss": 3.3575, "step": 59370 }, { "epoch": 4.034175839108575, "grad_norm": 1.0733224153518677, "learning_rate": 0.0004959233591520587, "loss": 3.2795, "step": 59375 }, { "epoch": 4.034515559179236, "grad_norm": 1.1723301410675049, "learning_rate": 0.000495880894143226, "loss": 3.6462, "step": 59380 }, { "epoch": 4.034855279249898, "grad_norm": 1.303622841835022, "learning_rate": 0.0004958384291343932, "loss": 3.4304, "step": 59385 }, { "epoch": 4.03519499932056, "grad_norm": 1.1522785425186157, "learning_rate": 0.0004957959641255606, "loss": 3.2855, "step": 59390 }, { "epoch": 4.035534719391221, "grad_norm": 1.4632272720336914, "learning_rate": 0.0004957534991167278, "loss": 3.1697, "step": 59395 }, { "epoch": 4.0358744394618835, "grad_norm": 1.3215162754058838, "learning_rate": 0.0004957110341078951, "loss": 3.1622, "step": 59400 }, { "epoch": 4.036214159532546, "grad_norm": 1.2355574369430542, "learning_rate": 0.0004956685690990624, "loss": 3.3367, "step": 59405 }, { "epoch": 4.036553879603207, "grad_norm": 1.4581836462020874, "learning_rate": 0.0004956261040902297, "loss": 3.2318, "step": 59410 }, { "epoch": 4.036893599673869, "grad_norm": 1.2864900827407837, "learning_rate": 0.0004955836390813969, "loss": 3.2992, "step": 59415 }, { "epoch": 4.037233319744531, "grad_norm": 1.4174976348876953, "learning_rate": 0.0004955411740725643, "loss": 3.2344, "step": 59420 }, { "epoch": 4.037573039815192, "grad_norm": 1.3485335111618042, "learning_rate": 0.0004954987090637315, "loss": 3.4085, "step": 59425 }, { "epoch": 4.037912759885854, "grad_norm": 1.5446465015411377, "learning_rate": 0.0004954562440548987, "loss": 3.4733, "step": 59430 }, { "epoch": 4.038252479956516, "grad_norm": 1.230350375175476, 
"learning_rate": 0.000495413779046066, "loss": 3.3312, "step": 59435 }, { "epoch": 4.0385922000271774, "grad_norm": 1.3005075454711914, "learning_rate": 0.0004953713140372334, "loss": 3.2665, "step": 59440 }, { "epoch": 4.0389319200978395, "grad_norm": 1.7988529205322266, "learning_rate": 0.0004953288490284006, "loss": 3.3798, "step": 59445 }, { "epoch": 4.039271640168501, "grad_norm": 1.5597046613693237, "learning_rate": 0.0004952863840195678, "loss": 3.2785, "step": 59450 }, { "epoch": 4.039611360239163, "grad_norm": 1.3271883726119995, "learning_rate": 0.0004952439190107352, "loss": 3.639, "step": 59455 }, { "epoch": 4.039951080309825, "grad_norm": 1.319557785987854, "learning_rate": 0.0004952014540019024, "loss": 3.493, "step": 59460 }, { "epoch": 4.040290800380486, "grad_norm": 1.743161678314209, "learning_rate": 0.0004951589889930697, "loss": 3.2642, "step": 59465 }, { "epoch": 4.040630520451148, "grad_norm": 1.040988802909851, "learning_rate": 0.0004951165239842371, "loss": 3.5287, "step": 59470 }, { "epoch": 4.04097024052181, "grad_norm": 1.1170293092727661, "learning_rate": 0.0004950740589754043, "loss": 3.3824, "step": 59475 }, { "epoch": 4.041309960592471, "grad_norm": 1.3038848638534546, "learning_rate": 0.0004950315939665715, "loss": 3.3965, "step": 59480 }, { "epoch": 4.0416496806631335, "grad_norm": 1.1995857954025269, "learning_rate": 0.0004949891289577388, "loss": 3.3398, "step": 59485 }, { "epoch": 4.0419894007337955, "grad_norm": 1.4367574453353882, "learning_rate": 0.0004949466639489061, "loss": 3.4936, "step": 59490 }, { "epoch": 4.042329120804457, "grad_norm": 1.5502090454101562, "learning_rate": 0.0004949041989400734, "loss": 3.6435, "step": 59495 }, { "epoch": 4.042668840875119, "grad_norm": 1.3376264572143555, "learning_rate": 0.0004948617339312406, "loss": 3.4719, "step": 59500 }, { "epoch": 4.043008560945781, "grad_norm": 1.0009227991104126, "learning_rate": 0.000494819268922408, "loss": 3.5954, "step": 59505 }, { "epoch": 
4.043348281016442, "grad_norm": 1.353394627571106, "learning_rate": 0.0004947768039135752, "loss": 3.3671, "step": 59510 }, { "epoch": 4.043688001087104, "grad_norm": 1.4506103992462158, "learning_rate": 0.0004947343389047425, "loss": 3.3677, "step": 59515 }, { "epoch": 4.044027721157766, "grad_norm": 1.2335615158081055, "learning_rate": 0.0004946918738959099, "loss": 3.25, "step": 59520 }, { "epoch": 4.044367441228427, "grad_norm": 1.1175686120986938, "learning_rate": 0.0004946494088870771, "loss": 3.3176, "step": 59525 }, { "epoch": 4.0447071612990895, "grad_norm": 1.4425952434539795, "learning_rate": 0.0004946069438782443, "loss": 3.3888, "step": 59530 }, { "epoch": 4.0450468813697515, "grad_norm": 1.2946381568908691, "learning_rate": 0.0004945644788694116, "loss": 3.5175, "step": 59535 }, { "epoch": 4.045386601440413, "grad_norm": 1.0552417039871216, "learning_rate": 0.0004945220138605789, "loss": 3.6166, "step": 59540 }, { "epoch": 4.045726321511075, "grad_norm": 1.2421259880065918, "learning_rate": 0.0004944795488517462, "loss": 3.3014, "step": 59545 }, { "epoch": 4.046066041581737, "grad_norm": 1.298723578453064, "learning_rate": 0.0004944370838429134, "loss": 3.2883, "step": 59550 }, { "epoch": 4.046405761652398, "grad_norm": 1.1420018672943115, "learning_rate": 0.0004943946188340808, "loss": 3.1842, "step": 59555 }, { "epoch": 4.04674548172306, "grad_norm": 1.2128653526306152, "learning_rate": 0.000494352153825248, "loss": 3.6223, "step": 59560 }, { "epoch": 4.047085201793722, "grad_norm": 1.308239459991455, "learning_rate": 0.0004943096888164153, "loss": 3.4845, "step": 59565 }, { "epoch": 4.047424921864383, "grad_norm": 1.474242925643921, "learning_rate": 0.0004942672238075825, "loss": 3.1978, "step": 59570 }, { "epoch": 4.0477646419350455, "grad_norm": 1.2428638935089111, "learning_rate": 0.0004942247587987499, "loss": 3.1015, "step": 59575 }, { "epoch": 4.048104362005708, "grad_norm": 1.2364414930343628, "learning_rate": 0.0004941822937899171, "loss": 
3.4731, "step": 59580 }, { "epoch": 4.048444082076369, "grad_norm": 1.218131184577942, "learning_rate": 0.0004941398287810843, "loss": 3.4134, "step": 59585 }, { "epoch": 4.048783802147031, "grad_norm": 1.3041247129440308, "learning_rate": 0.0004940973637722517, "loss": 3.3089, "step": 59590 }, { "epoch": 4.049123522217693, "grad_norm": 1.5807701349258423, "learning_rate": 0.000494054898763419, "loss": 3.2956, "step": 59595 }, { "epoch": 4.049463242288354, "grad_norm": 1.5835328102111816, "learning_rate": 0.0004940124337545862, "loss": 3.5251, "step": 59600 }, { "epoch": 4.049802962359016, "grad_norm": 1.3274250030517578, "learning_rate": 0.0004939699687457535, "loss": 3.4715, "step": 59605 }, { "epoch": 4.050142682429678, "grad_norm": 1.2708708047866821, "learning_rate": 0.0004939275037369208, "loss": 3.5171, "step": 59610 }, { "epoch": 4.050482402500339, "grad_norm": 1.2662371397018433, "learning_rate": 0.000493885038728088, "loss": 3.4133, "step": 59615 }, { "epoch": 4.0508221225710015, "grad_norm": 1.3752174377441406, "learning_rate": 0.0004938425737192554, "loss": 3.4573, "step": 59620 }, { "epoch": 4.051161842641664, "grad_norm": 1.2577017545700073, "learning_rate": 0.0004938001087104227, "loss": 3.24, "step": 59625 }, { "epoch": 4.051501562712325, "grad_norm": 3.117647647857666, "learning_rate": 0.0004937576437015899, "loss": 3.3734, "step": 59630 }, { "epoch": 4.051841282782987, "grad_norm": 1.4289281368255615, "learning_rate": 0.0004937151786927571, "loss": 3.6827, "step": 59635 }, { "epoch": 4.052181002853649, "grad_norm": 0.9690352082252502, "learning_rate": 0.0004936727136839245, "loss": 3.2516, "step": 59640 }, { "epoch": 4.05252072292431, "grad_norm": 1.0587517023086548, "learning_rate": 0.0004936302486750917, "loss": 3.3691, "step": 59645 }, { "epoch": 4.052860442994972, "grad_norm": 1.865226149559021, "learning_rate": 0.000493587783666259, "loss": 3.4604, "step": 59650 }, { "epoch": 4.053200163065634, "grad_norm": 1.235956072807312, "learning_rate": 
0.0004935453186574263, "loss": 3.3484, "step": 59655 }, { "epoch": 4.053539883136295, "grad_norm": 1.1845916509628296, "learning_rate": 0.0004935028536485936, "loss": 3.3581, "step": 59660 }, { "epoch": 4.0538796032069575, "grad_norm": 1.792374610900879, "learning_rate": 0.0004934603886397608, "loss": 3.4662, "step": 59665 }, { "epoch": 4.05421932327762, "grad_norm": 1.3518073558807373, "learning_rate": 0.0004934179236309282, "loss": 3.6338, "step": 59670 }, { "epoch": 4.054559043348281, "grad_norm": 1.2951396703720093, "learning_rate": 0.0004933754586220954, "loss": 3.4272, "step": 59675 }, { "epoch": 4.054898763418943, "grad_norm": 1.3220527172088623, "learning_rate": 0.0004933329936132627, "loss": 3.5255, "step": 59680 }, { "epoch": 4.055238483489605, "grad_norm": 1.1611155271530151, "learning_rate": 0.0004932905286044299, "loss": 3.3681, "step": 59685 }, { "epoch": 4.055578203560266, "grad_norm": 1.2845205068588257, "learning_rate": 0.0004932480635955973, "loss": 3.257, "step": 59690 }, { "epoch": 4.055917923630928, "grad_norm": 1.4953892230987549, "learning_rate": 0.0004932055985867645, "loss": 3.3113, "step": 59695 }, { "epoch": 4.05625764370159, "grad_norm": 1.297173261642456, "learning_rate": 0.0004931631335779318, "loss": 3.2515, "step": 59700 }, { "epoch": 4.056597363772251, "grad_norm": 1.9750930070877075, "learning_rate": 0.0004931206685690991, "loss": 3.2604, "step": 59705 }, { "epoch": 4.0569370838429135, "grad_norm": 1.6357650756835938, "learning_rate": 0.0004930782035602663, "loss": 3.4067, "step": 59710 }, { "epoch": 4.057276803913576, "grad_norm": 1.2251218557357788, "learning_rate": 0.0004930357385514336, "loss": 3.469, "step": 59715 }, { "epoch": 4.057616523984237, "grad_norm": 0.9406675100326538, "learning_rate": 0.000492993273542601, "loss": 3.3657, "step": 59720 }, { "epoch": 4.057956244054899, "grad_norm": 1.0810730457305908, "learning_rate": 0.0004929508085337682, "loss": 3.4946, "step": 59725 }, { "epoch": 4.058295964125561, "grad_norm": 
1.290845513343811, "learning_rate": 0.0004929083435249355, "loss": 3.4414, "step": 59730 }, { "epoch": 4.058635684196222, "grad_norm": 1.1920623779296875, "learning_rate": 0.0004928658785161027, "loss": 3.3604, "step": 59735 }, { "epoch": 4.058975404266884, "grad_norm": 1.5365524291992188, "learning_rate": 0.00049282341350727, "loss": 3.5137, "step": 59740 }, { "epoch": 4.059315124337546, "grad_norm": 1.218461275100708, "learning_rate": 0.0004927809484984373, "loss": 3.4984, "step": 59745 }, { "epoch": 4.0596548444082075, "grad_norm": 1.4333593845367432, "learning_rate": 0.0004927384834896046, "loss": 3.4203, "step": 59750 }, { "epoch": 4.0599945644788695, "grad_norm": 1.1381797790527344, "learning_rate": 0.0004926960184807719, "loss": 3.4299, "step": 59755 }, { "epoch": 4.060334284549532, "grad_norm": 1.1667360067367554, "learning_rate": 0.0004926535534719391, "loss": 3.2877, "step": 59760 }, { "epoch": 4.060674004620193, "grad_norm": 1.473970890045166, "learning_rate": 0.0004926110884631064, "loss": 3.3378, "step": 59765 }, { "epoch": 4.061013724690855, "grad_norm": 1.1420129537582397, "learning_rate": 0.0004925686234542736, "loss": 3.3541, "step": 59770 }, { "epoch": 4.061353444761517, "grad_norm": 1.4516198635101318, "learning_rate": 0.000492526158445441, "loss": 3.4456, "step": 59775 }, { "epoch": 4.061693164832178, "grad_norm": 1.1080595254898071, "learning_rate": 0.0004924836934366083, "loss": 3.3571, "step": 59780 }, { "epoch": 4.06203288490284, "grad_norm": 1.643306851387024, "learning_rate": 0.0004924412284277755, "loss": 3.2915, "step": 59785 }, { "epoch": 4.062372604973502, "grad_norm": 1.1402034759521484, "learning_rate": 0.0004923987634189428, "loss": 3.5357, "step": 59790 }, { "epoch": 4.0627123250441635, "grad_norm": 1.4998592138290405, "learning_rate": 0.0004923562984101101, "loss": 3.6381, "step": 59795 }, { "epoch": 4.0630520451148255, "grad_norm": 1.3511378765106201, "learning_rate": 0.0004923138334012773, "loss": 3.3572, "step": 59800 }, { 
"epoch": 4.063391765185487, "grad_norm": 2.4756412506103516, "learning_rate": 0.0004922713683924447, "loss": 3.4564, "step": 59805 }, { "epoch": 4.063731485256149, "grad_norm": 1.2320789098739624, "learning_rate": 0.0004922289033836119, "loss": 3.4056, "step": 59810 }, { "epoch": 4.064071205326811, "grad_norm": 1.5001543760299683, "learning_rate": 0.0004921864383747792, "loss": 3.3053, "step": 59815 }, { "epoch": 4.064410925397472, "grad_norm": 1.0174490213394165, "learning_rate": 0.0004921439733659464, "loss": 3.4795, "step": 59820 }, { "epoch": 4.064750645468134, "grad_norm": 1.5525280237197876, "learning_rate": 0.0004921015083571138, "loss": 3.3984, "step": 59825 }, { "epoch": 4.065090365538796, "grad_norm": 1.184477686882019, "learning_rate": 0.000492059043348281, "loss": 3.2049, "step": 59830 }, { "epoch": 4.065430085609457, "grad_norm": 1.3569374084472656, "learning_rate": 0.0004920165783394483, "loss": 3.1458, "step": 59835 }, { "epoch": 4.0657698056801195, "grad_norm": 1.20895516872406, "learning_rate": 0.0004919741133306156, "loss": 3.6851, "step": 59840 }, { "epoch": 4.0661095257507816, "grad_norm": 1.1480902433395386, "learning_rate": 0.0004919316483217829, "loss": 3.5927, "step": 59845 }, { "epoch": 4.066449245821443, "grad_norm": 1.345662236213684, "learning_rate": 0.0004918891833129501, "loss": 3.3711, "step": 59850 }, { "epoch": 4.066788965892105, "grad_norm": 1.7816038131713867, "learning_rate": 0.0004918467183041175, "loss": 3.0214, "step": 59855 }, { "epoch": 4.067128685962767, "grad_norm": 1.2009177207946777, "learning_rate": 0.0004918042532952847, "loss": 3.3165, "step": 59860 }, { "epoch": 4.067468406033428, "grad_norm": 1.0587787628173828, "learning_rate": 0.0004917617882864519, "loss": 3.3905, "step": 59865 }, { "epoch": 4.06780812610409, "grad_norm": 1.103164792060852, "learning_rate": 0.0004917193232776192, "loss": 3.486, "step": 59870 }, { "epoch": 4.068147846174752, "grad_norm": 1.4032549858093262, "learning_rate": 0.0004916768582687866, 
"loss": 3.5355, "step": 59875 }, { "epoch": 4.068487566245413, "grad_norm": 1.1413980722427368, "learning_rate": 0.0004916343932599538, "loss": 3.5354, "step": 59880 }, { "epoch": 4.0688272863160755, "grad_norm": 1.400217890739441, "learning_rate": 0.0004915919282511211, "loss": 3.4365, "step": 59885 }, { "epoch": 4.069167006386738, "grad_norm": 1.3899785280227661, "learning_rate": 0.0004915494632422884, "loss": 3.3836, "step": 59890 }, { "epoch": 4.069506726457399, "grad_norm": 1.4626961946487427, "learning_rate": 0.0004915069982334556, "loss": 3.4385, "step": 59895 }, { "epoch": 4.069846446528061, "grad_norm": 1.1228463649749756, "learning_rate": 0.0004914645332246229, "loss": 3.5197, "step": 59900 }, { "epoch": 4.070186166598723, "grad_norm": 1.0527938604354858, "learning_rate": 0.0004914220682157903, "loss": 3.623, "step": 59905 }, { "epoch": 4.070525886669384, "grad_norm": 1.3820743560791016, "learning_rate": 0.0004913796032069575, "loss": 3.2568, "step": 59910 }, { "epoch": 4.070865606740046, "grad_norm": 1.627605676651001, "learning_rate": 0.0004913371381981247, "loss": 3.4898, "step": 59915 }, { "epoch": 4.071205326810708, "grad_norm": 1.7059262990951538, "learning_rate": 0.000491294673189292, "loss": 3.4629, "step": 59920 }, { "epoch": 4.071545046881369, "grad_norm": 1.1319046020507812, "learning_rate": 0.0004912522081804593, "loss": 3.4168, "step": 59925 }, { "epoch": 4.0718847669520315, "grad_norm": 1.3979312181472778, "learning_rate": 0.0004912097431716266, "loss": 3.6143, "step": 59930 }, { "epoch": 4.072224487022694, "grad_norm": 1.6260048151016235, "learning_rate": 0.0004911672781627938, "loss": 3.3351, "step": 59935 }, { "epoch": 4.072564207093355, "grad_norm": 1.4134689569473267, "learning_rate": 0.0004911248131539612, "loss": 3.6361, "step": 59940 }, { "epoch": 4.072903927164017, "grad_norm": 1.2072361707687378, "learning_rate": 0.0004910823481451284, "loss": 3.6992, "step": 59945 }, { "epoch": 4.073243647234679, "grad_norm": 1.2873083353042603, 
"learning_rate": 0.0004910398831362957, "loss": 3.4644, "step": 59950 }, { "epoch": 4.07358336730534, "grad_norm": 1.3539814949035645, "learning_rate": 0.000490997418127463, "loss": 3.4179, "step": 59955 }, { "epoch": 4.073923087376002, "grad_norm": 1.2553632259368896, "learning_rate": 0.0004909549531186303, "loss": 3.2729, "step": 59960 }, { "epoch": 4.074262807446664, "grad_norm": 1.6693098545074463, "learning_rate": 0.0004909124881097975, "loss": 3.0868, "step": 59965 }, { "epoch": 4.074602527517325, "grad_norm": 1.5175780057907104, "learning_rate": 0.0004908700231009647, "loss": 3.3866, "step": 59970 }, { "epoch": 4.0749422475879875, "grad_norm": 1.2147321701049805, "learning_rate": 0.0004908275580921321, "loss": 3.5207, "step": 59975 }, { "epoch": 4.07528196765865, "grad_norm": 1.2127629518508911, "learning_rate": 0.0004907850930832994, "loss": 3.4069, "step": 59980 }, { "epoch": 4.075621687729311, "grad_norm": 1.63595449924469, "learning_rate": 0.0004907426280744666, "loss": 3.3158, "step": 59985 }, { "epoch": 4.075961407799973, "grad_norm": 1.7176249027252197, "learning_rate": 0.000490700163065634, "loss": 3.3476, "step": 59990 }, { "epoch": 4.076301127870635, "grad_norm": 1.5886948108673096, "learning_rate": 0.0004906576980568012, "loss": 3.2045, "step": 59995 }, { "epoch": 4.076640847941296, "grad_norm": 1.3838212490081787, "learning_rate": 0.0004906152330479684, "loss": 3.409, "step": 60000 }, { "epoch": 4.076980568011958, "grad_norm": 1.2068508863449097, "learning_rate": 0.0004905727680391358, "loss": 3.398, "step": 60005 }, { "epoch": 4.07732028808262, "grad_norm": 1.2550252676010132, "learning_rate": 0.0004905303030303031, "loss": 3.2576, "step": 60010 }, { "epoch": 4.0776600081532814, "grad_norm": 2.4689884185791016, "learning_rate": 0.0004904878380214703, "loss": 3.2234, "step": 60015 }, { "epoch": 4.0779997282239435, "grad_norm": 1.8410322666168213, "learning_rate": 0.0004904453730126375, "loss": 3.3587, "step": 60020 }, { "epoch": 
4.078339448294606, "grad_norm": 1.3891422748565674, "learning_rate": 0.0004904029080038049, "loss": 3.2326, "step": 60025 }, { "epoch": 4.078679168365267, "grad_norm": 1.1653891801834106, "learning_rate": 0.0004903604429949722, "loss": 3.4066, "step": 60030 }, { "epoch": 4.079018888435929, "grad_norm": 1.5079104900360107, "learning_rate": 0.0004903179779861394, "loss": 3.3724, "step": 60035 }, { "epoch": 4.079358608506591, "grad_norm": 1.3886232376098633, "learning_rate": 0.0004902755129773068, "loss": 3.3925, "step": 60040 }, { "epoch": 4.079698328577252, "grad_norm": 1.3926270008087158, "learning_rate": 0.000490233047968474, "loss": 3.3671, "step": 60045 }, { "epoch": 4.080038048647914, "grad_norm": 1.105823278427124, "learning_rate": 0.0004901905829596412, "loss": 3.3238, "step": 60050 }, { "epoch": 4.080377768718576, "grad_norm": 1.155687928199768, "learning_rate": 0.0004901481179508086, "loss": 3.4882, "step": 60055 }, { "epoch": 4.0807174887892375, "grad_norm": 1.0053044557571411, "learning_rate": 0.0004901056529419759, "loss": 3.4103, "step": 60060 }, { "epoch": 4.0810572088598995, "grad_norm": 1.4485076665878296, "learning_rate": 0.0004900631879331431, "loss": 3.2954, "step": 60065 }, { "epoch": 4.081396928930562, "grad_norm": 1.6407309770584106, "learning_rate": 0.0004900207229243103, "loss": 3.2714, "step": 60070 }, { "epoch": 4.081736649001223, "grad_norm": 1.5048924684524536, "learning_rate": 0.0004899782579154777, "loss": 3.3179, "step": 60075 }, { "epoch": 4.082076369071885, "grad_norm": 1.9299858808517456, "learning_rate": 0.0004899357929066449, "loss": 3.4509, "step": 60080 }, { "epoch": 4.082416089142547, "grad_norm": 1.2882869243621826, "learning_rate": 0.0004898933278978122, "loss": 3.5262, "step": 60085 }, { "epoch": 4.082755809213208, "grad_norm": 1.333333969116211, "learning_rate": 0.0004898508628889795, "loss": 3.5257, "step": 60090 }, { "epoch": 4.08309552928387, "grad_norm": 1.4313918352127075, "learning_rate": 0.0004898083978801468, 
"loss": 3.824, "step": 60095 }, { "epoch": 4.083435249354532, "grad_norm": 1.2074130773544312, "learning_rate": 0.000489765932871314, "loss": 3.284, "step": 60100 }, { "epoch": 4.0837749694251935, "grad_norm": 1.3115547895431519, "learning_rate": 0.0004897234678624814, "loss": 3.1865, "step": 60105 }, { "epoch": 4.0841146894958555, "grad_norm": 1.1246229410171509, "learning_rate": 0.0004896810028536486, "loss": 3.5253, "step": 60110 }, { "epoch": 4.084454409566518, "grad_norm": 1.260311245918274, "learning_rate": 0.0004896385378448159, "loss": 3.5657, "step": 60115 }, { "epoch": 4.084794129637179, "grad_norm": 1.094553828239441, "learning_rate": 0.0004895960728359831, "loss": 3.4263, "step": 60120 }, { "epoch": 4.085133849707841, "grad_norm": 1.4792026281356812, "learning_rate": 0.0004895536078271504, "loss": 3.4748, "step": 60125 }, { "epoch": 4.085473569778502, "grad_norm": 1.7365912199020386, "learning_rate": 0.0004895111428183177, "loss": 3.3892, "step": 60130 }, { "epoch": 4.085813289849164, "grad_norm": 1.370514988899231, "learning_rate": 0.000489468677809485, "loss": 3.7691, "step": 60135 }, { "epoch": 4.086153009919826, "grad_norm": 4.671230316162109, "learning_rate": 0.0004894262128006523, "loss": 3.243, "step": 60140 }, { "epoch": 4.086492729990487, "grad_norm": 1.2557573318481445, "learning_rate": 0.0004893837477918196, "loss": 3.4008, "step": 60145 }, { "epoch": 4.0868324500611495, "grad_norm": 1.1728222370147705, "learning_rate": 0.0004893412827829868, "loss": 3.473, "step": 60150 }, { "epoch": 4.0871721701318116, "grad_norm": 1.1872258186340332, "learning_rate": 0.000489298817774154, "loss": 3.6609, "step": 60155 }, { "epoch": 4.087511890202473, "grad_norm": 1.4828130006790161, "learning_rate": 0.0004892563527653214, "loss": 3.4319, "step": 60160 }, { "epoch": 4.087851610273135, "grad_norm": 4.492217540740967, "learning_rate": 0.0004892138877564887, "loss": 3.5484, "step": 60165 }, { "epoch": 4.088191330343797, "grad_norm": 1.3150622844696045, 
"learning_rate": 0.0004891714227476559, "loss": 3.3234, "step": 60170 }, { "epoch": 4.088531050414458, "grad_norm": 1.3876831531524658, "learning_rate": 0.0004891289577388232, "loss": 3.5186, "step": 60175 }, { "epoch": 4.08887077048512, "grad_norm": 1.4462051391601562, "learning_rate": 0.0004890864927299905, "loss": 3.6174, "step": 60180 }, { "epoch": 4.089210490555782, "grad_norm": 1.2741163969039917, "learning_rate": 0.0004890440277211578, "loss": 3.1823, "step": 60185 }, { "epoch": 4.089550210626443, "grad_norm": 1.5052196979522705, "learning_rate": 0.0004890015627123251, "loss": 3.3771, "step": 60190 }, { "epoch": 4.0898899306971055, "grad_norm": 1.3393161296844482, "learning_rate": 0.0004889590977034923, "loss": 3.6244, "step": 60195 }, { "epoch": 4.090229650767768, "grad_norm": 1.2338695526123047, "learning_rate": 0.0004889166326946596, "loss": 3.3028, "step": 60200 }, { "epoch": 4.090569370838429, "grad_norm": 2.3059604167938232, "learning_rate": 0.0004888741676858268, "loss": 3.6154, "step": 60205 }, { "epoch": 4.090909090909091, "grad_norm": 1.2738734483718872, "learning_rate": 0.0004888317026769942, "loss": 3.2954, "step": 60210 }, { "epoch": 4.091248810979753, "grad_norm": 1.3747029304504395, "learning_rate": 0.0004887892376681615, "loss": 3.5224, "step": 60215 }, { "epoch": 4.091588531050414, "grad_norm": 1.4482356309890747, "learning_rate": 0.0004887467726593287, "loss": 3.3164, "step": 60220 }, { "epoch": 4.091928251121076, "grad_norm": 1.2387850284576416, "learning_rate": 0.000488704307650496, "loss": 3.547, "step": 60225 }, { "epoch": 4.092267971191738, "grad_norm": 1.0559601783752441, "learning_rate": 0.0004886618426416633, "loss": 3.5154, "step": 60230 }, { "epoch": 4.092607691262399, "grad_norm": 1.1733393669128418, "learning_rate": 0.0004886193776328305, "loss": 3.2495, "step": 60235 }, { "epoch": 4.0929474113330615, "grad_norm": 1.5223137140274048, "learning_rate": 0.0004885769126239979, "loss": 3.21, "step": 60240 }, { "epoch": 
4.093287131403724, "grad_norm": 1.305938959121704, "learning_rate": 0.0004885344476151651, "loss": 3.3102, "step": 60245 }, { "epoch": 4.093626851474385, "grad_norm": 1.3703712224960327, "learning_rate": 0.0004884919826063324, "loss": 3.1888, "step": 60250 }, { "epoch": 4.093966571545047, "grad_norm": 1.3222934007644653, "learning_rate": 0.0004884495175974996, "loss": 3.431, "step": 60255 }, { "epoch": 4.094306291615709, "grad_norm": 1.4139364957809448, "learning_rate": 0.000488407052588667, "loss": 3.4981, "step": 60260 }, { "epoch": 4.09464601168637, "grad_norm": 1.232804536819458, "learning_rate": 0.0004883645875798342, "loss": 3.5294, "step": 60265 }, { "epoch": 4.094985731757032, "grad_norm": 1.5290426015853882, "learning_rate": 0.0004883221225710015, "loss": 3.3306, "step": 60270 }, { "epoch": 4.095325451827694, "grad_norm": 1.313306450843811, "learning_rate": 0.0004882796575621688, "loss": 3.2362, "step": 60275 }, { "epoch": 4.095665171898355, "grad_norm": 1.266998052597046, "learning_rate": 0.00048823719255333605, "loss": 3.4038, "step": 60280 }, { "epoch": 4.0960048919690175, "grad_norm": 1.1890736818313599, "learning_rate": 0.00048819472754450333, "loss": 3.1542, "step": 60285 }, { "epoch": 4.09634461203968, "grad_norm": 1.459882378578186, "learning_rate": 0.0004881522625356706, "loss": 3.3492, "step": 60290 }, { "epoch": 4.096684332110341, "grad_norm": 1.3294956684112549, "learning_rate": 0.0004881097975268379, "loss": 3.3461, "step": 60295 }, { "epoch": 4.097024052181003, "grad_norm": 1.2931960821151733, "learning_rate": 0.00048806733251800517, "loss": 3.4299, "step": 60300 }, { "epoch": 4.097363772251665, "grad_norm": 1.3009769916534424, "learning_rate": 0.00048802486750917245, "loss": 3.5608, "step": 60305 }, { "epoch": 4.097703492322326, "grad_norm": 1.0936427116394043, "learning_rate": 0.00048798240250033973, "loss": 3.1624, "step": 60310 }, { "epoch": 4.098043212392988, "grad_norm": 1.127505898475647, "learning_rate": 0.000487939937491507, "loss": 
3.4536, "step": 60315 }, { "epoch": 4.09838293246365, "grad_norm": 1.201551079750061, "learning_rate": 0.0004878974724826743, "loss": 3.2947, "step": 60320 }, { "epoch": 4.0987226525343115, "grad_norm": 1.5883774757385254, "learning_rate": 0.0004878550074738415, "loss": 3.4519, "step": 60325 }, { "epoch": 4.0990623726049735, "grad_norm": 1.149612307548523, "learning_rate": 0.00048781254246500885, "loss": 3.4128, "step": 60330 }, { "epoch": 4.099402092675636, "grad_norm": 1.4266986846923828, "learning_rate": 0.00048777007745617613, "loss": 3.5184, "step": 60335 }, { "epoch": 4.099741812746297, "grad_norm": 1.3804748058319092, "learning_rate": 0.00048772761244734336, "loss": 3.4248, "step": 60340 }, { "epoch": 4.100081532816959, "grad_norm": 1.2789201736450195, "learning_rate": 0.0004876851474385107, "loss": 3.5055, "step": 60345 }, { "epoch": 4.100421252887621, "grad_norm": 1.0347365140914917, "learning_rate": 0.00048764268242967797, "loss": 3.5312, "step": 60350 }, { "epoch": 4.100760972958282, "grad_norm": 1.0215762853622437, "learning_rate": 0.00048760021742084525, "loss": 3.4963, "step": 60355 }, { "epoch": 4.101100693028944, "grad_norm": 1.4348803758621216, "learning_rate": 0.0004875577524120125, "loss": 3.4452, "step": 60360 }, { "epoch": 4.101440413099606, "grad_norm": 1.2428828477859497, "learning_rate": 0.0004875152874031798, "loss": 3.4392, "step": 60365 }, { "epoch": 4.1017801331702675, "grad_norm": 1.3611823320388794, "learning_rate": 0.0004874728223943471, "loss": 3.4131, "step": 60370 }, { "epoch": 4.1021198532409295, "grad_norm": 1.3440465927124023, "learning_rate": 0.0004874303573855143, "loss": 3.4168, "step": 60375 }, { "epoch": 4.102459573311592, "grad_norm": 1.1223922967910767, "learning_rate": 0.00048738789237668165, "loss": 3.4968, "step": 60380 }, { "epoch": 4.102799293382253, "grad_norm": 1.3711825609207153, "learning_rate": 0.00048734542736784893, "loss": 3.3561, "step": 60385 }, { "epoch": 4.103139013452915, "grad_norm": 1.5977611541748047, 
"learning_rate": 0.00048730296235901616, "loss": 3.3345, "step": 60390 }, { "epoch": 4.103478733523577, "grad_norm": 1.1822437047958374, "learning_rate": 0.00048726049735018344, "loss": 3.2518, "step": 60395 }, { "epoch": 4.103818453594238, "grad_norm": 1.3744304180145264, "learning_rate": 0.00048721803234135077, "loss": 3.2045, "step": 60400 }, { "epoch": 4.1041581736649, "grad_norm": 1.6478031873703003, "learning_rate": 0.000487175567332518, "loss": 3.414, "step": 60405 }, { "epoch": 4.104497893735562, "grad_norm": 1.2526137828826904, "learning_rate": 0.0004871331023236853, "loss": 3.2731, "step": 60410 }, { "epoch": 4.1048376138062235, "grad_norm": 1.3869484663009644, "learning_rate": 0.0004870906373148526, "loss": 3.3721, "step": 60415 }, { "epoch": 4.1051773338768855, "grad_norm": 1.9361847639083862, "learning_rate": 0.00048704817230601984, "loss": 3.4032, "step": 60420 }, { "epoch": 4.105517053947548, "grad_norm": 1.418067455291748, "learning_rate": 0.0004870057072971871, "loss": 3.4153, "step": 60425 }, { "epoch": 4.105856774018209, "grad_norm": 1.22860848903656, "learning_rate": 0.0004869632422883544, "loss": 3.2952, "step": 60430 }, { "epoch": 4.106196494088871, "grad_norm": 1.2415006160736084, "learning_rate": 0.0004869207772795217, "loss": 3.0775, "step": 60435 }, { "epoch": 4.106536214159533, "grad_norm": 1.5316827297210693, "learning_rate": 0.00048687831227068896, "loss": 3.4316, "step": 60440 }, { "epoch": 4.106875934230194, "grad_norm": 1.344600796699524, "learning_rate": 0.00048683584726185624, "loss": 3.4489, "step": 60445 }, { "epoch": 4.107215654300856, "grad_norm": 1.4313215017318726, "learning_rate": 0.0004867933822530235, "loss": 3.5456, "step": 60450 }, { "epoch": 4.107555374371518, "grad_norm": 1.3499367237091064, "learning_rate": 0.0004867509172441908, "loss": 3.3759, "step": 60455 }, { "epoch": 4.1078950944421795, "grad_norm": 15.03063678741455, "learning_rate": 0.0004867084522353581, "loss": 3.3266, "step": 60460 }, { "epoch": 
4.108234814512842, "grad_norm": 1.3901448249816895, "learning_rate": 0.0004866659872265253, "loss": 3.278, "step": 60465 }, { "epoch": 4.108574534583504, "grad_norm": 2.0878701210021973, "learning_rate": 0.00048662352221769264, "loss": 3.3461, "step": 60470 }, { "epoch": 4.108914254654165, "grad_norm": 2.0622141361236572, "learning_rate": 0.0004865810572088599, "loss": 3.4845, "step": 60475 }, { "epoch": 4.109253974724827, "grad_norm": 1.102796196937561, "learning_rate": 0.00048653859220002714, "loss": 3.4687, "step": 60480 }, { "epoch": 4.109593694795488, "grad_norm": 1.0628552436828613, "learning_rate": 0.0004864961271911945, "loss": 3.3143, "step": 60485 }, { "epoch": 4.10993341486615, "grad_norm": 1.1020904779434204, "learning_rate": 0.00048645366218236176, "loss": 3.6229, "step": 60490 }, { "epoch": 4.110273134936812, "grad_norm": 2.152662754058838, "learning_rate": 0.000486411197173529, "loss": 3.4186, "step": 60495 }, { "epoch": 4.110612855007473, "grad_norm": 1.52838134765625, "learning_rate": 0.0004863687321646963, "loss": 3.0659, "step": 60500 }, { "epoch": 4.1109525750781355, "grad_norm": 1.2885488271713257, "learning_rate": 0.0004863262671558636, "loss": 3.4433, "step": 60505 }, { "epoch": 4.111292295148798, "grad_norm": 1.3488866090774536, "learning_rate": 0.0004862838021470308, "loss": 3.4808, "step": 60510 }, { "epoch": 4.111632015219459, "grad_norm": 1.1348471641540527, "learning_rate": 0.0004862413371381981, "loss": 3.3934, "step": 60515 }, { "epoch": 4.111971735290121, "grad_norm": 1.4585809707641602, "learning_rate": 0.00048619887212936544, "loss": 3.279, "step": 60520 }, { "epoch": 4.112311455360783, "grad_norm": 1.595038652420044, "learning_rate": 0.0004861564071205327, "loss": 3.3665, "step": 60525 }, { "epoch": 4.112651175431444, "grad_norm": 1.6371760368347168, "learning_rate": 0.00048611394211169995, "loss": 3.2084, "step": 60530 }, { "epoch": 4.112990895502106, "grad_norm": 1.3586465120315552, "learning_rate": 0.0004860714771028673, 
"loss": 3.1628, "step": 60535 }, { "epoch": 4.113330615572768, "grad_norm": 1.3840200901031494, "learning_rate": 0.00048602901209403456, "loss": 3.2094, "step": 60540 }, { "epoch": 4.113670335643429, "grad_norm": 1.1648037433624268, "learning_rate": 0.0004859865470852018, "loss": 3.4726, "step": 60545 }, { "epoch": 4.1140100557140915, "grad_norm": 1.2096461057662964, "learning_rate": 0.00048594408207636907, "loss": 3.1335, "step": 60550 }, { "epoch": 4.114349775784754, "grad_norm": 1.0858008861541748, "learning_rate": 0.0004859016170675364, "loss": 3.2352, "step": 60555 }, { "epoch": 4.114689495855415, "grad_norm": 1.8197202682495117, "learning_rate": 0.0004858591520587036, "loss": 3.283, "step": 60560 }, { "epoch": 4.115029215926077, "grad_norm": 1.3387811183929443, "learning_rate": 0.0004858166870498709, "loss": 3.45, "step": 60565 }, { "epoch": 4.115368935996739, "grad_norm": 1.5422810316085815, "learning_rate": 0.00048577422204103824, "loss": 3.2953, "step": 60570 }, { "epoch": 4.1157086560674, "grad_norm": 1.3599711656570435, "learning_rate": 0.00048573175703220547, "loss": 3.1112, "step": 60575 }, { "epoch": 4.116048376138062, "grad_norm": 1.8201934099197388, "learning_rate": 0.00048568929202337275, "loss": 3.6008, "step": 60580 }, { "epoch": 4.116388096208724, "grad_norm": 1.1245782375335693, "learning_rate": 0.00048564682701454, "loss": 3.2535, "step": 60585 }, { "epoch": 4.116727816279385, "grad_norm": 1.2965643405914307, "learning_rate": 0.0004856043620057073, "loss": 3.303, "step": 60590 }, { "epoch": 4.1170675363500475, "grad_norm": 1.5156253576278687, "learning_rate": 0.0004855618969968746, "loss": 3.537, "step": 60595 }, { "epoch": 4.11740725642071, "grad_norm": 1.409791350364685, "learning_rate": 0.00048551943198804187, "loss": 3.6521, "step": 60600 }, { "epoch": 4.117746976491371, "grad_norm": 1.4151262044906616, "learning_rate": 0.00048547696697920915, "loss": 3.3247, "step": 60605 }, { "epoch": 4.118086696562033, "grad_norm": 1.4661190509796143, 
"learning_rate": 0.0004854345019703764, "loss": 3.2054, "step": 60610 }, { "epoch": 4.118426416632695, "grad_norm": 1.5885823965072632, "learning_rate": 0.0004853920369615437, "loss": 3.3309, "step": 60615 }, { "epoch": 4.118766136703356, "grad_norm": 12.369314193725586, "learning_rate": 0.00048534957195271093, "loss": 3.3904, "step": 60620 }, { "epoch": 4.119105856774018, "grad_norm": 1.4152238368988037, "learning_rate": 0.00048530710694387827, "loss": 3.5307, "step": 60625 }, { "epoch": 4.11944557684468, "grad_norm": 1.1629925966262817, "learning_rate": 0.00048526464193504555, "loss": 3.1885, "step": 60630 }, { "epoch": 4.1197852969153415, "grad_norm": 1.34468412399292, "learning_rate": 0.00048522217692621277, "loss": 3.6628, "step": 60635 }, { "epoch": 4.1201250169860035, "grad_norm": 1.427992343902588, "learning_rate": 0.0004851797119173801, "loss": 3.2775, "step": 60640 }, { "epoch": 4.120464737056666, "grad_norm": 1.2512552738189697, "learning_rate": 0.0004851372469085474, "loss": 3.3968, "step": 60645 }, { "epoch": 4.120804457127327, "grad_norm": 1.2956241369247437, "learning_rate": 0.0004850947818997146, "loss": 3.5679, "step": 60650 }, { "epoch": 4.121144177197989, "grad_norm": 1.1766630411148071, "learning_rate": 0.0004850523168908819, "loss": 3.5146, "step": 60655 }, { "epoch": 4.121483897268651, "grad_norm": 1.1253689527511597, "learning_rate": 0.00048500985188204923, "loss": 3.1869, "step": 60660 }, { "epoch": 4.121823617339312, "grad_norm": 1.3591878414154053, "learning_rate": 0.00048496738687321645, "loss": 3.2614, "step": 60665 }, { "epoch": 4.122163337409974, "grad_norm": 1.555234670639038, "learning_rate": 0.00048492492186438373, "loss": 3.3632, "step": 60670 }, { "epoch": 4.122503057480636, "grad_norm": 1.622925043106079, "learning_rate": 0.00048488245685555107, "loss": 3.1783, "step": 60675 }, { "epoch": 4.1228427775512975, "grad_norm": 1.641356110572815, "learning_rate": 0.0004848399918467183, "loss": 3.1758, "step": 60680 }, { "epoch": 
4.1231824976219595, "grad_norm": 1.9422777891159058, "learning_rate": 0.0004847975268378856, "loss": 3.3004, "step": 60685 }, { "epoch": 4.123522217692622, "grad_norm": 1.1214399337768555, "learning_rate": 0.00048475506182905285, "loss": 3.8313, "step": 60690 }, { "epoch": 4.123861937763283, "grad_norm": 1.2550400495529175, "learning_rate": 0.0004847125968202202, "loss": 3.1087, "step": 60695 }, { "epoch": 4.124201657833945, "grad_norm": 1.5337989330291748, "learning_rate": 0.0004846701318113874, "loss": 3.0536, "step": 60700 }, { "epoch": 4.124541377904607, "grad_norm": 1.3252395391464233, "learning_rate": 0.0004846276668025547, "loss": 3.3611, "step": 60705 }, { "epoch": 4.124881097975268, "grad_norm": 1.4354678392410278, "learning_rate": 0.00048458520179372203, "loss": 3.2678, "step": 60710 }, { "epoch": 4.12522081804593, "grad_norm": 1.2600617408752441, "learning_rate": 0.00048454273678488925, "loss": 3.3591, "step": 60715 }, { "epoch": 4.125560538116592, "grad_norm": 1.1869254112243652, "learning_rate": 0.00048450027177605653, "loss": 3.3311, "step": 60720 }, { "epoch": 4.1259002581872535, "grad_norm": 1.1187314987182617, "learning_rate": 0.0004844578067672238, "loss": 3.0794, "step": 60725 }, { "epoch": 4.1262399782579156, "grad_norm": 1.687401533126831, "learning_rate": 0.0004844153417583911, "loss": 3.2724, "step": 60730 }, { "epoch": 4.126579698328578, "grad_norm": 1.4224212169647217, "learning_rate": 0.0004843728767495584, "loss": 3.3044, "step": 60735 }, { "epoch": 4.126919418399239, "grad_norm": 1.3152098655700684, "learning_rate": 0.00048433041174072565, "loss": 3.2694, "step": 60740 }, { "epoch": 4.127259138469901, "grad_norm": 1.5000052452087402, "learning_rate": 0.00048428794673189293, "loss": 3.0191, "step": 60745 }, { "epoch": 4.127598858540563, "grad_norm": 1.3827846050262451, "learning_rate": 0.0004842454817230602, "loss": 3.586, "step": 60750 }, { "epoch": 4.127938578611224, "grad_norm": 1.198154091835022, "learning_rate": 
0.0004842030167142275, "loss": 3.4068, "step": 60755 }, { "epoch": 4.128278298681886, "grad_norm": 1.7401450872421265, "learning_rate": 0.0004841605517053947, "loss": 3.4013, "step": 60760 }, { "epoch": 4.128618018752548, "grad_norm": 1.3707464933395386, "learning_rate": 0.00048411808669656205, "loss": 3.2729, "step": 60765 }, { "epoch": 4.1289577388232095, "grad_norm": 1.5301806926727295, "learning_rate": 0.00048407562168772933, "loss": 3.4712, "step": 60770 }, { "epoch": 4.129297458893872, "grad_norm": 1.2174055576324463, "learning_rate": 0.00048403315667889656, "loss": 3.276, "step": 60775 }, { "epoch": 4.129637178964534, "grad_norm": 1.4373911619186401, "learning_rate": 0.0004839906916700639, "loss": 3.3962, "step": 60780 }, { "epoch": 4.129976899035195, "grad_norm": 1.201107382774353, "learning_rate": 0.0004839482266612312, "loss": 3.4298, "step": 60785 }, { "epoch": 4.130316619105857, "grad_norm": 1.6760069131851196, "learning_rate": 0.0004839057616523984, "loss": 3.4815, "step": 60790 }, { "epoch": 4.130656339176519, "grad_norm": 1.3105319738388062, "learning_rate": 0.00048386329664356574, "loss": 3.3085, "step": 60795 }, { "epoch": 4.13099605924718, "grad_norm": 1.2211135625839233, "learning_rate": 0.000483820831634733, "loss": 3.2661, "step": 60800 }, { "epoch": 4.131335779317842, "grad_norm": 1.126229166984558, "learning_rate": 0.00048377836662590024, "loss": 3.1042, "step": 60805 }, { "epoch": 4.131675499388503, "grad_norm": 1.5339651107788086, "learning_rate": 0.0004837359016170675, "loss": 3.4569, "step": 60810 }, { "epoch": 4.1320152194591655, "grad_norm": 6.2190842628479, "learning_rate": 0.00048369343660823486, "loss": 3.2662, "step": 60815 }, { "epoch": 4.132354939529828, "grad_norm": 1.3428163528442383, "learning_rate": 0.0004836509715994021, "loss": 3.5023, "step": 60820 }, { "epoch": 4.132694659600489, "grad_norm": 1.416890025138855, "learning_rate": 0.00048360850659056936, "loss": 3.3476, "step": 60825 }, { "epoch": 4.133034379671151, 
"grad_norm": 1.5003193616867065, "learning_rate": 0.0004835660415817367, "loss": 3.3707, "step": 60830 }, { "epoch": 4.133374099741813, "grad_norm": 1.2636469602584839, "learning_rate": 0.0004835235765729039, "loss": 3.3203, "step": 60835 }, { "epoch": 4.133713819812474, "grad_norm": 1.5394352674484253, "learning_rate": 0.0004834811115640712, "loss": 3.2651, "step": 60840 }, { "epoch": 4.134053539883136, "grad_norm": 1.7989602088928223, "learning_rate": 0.0004834386465552385, "loss": 3.1945, "step": 60845 }, { "epoch": 4.134393259953798, "grad_norm": 1.263663649559021, "learning_rate": 0.00048339618154640576, "loss": 3.4092, "step": 60850 }, { "epoch": 4.134732980024459, "grad_norm": 1.328361988067627, "learning_rate": 0.00048335371653757304, "loss": 3.3678, "step": 60855 }, { "epoch": 4.1350727000951215, "grad_norm": 1.3357975482940674, "learning_rate": 0.0004833112515287403, "loss": 3.3639, "step": 60860 }, { "epoch": 4.135412420165784, "grad_norm": 1.2998406887054443, "learning_rate": 0.00048326878651990766, "loss": 3.4821, "step": 60865 }, { "epoch": 4.135752140236445, "grad_norm": 1.233647108078003, "learning_rate": 0.0004832263215110749, "loss": 3.3021, "step": 60870 }, { "epoch": 4.136091860307107, "grad_norm": 1.1294819116592407, "learning_rate": 0.00048318385650224216, "loss": 3.3575, "step": 60875 }, { "epoch": 4.136431580377769, "grad_norm": 1.5232622623443604, "learning_rate": 0.00048314139149340944, "loss": 3.4787, "step": 60880 }, { "epoch": 4.13677130044843, "grad_norm": 1.3701896667480469, "learning_rate": 0.0004830989264845767, "loss": 3.4008, "step": 60885 }, { "epoch": 4.137111020519092, "grad_norm": 1.2526124715805054, "learning_rate": 0.000483056461475744, "loss": 3.5022, "step": 60890 }, { "epoch": 4.137450740589754, "grad_norm": 1.3575241565704346, "learning_rate": 0.0004830139964669113, "loss": 3.357, "step": 60895 }, { "epoch": 4.1377904606604154, "grad_norm": 1.2015835046768188, "learning_rate": 0.00048297153145807856, "loss": 3.3569, 
"step": 60900 }, { "epoch": 4.1381301807310775, "grad_norm": 1.3536865711212158, "learning_rate": 0.00048292906644924584, "loss": 3.355, "step": 60905 }, { "epoch": 4.13846990080174, "grad_norm": 1.4528864622116089, "learning_rate": 0.0004828866014404131, "loss": 3.3864, "step": 60910 }, { "epoch": 4.138809620872401, "grad_norm": 1.3366477489471436, "learning_rate": 0.00048284413643158035, "loss": 3.5888, "step": 60915 }, { "epoch": 4.139149340943063, "grad_norm": 1.5368084907531738, "learning_rate": 0.0004828016714227477, "loss": 3.284, "step": 60920 }, { "epoch": 4.139489061013725, "grad_norm": 1.2330178022384644, "learning_rate": 0.00048275920641391496, "loss": 3.2962, "step": 60925 }, { "epoch": 4.139828781084386, "grad_norm": 1.58428156375885, "learning_rate": 0.0004827167414050822, "loss": 3.7389, "step": 60930 }, { "epoch": 4.140168501155048, "grad_norm": 1.3380494117736816, "learning_rate": 0.0004826742763962495, "loss": 3.3744, "step": 60935 }, { "epoch": 4.14050822122571, "grad_norm": 1.0853201150894165, "learning_rate": 0.0004826318113874168, "loss": 3.3621, "step": 60940 }, { "epoch": 4.1408479412963715, "grad_norm": 1.8294603824615479, "learning_rate": 0.00048258934637858403, "loss": 3.6392, "step": 60945 }, { "epoch": 4.1411876613670335, "grad_norm": 1.17436945438385, "learning_rate": 0.0004825468813697513, "loss": 3.56, "step": 60950 }, { "epoch": 4.141527381437696, "grad_norm": 2.7272825241088867, "learning_rate": 0.00048250441636091864, "loss": 3.0654, "step": 60955 }, { "epoch": 4.141867101508357, "grad_norm": 2.2707889080047607, "learning_rate": 0.00048246195135208587, "loss": 3.2404, "step": 60960 }, { "epoch": 4.142206821579019, "grad_norm": 2.272162437438965, "learning_rate": 0.00048241948634325315, "loss": 3.3037, "step": 60965 }, { "epoch": 4.142546541649681, "grad_norm": 1.535841703414917, "learning_rate": 0.0004823770213344205, "loss": 3.4587, "step": 60970 }, { "epoch": 4.142886261720342, "grad_norm": 1.291743516921997, "learning_rate": 
0.0004823345563255877, "loss": 3.3057, "step": 60975 }, { "epoch": 4.143225981791004, "grad_norm": 1.2372404336929321, "learning_rate": 0.000482292091316755, "loss": 3.2783, "step": 60980 }, { "epoch": 4.143565701861666, "grad_norm": 1.4786031246185303, "learning_rate": 0.00048224962630792227, "loss": 3.5206, "step": 60985 }, { "epoch": 4.1439054219323275, "grad_norm": 1.6558470726013184, "learning_rate": 0.00048220716129908955, "loss": 3.292, "step": 60990 }, { "epoch": 4.1442451420029895, "grad_norm": 3.1272294521331787, "learning_rate": 0.00048216469629025683, "loss": 3.1954, "step": 60995 }, { "epoch": 4.144584862073652, "grad_norm": 1.2127532958984375, "learning_rate": 0.0004821222312814241, "loss": 3.1027, "step": 61000 }, { "epoch": 4.144924582144313, "grad_norm": 1.2840685844421387, "learning_rate": 0.0004820797662725914, "loss": 3.31, "step": 61005 }, { "epoch": 4.145264302214975, "grad_norm": 1.138804316520691, "learning_rate": 0.00048203730126375867, "loss": 3.4792, "step": 61010 }, { "epoch": 4.145604022285637, "grad_norm": 1.238874077796936, "learning_rate": 0.00048199483625492595, "loss": 3.425, "step": 61015 }, { "epoch": 4.145943742356298, "grad_norm": 1.1730848550796509, "learning_rate": 0.0004819523712460932, "loss": 3.2725, "step": 61020 }, { "epoch": 4.14628346242696, "grad_norm": 1.3638619184494019, "learning_rate": 0.0004819099062372605, "loss": 3.4932, "step": 61025 }, { "epoch": 4.146623182497622, "grad_norm": 1.176190733909607, "learning_rate": 0.0004818674412284278, "loss": 3.4602, "step": 61030 }, { "epoch": 4.1469629025682835, "grad_norm": 1.1246988773345947, "learning_rate": 0.00048182497621959507, "loss": 3.3799, "step": 61035 }, { "epoch": 4.147302622638946, "grad_norm": 1.0509707927703857, "learning_rate": 0.00048178251121076235, "loss": 3.4723, "step": 61040 }, { "epoch": 4.147642342709608, "grad_norm": 1.3744937181472778, "learning_rate": 0.00048174004620192963, "loss": 3.6361, "step": 61045 }, { "epoch": 4.147982062780269, 
"grad_norm": 1.738588809967041, "learning_rate": 0.0004816975811930969, "loss": 3.3947, "step": 61050 }, { "epoch": 4.148321782850931, "grad_norm": 1.1685670614242554, "learning_rate": 0.0004816551161842642, "loss": 3.4721, "step": 61055 }, { "epoch": 4.148661502921593, "grad_norm": 1.0204838514328003, "learning_rate": 0.00048161265117543147, "loss": 3.4485, "step": 61060 }, { "epoch": 4.149001222992254, "grad_norm": 1.1313363313674927, "learning_rate": 0.00048157018616659875, "loss": 3.2813, "step": 61065 }, { "epoch": 4.149340943062916, "grad_norm": 1.1829533576965332, "learning_rate": 0.000481527721157766, "loss": 3.4423, "step": 61070 }, { "epoch": 4.149680663133578, "grad_norm": 2.1671948432922363, "learning_rate": 0.0004814852561489333, "loss": 3.3784, "step": 61075 }, { "epoch": 4.1500203832042395, "grad_norm": 1.285555124282837, "learning_rate": 0.0004814427911401006, "loss": 3.3331, "step": 61080 }, { "epoch": 4.150360103274902, "grad_norm": 1.5388503074645996, "learning_rate": 0.0004814003261312678, "loss": 3.313, "step": 61085 }, { "epoch": 4.150699823345564, "grad_norm": 1.2903456687927246, "learning_rate": 0.00048135786112243515, "loss": 3.3685, "step": 61090 }, { "epoch": 4.151039543416225, "grad_norm": 1.5603156089782715, "learning_rate": 0.00048131539611360243, "loss": 3.5779, "step": 61095 }, { "epoch": 4.151379263486887, "grad_norm": 1.1460834741592407, "learning_rate": 0.00048127293110476966, "loss": 3.42, "step": 61100 }, { "epoch": 4.151718983557549, "grad_norm": 1.1982815265655518, "learning_rate": 0.00048123046609593694, "loss": 3.3373, "step": 61105 }, { "epoch": 4.15205870362821, "grad_norm": 1.460005283355713, "learning_rate": 0.00048118800108710427, "loss": 3.5652, "step": 61110 }, { "epoch": 4.152398423698872, "grad_norm": 2.33648419380188, "learning_rate": 0.0004811455360782715, "loss": 3.3783, "step": 61115 }, { "epoch": 4.152738143769534, "grad_norm": 1.1695098876953125, "learning_rate": 0.0004811030710694388, "loss": 3.3989, "step": 
61120 }, { "epoch": 4.1530778638401955, "grad_norm": 1.2164502143859863, "learning_rate": 0.0004810606060606061, "loss": 3.4966, "step": 61125 }, { "epoch": 4.153417583910858, "grad_norm": 1.2091703414916992, "learning_rate": 0.00048101814105177334, "loss": 3.2936, "step": 61130 }, { "epoch": 4.15375730398152, "grad_norm": 1.283532738685608, "learning_rate": 0.0004809756760429406, "loss": 3.4055, "step": 61135 }, { "epoch": 4.154097024052181, "grad_norm": 1.4984469413757324, "learning_rate": 0.0004809332110341079, "loss": 3.3921, "step": 61140 }, { "epoch": 4.154436744122843, "grad_norm": 1.4092777967453003, "learning_rate": 0.0004808907460252752, "loss": 3.3452, "step": 61145 }, { "epoch": 4.154776464193505, "grad_norm": 1.490277886390686, "learning_rate": 0.00048084828101644246, "loss": 3.5051, "step": 61150 }, { "epoch": 4.155116184264166, "grad_norm": 1.167250394821167, "learning_rate": 0.00048080581600760974, "loss": 3.5362, "step": 61155 }, { "epoch": 4.155455904334828, "grad_norm": 1.3568960428237915, "learning_rate": 0.000480763350998777, "loss": 3.3705, "step": 61160 }, { "epoch": 4.15579562440549, "grad_norm": 1.3660149574279785, "learning_rate": 0.0004807208859899443, "loss": 3.4259, "step": 61165 }, { "epoch": 4.1561353444761515, "grad_norm": 1.161424994468689, "learning_rate": 0.0004806784209811116, "loss": 3.6488, "step": 61170 }, { "epoch": 4.156475064546814, "grad_norm": 1.2578842639923096, "learning_rate": 0.0004806359559722788, "loss": 3.2787, "step": 61175 }, { "epoch": 4.156814784617475, "grad_norm": 1.3604294061660767, "learning_rate": 0.00048059349096344614, "loss": 3.3666, "step": 61180 }, { "epoch": 4.157154504688137, "grad_norm": 1.7014189958572388, "learning_rate": 0.0004805510259546134, "loss": 3.4422, "step": 61185 }, { "epoch": 4.157494224758799, "grad_norm": 1.2689865827560425, "learning_rate": 0.00048050856094578064, "loss": 3.4318, "step": 61190 }, { "epoch": 4.15783394482946, "grad_norm": 1.0722062587738037, "learning_rate": 
0.000480466095936948, "loss": 3.4892, "step": 61195 }, { "epoch": 4.158173664900122, "grad_norm": 1.5493576526641846, "learning_rate": 0.00048042363092811526, "loss": 3.3703, "step": 61200 }, { "epoch": 4.158513384970784, "grad_norm": 1.4019209146499634, "learning_rate": 0.00048038116591928254, "loss": 3.2369, "step": 61205 }, { "epoch": 4.1588531050414455, "grad_norm": 1.47993004322052, "learning_rate": 0.00048033870091044976, "loss": 3.7061, "step": 61210 }, { "epoch": 4.1591928251121075, "grad_norm": 1.0960094928741455, "learning_rate": 0.0004802962359016171, "loss": 3.3219, "step": 61215 }, { "epoch": 4.15953254518277, "grad_norm": 1.2887208461761475, "learning_rate": 0.0004802537708927844, "loss": 3.492, "step": 61220 }, { "epoch": 4.159872265253431, "grad_norm": 1.574219822883606, "learning_rate": 0.0004802113058839516, "loss": 3.3442, "step": 61225 }, { "epoch": 4.160211985324093, "grad_norm": 1.1284207105636597, "learning_rate": 0.00048016884087511894, "loss": 3.3118, "step": 61230 }, { "epoch": 4.160551705394755, "grad_norm": 1.2340953350067139, "learning_rate": 0.0004801263758662862, "loss": 3.3436, "step": 61235 }, { "epoch": 4.160891425465416, "grad_norm": 1.667663812637329, "learning_rate": 0.00048008391085745345, "loss": 3.2545, "step": 61240 }, { "epoch": 4.161231145536078, "grad_norm": 1.1774030923843384, "learning_rate": 0.0004800414458486207, "loss": 3.4986, "step": 61245 }, { "epoch": 4.16157086560674, "grad_norm": 1.5794416666030884, "learning_rate": 0.00047999898083978806, "loss": 3.3084, "step": 61250 }, { "epoch": 4.1619105856774015, "grad_norm": 1.5776695013046265, "learning_rate": 0.0004799565158309553, "loss": 3.5122, "step": 61255 }, { "epoch": 4.1622503057480635, "grad_norm": 1.0767029523849487, "learning_rate": 0.00047991405082212257, "loss": 3.6068, "step": 61260 }, { "epoch": 4.162590025818726, "grad_norm": 1.4316350221633911, "learning_rate": 0.0004798715858132899, "loss": 3.5075, "step": 61265 }, { "epoch": 4.162929745889387, 
"grad_norm": 1.4215644598007202, "learning_rate": 0.0004798291208044571, "loss": 3.3846, "step": 61270 }, { "epoch": 4.163269465960049, "grad_norm": 1.2232246398925781, "learning_rate": 0.0004797866557956244, "loss": 3.5133, "step": 61275 }, { "epoch": 4.163609186030711, "grad_norm": 1.3590738773345947, "learning_rate": 0.0004797441907867917, "loss": 3.4896, "step": 61280 }, { "epoch": 4.163948906101372, "grad_norm": 1.0715053081512451, "learning_rate": 0.00047970172577795897, "loss": 3.3091, "step": 61285 }, { "epoch": 4.164288626172034, "grad_norm": 1.2648658752441406, "learning_rate": 0.00047965926076912625, "loss": 3.3284, "step": 61290 }, { "epoch": 4.164628346242696, "grad_norm": 1.294695258140564, "learning_rate": 0.0004796167957602935, "loss": 3.3198, "step": 61295 }, { "epoch": 4.1649680663133575, "grad_norm": 1.3017157316207886, "learning_rate": 0.0004795743307514608, "loss": 3.4587, "step": 61300 }, { "epoch": 4.1653077863840196, "grad_norm": 1.293859601020813, "learning_rate": 0.0004795318657426281, "loss": 3.5071, "step": 61305 }, { "epoch": 4.165647506454682, "grad_norm": 1.2137186527252197, "learning_rate": 0.00047948940073379537, "loss": 3.2196, "step": 61310 }, { "epoch": 4.165987226525343, "grad_norm": 1.4927762746810913, "learning_rate": 0.0004794469357249626, "loss": 3.5406, "step": 61315 }, { "epoch": 4.166326946596005, "grad_norm": 6.657795429229736, "learning_rate": 0.0004794044707161299, "loss": 3.234, "step": 61320 }, { "epoch": 4.166666666666667, "grad_norm": 1.3012402057647705, "learning_rate": 0.0004793620057072972, "loss": 3.2603, "step": 61325 }, { "epoch": 4.167006386737328, "grad_norm": 1.3636658191680908, "learning_rate": 0.00047931954069846443, "loss": 3.4415, "step": 61330 }, { "epoch": 4.16734610680799, "grad_norm": 1.1956031322479248, "learning_rate": 0.00047927707568963177, "loss": 3.4601, "step": 61335 }, { "epoch": 4.167685826878652, "grad_norm": 1.3336961269378662, "learning_rate": 0.00047923461068079905, "loss": 3.6233, 
"step": 61340 }, { "epoch": 4.1680255469493135, "grad_norm": 1.1564762592315674, "learning_rate": 0.00047919214567196627, "loss": 3.1128, "step": 61345 }, { "epoch": 4.168365267019976, "grad_norm": 1.6577342748641968, "learning_rate": 0.0004791496806631336, "loss": 3.2362, "step": 61350 }, { "epoch": 4.168704987090638, "grad_norm": 1.3226457834243774, "learning_rate": 0.0004791072156543009, "loss": 3.514, "step": 61355 }, { "epoch": 4.169044707161299, "grad_norm": 1.1187821626663208, "learning_rate": 0.0004790647506454681, "loss": 3.5552, "step": 61360 }, { "epoch": 4.169384427231961, "grad_norm": 1.32208251953125, "learning_rate": 0.0004790222856366354, "loss": 3.1977, "step": 61365 }, { "epoch": 4.169724147302623, "grad_norm": 1.4517083168029785, "learning_rate": 0.0004789798206278027, "loss": 3.1959, "step": 61370 }, { "epoch": 4.170063867373284, "grad_norm": 1.3094183206558228, "learning_rate": 0.00047893735561897, "loss": 3.2988, "step": 61375 }, { "epoch": 4.170403587443946, "grad_norm": 1.5569270849227905, "learning_rate": 0.00047889489061013723, "loss": 3.4676, "step": 61380 }, { "epoch": 4.170743307514608, "grad_norm": 1.423042893409729, "learning_rate": 0.00047885242560130457, "loss": 3.4911, "step": 61385 }, { "epoch": 4.1710830275852695, "grad_norm": 1.3218392133712769, "learning_rate": 0.00047880996059247185, "loss": 3.0086, "step": 61390 }, { "epoch": 4.171422747655932, "grad_norm": 1.4026306867599487, "learning_rate": 0.0004787674955836391, "loss": 3.5998, "step": 61395 }, { "epoch": 4.171762467726594, "grad_norm": 1.7112830877304077, "learning_rate": 0.00047872503057480635, "loss": 3.4504, "step": 61400 }, { "epoch": 4.172102187797255, "grad_norm": 1.154452919960022, "learning_rate": 0.0004786825655659737, "loss": 3.3174, "step": 61405 }, { "epoch": 4.172441907867917, "grad_norm": 1.4118388891220093, "learning_rate": 0.0004786401005571409, "loss": 3.5397, "step": 61410 }, { "epoch": 4.172781627938579, "grad_norm": 1.4712928533554077, 
"learning_rate": 0.0004785976355483082, "loss": 3.383, "step": 61415 }, { "epoch": 4.17312134800924, "grad_norm": 1.228078842163086, "learning_rate": 0.00047855517053947553, "loss": 3.4704, "step": 61420 }, { "epoch": 4.173461068079902, "grad_norm": 1.3576836585998535, "learning_rate": 0.00047851270553064275, "loss": 3.3646, "step": 61425 }, { "epoch": 4.173800788150564, "grad_norm": 1.2129744291305542, "learning_rate": 0.00047847024052181003, "loss": 3.4688, "step": 61430 }, { "epoch": 4.1741405082212255, "grad_norm": 1.46555757522583, "learning_rate": 0.0004784277755129773, "loss": 3.4965, "step": 61435 }, { "epoch": 4.174480228291888, "grad_norm": 1.4586586952209473, "learning_rate": 0.0004783853105041446, "loss": 3.5139, "step": 61440 }, { "epoch": 4.17481994836255, "grad_norm": 1.289707064628601, "learning_rate": 0.0004783428454953119, "loss": 3.419, "step": 61445 }, { "epoch": 4.175159668433211, "grad_norm": 1.0046279430389404, "learning_rate": 0.00047830038048647915, "loss": 3.5598, "step": 61450 }, { "epoch": 4.175499388503873, "grad_norm": 1.1535838842391968, "learning_rate": 0.00047825791547764643, "loss": 3.4007, "step": 61455 }, { "epoch": 4.175839108574535, "grad_norm": 2.5900888442993164, "learning_rate": 0.0004782154504688137, "loss": 3.2248, "step": 61460 }, { "epoch": 4.176178828645196, "grad_norm": 1.4214929342269897, "learning_rate": 0.000478172985459981, "loss": 3.3699, "step": 61465 }, { "epoch": 4.176518548715858, "grad_norm": 1.7867982387542725, "learning_rate": 0.0004781305204511482, "loss": 3.3577, "step": 61470 }, { "epoch": 4.17685826878652, "grad_norm": 1.1619610786437988, "learning_rate": 0.00047808805544231555, "loss": 3.4147, "step": 61475 }, { "epoch": 4.1771979888571815, "grad_norm": 1.1025022268295288, "learning_rate": 0.00047804559043348283, "loss": 3.5595, "step": 61480 }, { "epoch": 4.177537708927844, "grad_norm": 1.7286964654922485, "learning_rate": 0.00047800312542465006, "loss": 3.2137, "step": 61485 }, { "epoch": 
4.177877428998505, "grad_norm": 1.2227442264556885, "learning_rate": 0.0004779606604158174, "loss": 3.4432, "step": 61490 }, { "epoch": 4.178217149069167, "grad_norm": 1.4971646070480347, "learning_rate": 0.0004779181954069847, "loss": 3.4093, "step": 61495 }, { "epoch": 4.178556869139829, "grad_norm": 1.4143935441970825, "learning_rate": 0.0004778757303981519, "loss": 3.2735, "step": 61500 }, { "epoch": 4.17889658921049, "grad_norm": 2.1264290809631348, "learning_rate": 0.0004778332653893192, "loss": 3.3728, "step": 61505 }, { "epoch": 4.179236309281152, "grad_norm": 1.356764554977417, "learning_rate": 0.0004777908003804865, "loss": 3.4084, "step": 61510 }, { "epoch": 4.179576029351814, "grad_norm": 1.6436865329742432, "learning_rate": 0.00047774833537165374, "loss": 3.1298, "step": 61515 }, { "epoch": 4.1799157494224755, "grad_norm": 1.2769187688827515, "learning_rate": 0.000477705870362821, "loss": 3.4836, "step": 61520 }, { "epoch": 4.1802554694931375, "grad_norm": 1.2776769399642944, "learning_rate": 0.00047766340535398836, "loss": 3.1698, "step": 61525 }, { "epoch": 4.1805951895638, "grad_norm": 0.9080060124397278, "learning_rate": 0.0004776209403451556, "loss": 3.6129, "step": 61530 }, { "epoch": 4.180934909634461, "grad_norm": 1.4098044633865356, "learning_rate": 0.00047757847533632286, "loss": 3.6458, "step": 61535 }, { "epoch": 4.181274629705123, "grad_norm": 1.3997533321380615, "learning_rate": 0.00047753601032749014, "loss": 3.4158, "step": 61540 }, { "epoch": 4.181614349775785, "grad_norm": 1.3954181671142578, "learning_rate": 0.0004774935453186575, "loss": 3.4263, "step": 61545 }, { "epoch": 4.181954069846446, "grad_norm": 1.3240710496902466, "learning_rate": 0.0004774510803098247, "loss": 3.3381, "step": 61550 }, { "epoch": 4.182293789917108, "grad_norm": 1.2247159481048584, "learning_rate": 0.000477408615300992, "loss": 3.4127, "step": 61555 }, { "epoch": 4.18263350998777, "grad_norm": 1.2395128011703491, "learning_rate": 0.0004773661502921593, 
"loss": 3.1711, "step": 61560 }, { "epoch": 4.1829732300584315, "grad_norm": 1.1690561771392822, "learning_rate": 0.00047732368528332654, "loss": 3.5876, "step": 61565 }, { "epoch": 4.1833129501290935, "grad_norm": 1.4365057945251465, "learning_rate": 0.0004772812202744938, "loss": 3.3711, "step": 61570 }, { "epoch": 4.183652670199756, "grad_norm": 1.3121657371520996, "learning_rate": 0.0004772387552656611, "loss": 3.5633, "step": 61575 }, { "epoch": 4.183992390270417, "grad_norm": 1.2335963249206543, "learning_rate": 0.0004771962902568284, "loss": 3.5133, "step": 61580 }, { "epoch": 4.184332110341079, "grad_norm": 1.0792664289474487, "learning_rate": 0.00047715382524799566, "loss": 3.5124, "step": 61585 }, { "epoch": 4.184671830411741, "grad_norm": 1.0798203945159912, "learning_rate": 0.00047711136023916294, "loss": 3.413, "step": 61590 }, { "epoch": 4.185011550482402, "grad_norm": 1.2600599527359009, "learning_rate": 0.0004770688952303302, "loss": 3.4911, "step": 61595 }, { "epoch": 4.185351270553064, "grad_norm": 1.2775375843048096, "learning_rate": 0.0004770264302214975, "loss": 3.645, "step": 61600 }, { "epoch": 4.185690990623726, "grad_norm": 1.339818000793457, "learning_rate": 0.0004769839652126648, "loss": 3.4898, "step": 61605 }, { "epoch": 4.1860307106943875, "grad_norm": 1.423826813697815, "learning_rate": 0.000476941500203832, "loss": 3.5125, "step": 61610 }, { "epoch": 4.1863704307650496, "grad_norm": 1.361767053604126, "learning_rate": 0.00047689903519499934, "loss": 3.3898, "step": 61615 }, { "epoch": 4.186710150835712, "grad_norm": 1.582794189453125, "learning_rate": 0.0004768565701861666, "loss": 3.4846, "step": 61620 }, { "epoch": 4.187049870906373, "grad_norm": 1.5229177474975586, "learning_rate": 0.00047681410517733385, "loss": 3.3503, "step": 61625 }, { "epoch": 4.187389590977035, "grad_norm": 1.4097411632537842, "learning_rate": 0.0004767716401685012, "loss": 3.5253, "step": 61630 }, { "epoch": 4.187729311047697, "grad_norm": 
1.4854145050048828, "learning_rate": 0.00047672917515966846, "loss": 3.2745, "step": 61635 }, { "epoch": 4.188069031118358, "grad_norm": 1.2006887197494507, "learning_rate": 0.0004766867101508357, "loss": 3.5705, "step": 61640 }, { "epoch": 4.18840875118902, "grad_norm": 1.0682207345962524, "learning_rate": 0.000476644245142003, "loss": 3.1513, "step": 61645 }, { "epoch": 4.188748471259682, "grad_norm": 1.6144543886184692, "learning_rate": 0.0004766017801331703, "loss": 3.3011, "step": 61650 }, { "epoch": 4.1890881913303435, "grad_norm": 1.514273762702942, "learning_rate": 0.00047655931512433753, "loss": 3.3697, "step": 61655 }, { "epoch": 4.189427911401006, "grad_norm": 1.4045696258544922, "learning_rate": 0.0004765168501155048, "loss": 3.2101, "step": 61660 }, { "epoch": 4.189767631471668, "grad_norm": 1.2011736631393433, "learning_rate": 0.00047647438510667214, "loss": 3.3665, "step": 61665 }, { "epoch": 4.190107351542329, "grad_norm": 1.4698615074157715, "learning_rate": 0.00047643192009783937, "loss": 3.3899, "step": 61670 }, { "epoch": 4.190447071612991, "grad_norm": 1.2979717254638672, "learning_rate": 0.00047638945508900665, "loss": 3.4099, "step": 61675 }, { "epoch": 4.190786791683653, "grad_norm": 1.2604217529296875, "learning_rate": 0.000476346990080174, "loss": 3.2466, "step": 61680 }, { "epoch": 4.191126511754314, "grad_norm": 1.2968900203704834, "learning_rate": 0.0004763045250713412, "loss": 3.0729, "step": 61685 }, { "epoch": 4.191466231824976, "grad_norm": 1.1761291027069092, "learning_rate": 0.0004762620600625085, "loss": 3.4669, "step": 61690 }, { "epoch": 4.191805951895638, "grad_norm": 1.2406293153762817, "learning_rate": 0.00047621959505367577, "loss": 3.6923, "step": 61695 }, { "epoch": 4.1921456719662995, "grad_norm": 1.294573426246643, "learning_rate": 0.00047617713004484305, "loss": 3.5897, "step": 61700 }, { "epoch": 4.192485392036962, "grad_norm": 1.0862500667572021, "learning_rate": 0.00047613466503601033, "loss": 3.1966, "step": 61705 
}, { "epoch": 4.192825112107624, "grad_norm": 2.143998146057129, "learning_rate": 0.0004760922000271776, "loss": 3.2133, "step": 61710 }, { "epoch": 4.193164832178285, "grad_norm": 1.2693766355514526, "learning_rate": 0.00047604973501834494, "loss": 3.2241, "step": 61715 }, { "epoch": 4.193504552248947, "grad_norm": 1.1634764671325684, "learning_rate": 0.00047600727000951217, "loss": 3.3111, "step": 61720 }, { "epoch": 4.193844272319609, "grad_norm": 1.2748390436172485, "learning_rate": 0.00047596480500067945, "loss": 3.2944, "step": 61725 }, { "epoch": 4.19418399239027, "grad_norm": 1.1014989614486694, "learning_rate": 0.00047592233999184673, "loss": 3.4251, "step": 61730 }, { "epoch": 4.194523712460932, "grad_norm": 1.3593257665634155, "learning_rate": 0.000475879874983014, "loss": 3.3881, "step": 61735 }, { "epoch": 4.194863432531594, "grad_norm": 1.7468467950820923, "learning_rate": 0.0004758374099741813, "loss": 3.3704, "step": 61740 }, { "epoch": 4.1952031526022555, "grad_norm": 1.591548204421997, "learning_rate": 0.00047579494496534857, "loss": 3.4944, "step": 61745 }, { "epoch": 4.195542872672918, "grad_norm": 0.9690475463867188, "learning_rate": 0.00047575247995651585, "loss": 3.5605, "step": 61750 }, { "epoch": 4.19588259274358, "grad_norm": 1.1120884418487549, "learning_rate": 0.00047571001494768313, "loss": 3.4849, "step": 61755 }, { "epoch": 4.196222312814241, "grad_norm": 1.3545587062835693, "learning_rate": 0.0004756675499388504, "loss": 3.3418, "step": 61760 }, { "epoch": 4.196562032884903, "grad_norm": 1.3116885423660278, "learning_rate": 0.00047562508493001764, "loss": 3.2621, "step": 61765 }, { "epoch": 4.196901752955565, "grad_norm": 1.5335749387741089, "learning_rate": 0.00047558261992118497, "loss": 3.5145, "step": 61770 }, { "epoch": 4.197241473026226, "grad_norm": 1.4414454698562622, "learning_rate": 0.00047554015491235225, "loss": 3.5041, "step": 61775 }, { "epoch": 4.197581193096888, "grad_norm": 1.3499826192855835, "learning_rate": 
0.0004754976899035195, "loss": 3.4451, "step": 61780 }, { "epoch": 4.19792091316755, "grad_norm": 1.3612631559371948, "learning_rate": 0.0004754552248946868, "loss": 3.3003, "step": 61785 }, { "epoch": 4.1982606332382115, "grad_norm": 1.1259584426879883, "learning_rate": 0.0004754127598858541, "loss": 3.4816, "step": 61790 }, { "epoch": 4.198600353308874, "grad_norm": 1.3938663005828857, "learning_rate": 0.0004753702948770213, "loss": 3.3275, "step": 61795 }, { "epoch": 4.198940073379536, "grad_norm": 1.3387781381607056, "learning_rate": 0.0004753278298681886, "loss": 3.6495, "step": 61800 }, { "epoch": 4.199279793450197, "grad_norm": 1.38521409034729, "learning_rate": 0.00047528536485935593, "loss": 3.4035, "step": 61805 }, { "epoch": 4.199619513520859, "grad_norm": 1.2145591974258423, "learning_rate": 0.00047524289985052316, "loss": 3.4289, "step": 61810 }, { "epoch": 4.199959233591521, "grad_norm": 1.2294011116027832, "learning_rate": 0.00047520043484169044, "loss": 3.4859, "step": 61815 }, { "epoch": 4.200298953662182, "grad_norm": 1.5666486024856567, "learning_rate": 0.00047515796983285777, "loss": 3.4779, "step": 61820 }, { "epoch": 4.200638673732844, "grad_norm": 1.4526419639587402, "learning_rate": 0.000475115504824025, "loss": 3.4482, "step": 61825 }, { "epoch": 4.200978393803506, "grad_norm": 1.277561068534851, "learning_rate": 0.0004750730398151923, "loss": 3.4873, "step": 61830 }, { "epoch": 4.2013181138741675, "grad_norm": 1.9238711595535278, "learning_rate": 0.00047503057480635956, "loss": 3.3824, "step": 61835 }, { "epoch": 4.20165783394483, "grad_norm": 1.1187210083007812, "learning_rate": 0.00047498810979752684, "loss": 3.2796, "step": 61840 }, { "epoch": 4.201997554015492, "grad_norm": 1.689474105834961, "learning_rate": 0.0004749456447886941, "loss": 3.4221, "step": 61845 }, { "epoch": 4.202337274086153, "grad_norm": 1.1699086427688599, "learning_rate": 0.0004749031797798614, "loss": 3.2467, "step": 61850 }, { "epoch": 4.202676994156815, 
"grad_norm": 1.551489233970642, "learning_rate": 0.0004748607147710287, "loss": 3.1854, "step": 61855 }, { "epoch": 4.203016714227476, "grad_norm": 1.2140430212020874, "learning_rate": 0.00047481824976219596, "loss": 3.3335, "step": 61860 }, { "epoch": 4.203356434298138, "grad_norm": 1.25232994556427, "learning_rate": 0.00047477578475336324, "loss": 3.3267, "step": 61865 }, { "epoch": 4.2036961543688, "grad_norm": 1.49566650390625, "learning_rate": 0.00047473331974453046, "loss": 3.33, "step": 61870 }, { "epoch": 4.2040358744394615, "grad_norm": 1.7647343873977661, "learning_rate": 0.0004746908547356978, "loss": 3.0039, "step": 61875 }, { "epoch": 4.2043755945101235, "grad_norm": 1.1364859342575073, "learning_rate": 0.0004746483897268651, "loss": 3.4396, "step": 61880 }, { "epoch": 4.204715314580786, "grad_norm": 1.1616562604904175, "learning_rate": 0.00047460592471803236, "loss": 3.4953, "step": 61885 }, { "epoch": 4.205055034651447, "grad_norm": 1.3906444311141968, "learning_rate": 0.00047456345970919964, "loss": 3.3696, "step": 61890 }, { "epoch": 4.205394754722109, "grad_norm": 1.4336622953414917, "learning_rate": 0.0004745209947003669, "loss": 3.4203, "step": 61895 }, { "epoch": 4.205734474792771, "grad_norm": 1.6503397226333618, "learning_rate": 0.0004744785296915342, "loss": 3.3113, "step": 61900 }, { "epoch": 4.206074194863432, "grad_norm": 1.5293426513671875, "learning_rate": 0.0004744360646827015, "loss": 3.2135, "step": 61905 }, { "epoch": 4.206413914934094, "grad_norm": 1.1351306438446045, "learning_rate": 0.00047439359967386876, "loss": 3.4372, "step": 61910 }, { "epoch": 4.206753635004756, "grad_norm": 1.0990123748779297, "learning_rate": 0.00047435113466503604, "loss": 3.5875, "step": 61915 }, { "epoch": 4.2070933550754175, "grad_norm": 1.8711743354797363, "learning_rate": 0.00047430866965620326, "loss": 3.4818, "step": 61920 }, { "epoch": 4.20743307514608, "grad_norm": 1.2242019176483154, "learning_rate": 0.0004742662046473706, "loss": 3.2637, 
"step": 61925 }, { "epoch": 4.207772795216742, "grad_norm": 1.1408454179763794, "learning_rate": 0.0004742237396385379, "loss": 3.255, "step": 61930 }, { "epoch": 4.208112515287403, "grad_norm": 1.0640625953674316, "learning_rate": 0.0004741812746297051, "loss": 3.4541, "step": 61935 }, { "epoch": 4.208452235358065, "grad_norm": 1.4946082830429077, "learning_rate": 0.00047413880962087244, "loss": 3.4011, "step": 61940 }, { "epoch": 4.208791955428727, "grad_norm": 1.1439886093139648, "learning_rate": 0.0004740963446120397, "loss": 3.2809, "step": 61945 }, { "epoch": 4.209131675499388, "grad_norm": 1.205521583557129, "learning_rate": 0.00047405387960320694, "loss": 3.5142, "step": 61950 }, { "epoch": 4.20947139557005, "grad_norm": 1.4292588233947754, "learning_rate": 0.0004740114145943742, "loss": 3.3834, "step": 61955 }, { "epoch": 4.209811115640712, "grad_norm": 1.463395595550537, "learning_rate": 0.00047396894958554156, "loss": 3.4418, "step": 61960 }, { "epoch": 4.2101508357113735, "grad_norm": 1.6517270803451538, "learning_rate": 0.0004739264845767088, "loss": 3.4419, "step": 61965 }, { "epoch": 4.210490555782036, "grad_norm": 1.1687062978744507, "learning_rate": 0.00047388401956787607, "loss": 3.2561, "step": 61970 }, { "epoch": 4.210830275852698, "grad_norm": 1.1801605224609375, "learning_rate": 0.0004738415545590434, "loss": 3.4272, "step": 61975 }, { "epoch": 4.211169995923359, "grad_norm": 1.2641136646270752, "learning_rate": 0.0004737990895502106, "loss": 3.2673, "step": 61980 }, { "epoch": 4.211509715994021, "grad_norm": 1.26486337184906, "learning_rate": 0.0004737566245413779, "loss": 3.4165, "step": 61985 }, { "epoch": 4.211849436064683, "grad_norm": 1.3731188774108887, "learning_rate": 0.0004737141595325452, "loss": 3.2664, "step": 61990 }, { "epoch": 4.212189156135344, "grad_norm": 2.885732650756836, "learning_rate": 0.00047367169452371247, "loss": 3.4678, "step": 61995 }, { "epoch": 4.212528876206006, "grad_norm": 1.0670100450515747, "learning_rate": 
0.00047362922951487975, "loss": 3.4991, "step": 62000 }, { "epoch": 4.212868596276668, "grad_norm": 1.747323751449585, "learning_rate": 0.000473586764506047, "loss": 3.4395, "step": 62005 }, { "epoch": 4.2132083163473295, "grad_norm": 1.5978803634643555, "learning_rate": 0.0004735442994972143, "loss": 3.2622, "step": 62010 }, { "epoch": 4.213548036417992, "grad_norm": 1.0791419744491577, "learning_rate": 0.0004735018344883816, "loss": 3.4087, "step": 62015 }, { "epoch": 4.213887756488654, "grad_norm": 1.4132972955703735, "learning_rate": 0.00047345936947954887, "loss": 3.4937, "step": 62020 }, { "epoch": 4.214227476559315, "grad_norm": 1.3491343259811401, "learning_rate": 0.0004734169044707161, "loss": 3.5932, "step": 62025 }, { "epoch": 4.214567196629977, "grad_norm": 1.1237963438034058, "learning_rate": 0.0004733744394618834, "loss": 3.3431, "step": 62030 }, { "epoch": 4.214906916700639, "grad_norm": 1.6491711139678955, "learning_rate": 0.0004733319744530507, "loss": 3.5218, "step": 62035 }, { "epoch": 4.2152466367713, "grad_norm": 1.4118964672088623, "learning_rate": 0.00047328950944421793, "loss": 3.105, "step": 62040 }, { "epoch": 4.215586356841962, "grad_norm": 1.1954336166381836, "learning_rate": 0.00047324704443538527, "loss": 3.5715, "step": 62045 }, { "epoch": 4.215926076912624, "grad_norm": 1.205946445465088, "learning_rate": 0.00047320457942655255, "loss": 3.1893, "step": 62050 }, { "epoch": 4.2162657969832855, "grad_norm": 1.7303986549377441, "learning_rate": 0.0004731621144177198, "loss": 3.2283, "step": 62055 }, { "epoch": 4.216605517053948, "grad_norm": 1.3854676485061646, "learning_rate": 0.00047311964940888705, "loss": 3.3081, "step": 62060 }, { "epoch": 4.21694523712461, "grad_norm": 1.1409190893173218, "learning_rate": 0.0004730771844000544, "loss": 3.3334, "step": 62065 }, { "epoch": 4.217284957195271, "grad_norm": 1.5035772323608398, "learning_rate": 0.00047303471939122167, "loss": 3.6131, "step": 62070 }, { "epoch": 4.217624677265933, 
"grad_norm": 1.336029052734375, "learning_rate": 0.0004729922543823889, "loss": 3.303, "step": 62075 }, { "epoch": 4.217964397336595, "grad_norm": 1.1799980401992798, "learning_rate": 0.0004729497893735562, "loss": 3.1966, "step": 62080 }, { "epoch": 4.218304117407256, "grad_norm": 1.314826250076294, "learning_rate": 0.0004729073243647235, "loss": 3.4247, "step": 62085 }, { "epoch": 4.218643837477918, "grad_norm": 1.5879155397415161, "learning_rate": 0.00047286485935589073, "loss": 3.4354, "step": 62090 }, { "epoch": 4.21898355754858, "grad_norm": 1.2442888021469116, "learning_rate": 0.000472822394347058, "loss": 3.3221, "step": 62095 }, { "epoch": 4.2193232776192415, "grad_norm": 1.343202829360962, "learning_rate": 0.00047277992933822535, "loss": 3.44, "step": 62100 }, { "epoch": 4.219662997689904, "grad_norm": 1.067472219467163, "learning_rate": 0.0004727374643293926, "loss": 3.6339, "step": 62105 }, { "epoch": 4.220002717760566, "grad_norm": 1.587327003479004, "learning_rate": 0.00047269499932055985, "loss": 3.0668, "step": 62110 }, { "epoch": 4.220342437831227, "grad_norm": 1.2623827457427979, "learning_rate": 0.0004726525343117272, "loss": 3.3544, "step": 62115 }, { "epoch": 4.220682157901889, "grad_norm": 1.0225017070770264, "learning_rate": 0.0004726100693028944, "loss": 3.4801, "step": 62120 }, { "epoch": 4.221021877972551, "grad_norm": 1.2586368322372437, "learning_rate": 0.0004725676042940617, "loss": 3.4631, "step": 62125 }, { "epoch": 4.221361598043212, "grad_norm": 1.5258033275604248, "learning_rate": 0.000472525139285229, "loss": 3.4387, "step": 62130 }, { "epoch": 4.221701318113874, "grad_norm": 1.3328700065612793, "learning_rate": 0.00047248267427639625, "loss": 3.2232, "step": 62135 }, { "epoch": 4.222041038184536, "grad_norm": 1.158023476600647, "learning_rate": 0.00047244020926756353, "loss": 3.3554, "step": 62140 }, { "epoch": 4.2223807582551975, "grad_norm": 1.1720421314239502, "learning_rate": 0.0004723977442587308, "loss": 3.3916, "step": 
62145 }, { "epoch": 4.22272047832586, "grad_norm": 1.7303367853164673, "learning_rate": 0.0004723552792498981, "loss": 3.5243, "step": 62150 }, { "epoch": 4.223060198396522, "grad_norm": 1.3659502267837524, "learning_rate": 0.0004723128142410654, "loss": 3.5452, "step": 62155 }, { "epoch": 4.223399918467183, "grad_norm": 1.1049585342407227, "learning_rate": 0.00047227034923223265, "loss": 3.2417, "step": 62160 }, { "epoch": 4.223739638537845, "grad_norm": 1.2170356512069702, "learning_rate": 0.0004722278842233999, "loss": 3.1982, "step": 62165 }, { "epoch": 4.224079358608506, "grad_norm": 1.569958209991455, "learning_rate": 0.0004721854192145672, "loss": 3.1357, "step": 62170 }, { "epoch": 4.224419078679168, "grad_norm": 1.1611539125442505, "learning_rate": 0.0004721429542057345, "loss": 3.2523, "step": 62175 }, { "epoch": 4.22475879874983, "grad_norm": 1.6494905948638916, "learning_rate": 0.0004721004891969017, "loss": 3.4656, "step": 62180 }, { "epoch": 4.2250985188204915, "grad_norm": 1.3435118198394775, "learning_rate": 0.00047205802418806905, "loss": 3.2797, "step": 62185 }, { "epoch": 4.2254382388911536, "grad_norm": 1.533419132232666, "learning_rate": 0.00047201555917923633, "loss": 3.3414, "step": 62190 }, { "epoch": 4.225777958961816, "grad_norm": 1.258671760559082, "learning_rate": 0.00047197309417040356, "loss": 3.2391, "step": 62195 }, { "epoch": 4.226117679032477, "grad_norm": 1.4952783584594727, "learning_rate": 0.0004719306291615709, "loss": 3.4449, "step": 62200 }, { "epoch": 4.226457399103139, "grad_norm": 1.474999189376831, "learning_rate": 0.0004718881641527382, "loss": 3.4606, "step": 62205 }, { "epoch": 4.226797119173801, "grad_norm": 1.148144006729126, "learning_rate": 0.0004718456991439054, "loss": 3.1224, "step": 62210 }, { "epoch": 4.227136839244462, "grad_norm": 1.408357858657837, "learning_rate": 0.0004718032341350727, "loss": 3.2409, "step": 62215 }, { "epoch": 4.227476559315124, "grad_norm": 1.1724058389663696, "learning_rate": 
0.00047176076912624, "loss": 3.514, "step": 62220 }, { "epoch": 4.227816279385786, "grad_norm": 1.3127189874649048, "learning_rate": 0.0004717183041174073, "loss": 3.2905, "step": 62225 }, { "epoch": 4.2281559994564475, "grad_norm": 1.510766863822937, "learning_rate": 0.0004716758391085745, "loss": 3.3352, "step": 62230 }, { "epoch": 4.22849571952711, "grad_norm": 1.408848762512207, "learning_rate": 0.00047163337409974186, "loss": 3.3904, "step": 62235 }, { "epoch": 4.228835439597772, "grad_norm": 1.3646589517593384, "learning_rate": 0.00047159090909090914, "loss": 3.4668, "step": 62240 }, { "epoch": 4.229175159668433, "grad_norm": 1.397926926612854, "learning_rate": 0.00047154844408207636, "loss": 3.4068, "step": 62245 }, { "epoch": 4.229514879739095, "grad_norm": 1.148913025856018, "learning_rate": 0.00047150597907324364, "loss": 3.2548, "step": 62250 }, { "epoch": 4.229854599809757, "grad_norm": 1.2563645839691162, "learning_rate": 0.000471463514064411, "loss": 3.2881, "step": 62255 }, { "epoch": 4.230194319880418, "grad_norm": 1.0854688882827759, "learning_rate": 0.0004714210490555782, "loss": 3.6223, "step": 62260 }, { "epoch": 4.23053403995108, "grad_norm": 1.358669638633728, "learning_rate": 0.0004713785840467455, "loss": 3.3907, "step": 62265 }, { "epoch": 4.230873760021742, "grad_norm": 1.560815453529358, "learning_rate": 0.0004713361190379128, "loss": 3.2688, "step": 62270 }, { "epoch": 4.2312134800924035, "grad_norm": 1.1898003816604614, "learning_rate": 0.00047129365402908004, "loss": 3.4914, "step": 62275 }, { "epoch": 4.231553200163066, "grad_norm": 1.250665545463562, "learning_rate": 0.0004712511890202473, "loss": 3.5498, "step": 62280 }, { "epoch": 4.231892920233728, "grad_norm": 1.1467031240463257, "learning_rate": 0.0004712087240114146, "loss": 3.5216, "step": 62285 }, { "epoch": 4.232232640304389, "grad_norm": 1.2791498899459839, "learning_rate": 0.0004711662590025819, "loss": 3.4355, "step": 62290 }, { "epoch": 4.232572360375051, "grad_norm": 
1.8813444375991821, "learning_rate": 0.00047112379399374916, "loss": 3.4474, "step": 62295 }, { "epoch": 4.232912080445713, "grad_norm": 1.3960115909576416, "learning_rate": 0.00047108132898491644, "loss": 3.3535, "step": 62300 }, { "epoch": 4.233251800516374, "grad_norm": 1.5986474752426147, "learning_rate": 0.0004710388639760837, "loss": 3.2836, "step": 62305 }, { "epoch": 4.233591520587036, "grad_norm": 1.699904441833496, "learning_rate": 0.000470996398967251, "loss": 3.1076, "step": 62310 }, { "epoch": 4.233931240657698, "grad_norm": 1.4382554292678833, "learning_rate": 0.0004709539339584183, "loss": 3.4989, "step": 62315 }, { "epoch": 4.2342709607283595, "grad_norm": 1.3692933320999146, "learning_rate": 0.0004709114689495855, "loss": 3.1626, "step": 62320 }, { "epoch": 4.234610680799022, "grad_norm": 1.5745625495910645, "learning_rate": 0.00047086900394075284, "loss": 3.1595, "step": 62325 }, { "epoch": 4.234950400869684, "grad_norm": 1.5613523721694946, "learning_rate": 0.0004708265389319201, "loss": 3.177, "step": 62330 }, { "epoch": 4.235290120940345, "grad_norm": 1.085039496421814, "learning_rate": 0.00047078407392308735, "loss": 3.5462, "step": 62335 }, { "epoch": 4.235629841011007, "grad_norm": 1.1976269483566284, "learning_rate": 0.0004707416089142547, "loss": 3.4234, "step": 62340 }, { "epoch": 4.235969561081669, "grad_norm": 1.446313738822937, "learning_rate": 0.00047069914390542196, "loss": 3.1646, "step": 62345 }, { "epoch": 4.23630928115233, "grad_norm": 1.4687561988830566, "learning_rate": 0.0004706566788965892, "loss": 3.3899, "step": 62350 }, { "epoch": 4.236649001222992, "grad_norm": 1.1338353157043457, "learning_rate": 0.00047061421388775647, "loss": 3.3025, "step": 62355 }, { "epoch": 4.236988721293654, "grad_norm": 1.3436775207519531, "learning_rate": 0.0004705717488789238, "loss": 3.3179, "step": 62360 }, { "epoch": 4.2373284413643155, "grad_norm": 1.0535119771957397, "learning_rate": 0.00047052928387009103, "loss": 3.4024, "step": 62365 }, 
{ "epoch": 4.237668161434978, "grad_norm": 1.3185067176818848, "learning_rate": 0.0004704868188612583, "loss": 3.3736, "step": 62370 }, { "epoch": 4.23800788150564, "grad_norm": 2.018765449523926, "learning_rate": 0.00047044435385242564, "loss": 3.2795, "step": 62375 }, { "epoch": 4.238347601576301, "grad_norm": 1.2672561407089233, "learning_rate": 0.00047040188884359287, "loss": 3.3647, "step": 62380 }, { "epoch": 4.238687321646963, "grad_norm": 1.2920540571212769, "learning_rate": 0.00047035942383476015, "loss": 3.3725, "step": 62385 }, { "epoch": 4.239027041717625, "grad_norm": 1.318429946899414, "learning_rate": 0.00047031695882592743, "loss": 3.392, "step": 62390 }, { "epoch": 4.239366761788286, "grad_norm": 1.4720556735992432, "learning_rate": 0.00047027449381709476, "loss": 3.5275, "step": 62395 }, { "epoch": 4.239706481858948, "grad_norm": 1.4578183889389038, "learning_rate": 0.000470232028808262, "loss": 3.363, "step": 62400 }, { "epoch": 4.24004620192961, "grad_norm": 1.3955074548721313, "learning_rate": 0.00047018956379942927, "loss": 3.4132, "step": 62405 }, { "epoch": 4.2403859220002715, "grad_norm": 1.3112496137619019, "learning_rate": 0.0004701470987905966, "loss": 3.5598, "step": 62410 }, { "epoch": 4.240725642070934, "grad_norm": 1.2302964925765991, "learning_rate": 0.00047010463378176383, "loss": 3.493, "step": 62415 }, { "epoch": 4.241065362141596, "grad_norm": 1.0250039100646973, "learning_rate": 0.0004700621687729311, "loss": 3.5603, "step": 62420 }, { "epoch": 4.241405082212257, "grad_norm": 1.4904699325561523, "learning_rate": 0.0004700197037640984, "loss": 3.5263, "step": 62425 }, { "epoch": 4.241744802282919, "grad_norm": 1.1581529378890991, "learning_rate": 0.00046997723875526567, "loss": 3.1392, "step": 62430 }, { "epoch": 4.242084522353581, "grad_norm": 1.4200928211212158, "learning_rate": 0.00046993477374643295, "loss": 3.3371, "step": 62435 }, { "epoch": 4.242424242424242, "grad_norm": 1.1955357789993286, "learning_rate": 
0.00046989230873760023, "loss": 3.1518, "step": 62440 }, { "epoch": 4.242763962494904, "grad_norm": 1.3348897695541382, "learning_rate": 0.0004698498437287675, "loss": 3.6584, "step": 62445 }, { "epoch": 4.243103682565566, "grad_norm": 1.4055559635162354, "learning_rate": 0.0004698073787199348, "loss": 3.3582, "step": 62450 }, { "epoch": 4.2434434026362275, "grad_norm": 1.3028666973114014, "learning_rate": 0.00046976491371110207, "loss": 3.5437, "step": 62455 }, { "epoch": 4.24378312270689, "grad_norm": 0.8665190935134888, "learning_rate": 0.0004697224487022693, "loss": 3.4799, "step": 62460 }, { "epoch": 4.244122842777552, "grad_norm": 1.1814440488815308, "learning_rate": 0.00046967998369343663, "loss": 3.3572, "step": 62465 }, { "epoch": 4.244462562848213, "grad_norm": 1.2146735191345215, "learning_rate": 0.0004696375186846039, "loss": 3.1148, "step": 62470 }, { "epoch": 4.244802282918875, "grad_norm": 1.1155143976211548, "learning_rate": 0.00046959505367577114, "loss": 3.3144, "step": 62475 }, { "epoch": 4.245142002989537, "grad_norm": 1.9525818824768066, "learning_rate": 0.00046955258866693847, "loss": 3.1467, "step": 62480 }, { "epoch": 4.245481723060198, "grad_norm": 1.2765594720840454, "learning_rate": 0.00046951012365810575, "loss": 3.2166, "step": 62485 }, { "epoch": 4.24582144313086, "grad_norm": 1.6249724626541138, "learning_rate": 0.000469467658649273, "loss": 3.1859, "step": 62490 }, { "epoch": 4.246161163201522, "grad_norm": 1.4846274852752686, "learning_rate": 0.0004694251936404403, "loss": 3.157, "step": 62495 }, { "epoch": 4.246500883272184, "grad_norm": 1.5919705629348755, "learning_rate": 0.0004693827286316076, "loss": 3.392, "step": 62500 }, { "epoch": 4.246840603342846, "grad_norm": 1.3450405597686768, "learning_rate": 0.0004693402636227748, "loss": 3.4218, "step": 62505 }, { "epoch": 4.247180323413508, "grad_norm": 1.6674553155899048, "learning_rate": 0.0004692977986139421, "loss": 3.5744, "step": 62510 }, { "epoch": 4.247520043484169, 
"grad_norm": 1.8779816627502441, "learning_rate": 0.00046925533360510943, "loss": 3.4141, "step": 62515 }, { "epoch": 4.247859763554831, "grad_norm": 1.422730803489685, "learning_rate": 0.00046921286859627666, "loss": 3.2038, "step": 62520 }, { "epoch": 4.248199483625493, "grad_norm": 1.480230450630188, "learning_rate": 0.00046917040358744394, "loss": 3.2054, "step": 62525 }, { "epoch": 4.248539203696154, "grad_norm": 1.2641727924346924, "learning_rate": 0.00046912793857861127, "loss": 3.3726, "step": 62530 }, { "epoch": 4.248878923766816, "grad_norm": 1.3030800819396973, "learning_rate": 0.0004690854735697785, "loss": 3.2977, "step": 62535 }, { "epoch": 4.2492186438374775, "grad_norm": 1.3966912031173706, "learning_rate": 0.0004690430085609458, "loss": 3.4853, "step": 62540 }, { "epoch": 4.24955836390814, "grad_norm": 1.3938274383544922, "learning_rate": 0.00046900054355211306, "loss": 3.1808, "step": 62545 }, { "epoch": 4.249898083978802, "grad_norm": 1.3407403230667114, "learning_rate": 0.00046895807854328034, "loss": 3.3662, "step": 62550 }, { "epoch": 4.250237804049463, "grad_norm": 1.1505813598632812, "learning_rate": 0.0004689156135344476, "loss": 3.2858, "step": 62555 }, { "epoch": 4.250577524120125, "grad_norm": 1.4021713733673096, "learning_rate": 0.0004688731485256149, "loss": 3.0357, "step": 62560 }, { "epoch": 4.250917244190787, "grad_norm": 1.563905119895935, "learning_rate": 0.00046883068351678223, "loss": 3.3764, "step": 62565 }, { "epoch": 4.251256964261448, "grad_norm": 1.261899709701538, "learning_rate": 0.00046878821850794946, "loss": 3.4441, "step": 62570 }, { "epoch": 4.25159668433211, "grad_norm": 1.3259772062301636, "learning_rate": 0.00046874575349911674, "loss": 3.419, "step": 62575 }, { "epoch": 4.251936404402772, "grad_norm": 1.486903429031372, "learning_rate": 0.000468703288490284, "loss": 3.264, "step": 62580 }, { "epoch": 4.2522761244734335, "grad_norm": 1.3475781679153442, "learning_rate": 0.0004686608234814513, "loss": 3.4153, 
"step": 62585 }, { "epoch": 4.252615844544096, "grad_norm": 1.5269373655319214, "learning_rate": 0.0004686183584726186, "loss": 3.4754, "step": 62590 }, { "epoch": 4.252955564614758, "grad_norm": 1.1510549783706665, "learning_rate": 0.00046857589346378586, "loss": 3.3688, "step": 62595 }, { "epoch": 4.253295284685419, "grad_norm": 1.4620006084442139, "learning_rate": 0.00046853342845495314, "loss": 3.3449, "step": 62600 }, { "epoch": 4.253635004756081, "grad_norm": 1.903610110282898, "learning_rate": 0.0004684909634461204, "loss": 2.9901, "step": 62605 }, { "epoch": 4.253974724826743, "grad_norm": 1.4280308485031128, "learning_rate": 0.0004684484984372877, "loss": 3.3954, "step": 62610 }, { "epoch": 4.254314444897404, "grad_norm": 1.0948880910873413, "learning_rate": 0.0004684060334284549, "loss": 3.2997, "step": 62615 }, { "epoch": 4.254654164968066, "grad_norm": 1.6070566177368164, "learning_rate": 0.00046836356841962226, "loss": 3.4262, "step": 62620 }, { "epoch": 4.254993885038728, "grad_norm": 1.3821353912353516, "learning_rate": 0.00046832110341078954, "loss": 3.4323, "step": 62625 }, { "epoch": 4.2553336051093895, "grad_norm": 1.1784541606903076, "learning_rate": 0.00046827863840195676, "loss": 3.5698, "step": 62630 }, { "epoch": 4.255673325180052, "grad_norm": 1.853245496749878, "learning_rate": 0.0004682361733931241, "loss": 3.3853, "step": 62635 }, { "epoch": 4.256013045250714, "grad_norm": 1.655784010887146, "learning_rate": 0.0004681937083842914, "loss": 3.0986, "step": 62640 }, { "epoch": 4.256352765321375, "grad_norm": 1.2077313661575317, "learning_rate": 0.0004681512433754586, "loss": 3.4468, "step": 62645 }, { "epoch": 4.256692485392037, "grad_norm": 1.1637006998062134, "learning_rate": 0.0004681087783666259, "loss": 3.3492, "step": 62650 }, { "epoch": 4.257032205462699, "grad_norm": 1.2534469366073608, "learning_rate": 0.0004680663133577932, "loss": 3.5379, "step": 62655 }, { "epoch": 4.25737192553336, "grad_norm": 1.1551133394241333, 
"learning_rate": 0.00046802384834896044, "loss": 3.2333, "step": 62660 }, { "epoch": 4.257711645604022, "grad_norm": 1.1277135610580444, "learning_rate": 0.0004679813833401277, "loss": 3.5415, "step": 62665 }, { "epoch": 4.258051365674684, "grad_norm": 1.2074998617172241, "learning_rate": 0.00046793891833129506, "loss": 3.6474, "step": 62670 }, { "epoch": 4.2583910857453455, "grad_norm": 1.3041900396347046, "learning_rate": 0.0004678964533224623, "loss": 3.275, "step": 62675 }, { "epoch": 4.258730805816008, "grad_norm": 1.838153600692749, "learning_rate": 0.00046785398831362957, "loss": 3.0763, "step": 62680 }, { "epoch": 4.25907052588667, "grad_norm": 1.2432432174682617, "learning_rate": 0.00046781152330479685, "loss": 3.309, "step": 62685 }, { "epoch": 4.259410245957331, "grad_norm": 1.5704712867736816, "learning_rate": 0.0004677690582959641, "loss": 3.2621, "step": 62690 }, { "epoch": 4.259749966027993, "grad_norm": 1.254590392112732, "learning_rate": 0.0004677265932871314, "loss": 3.313, "step": 62695 }, { "epoch": 4.260089686098655, "grad_norm": 1.0585479736328125, "learning_rate": 0.0004676841282782987, "loss": 3.439, "step": 62700 }, { "epoch": 4.260429406169316, "grad_norm": 1.8835731744766235, "learning_rate": 0.00046764166326946597, "loss": 3.569, "step": 62705 }, { "epoch": 4.260769126239978, "grad_norm": 1.278185248374939, "learning_rate": 0.00046759919826063325, "loss": 3.5805, "step": 62710 }, { "epoch": 4.26110884631064, "grad_norm": 1.1337922811508179, "learning_rate": 0.0004675567332518005, "loss": 3.4649, "step": 62715 }, { "epoch": 4.2614485663813015, "grad_norm": 1.328836441040039, "learning_rate": 0.00046751426824296775, "loss": 3.2122, "step": 62720 }, { "epoch": 4.261788286451964, "grad_norm": 1.4270340204238892, "learning_rate": 0.0004674718032341351, "loss": 3.4578, "step": 62725 }, { "epoch": 4.262128006522626, "grad_norm": 1.517376184463501, "learning_rate": 0.00046742933822530237, "loss": 3.5991, "step": 62730 }, { "epoch": 
4.262467726593287, "grad_norm": 1.3183549642562866, "learning_rate": 0.00046738687321646965, "loss": 3.4912, "step": 62735 }, { "epoch": 4.262807446663949, "grad_norm": 1.2652667760849, "learning_rate": 0.0004673444082076369, "loss": 3.3588, "step": 62740 }, { "epoch": 4.263147166734611, "grad_norm": 1.3792067766189575, "learning_rate": 0.0004673019431988042, "loss": 3.5533, "step": 62745 }, { "epoch": 4.263486886805272, "grad_norm": 1.7062686681747437, "learning_rate": 0.0004672594781899715, "loss": 3.343, "step": 62750 }, { "epoch": 4.263826606875934, "grad_norm": 1.2975469827651978, "learning_rate": 0.0004672170131811387, "loss": 3.345, "step": 62755 }, { "epoch": 4.264166326946596, "grad_norm": 1.3583589792251587, "learning_rate": 0.00046717454817230605, "loss": 3.4928, "step": 62760 }, { "epoch": 4.2645060470172576, "grad_norm": 1.358707308769226, "learning_rate": 0.0004671320831634733, "loss": 3.3606, "step": 62765 }, { "epoch": 4.26484576708792, "grad_norm": 1.1624746322631836, "learning_rate": 0.00046708961815464055, "loss": 3.3033, "step": 62770 }, { "epoch": 4.265185487158582, "grad_norm": 1.4381095170974731, "learning_rate": 0.0004670471531458079, "loss": 3.4098, "step": 62775 }, { "epoch": 4.265525207229243, "grad_norm": 1.364314317703247, "learning_rate": 0.00046700468813697517, "loss": 3.1568, "step": 62780 }, { "epoch": 4.265864927299905, "grad_norm": 1.424428105354309, "learning_rate": 0.0004669622231281424, "loss": 3.304, "step": 62785 }, { "epoch": 4.266204647370567, "grad_norm": 1.5930920839309692, "learning_rate": 0.0004669197581193097, "loss": 3.4017, "step": 62790 }, { "epoch": 4.266544367441228, "grad_norm": 1.2756677865982056, "learning_rate": 0.000466877293110477, "loss": 3.3478, "step": 62795 }, { "epoch": 4.26688408751189, "grad_norm": 1.3057950735092163, "learning_rate": 0.00046683482810164423, "loss": 3.3869, "step": 62800 }, { "epoch": 4.267223807582552, "grad_norm": 1.3443981409072876, "learning_rate": 0.0004667923630928115, "loss": 
3.3518, "step": 62805 }, { "epoch": 4.267563527653214, "grad_norm": 1.5669634342193604, "learning_rate": 0.00046674989808397885, "loss": 3.2372, "step": 62810 }, { "epoch": 4.267903247723876, "grad_norm": 1.4110767841339111, "learning_rate": 0.0004667074330751461, "loss": 3.7025, "step": 62815 }, { "epoch": 4.268242967794538, "grad_norm": 1.3284602165222168, "learning_rate": 0.00046666496806631335, "loss": 2.9887, "step": 62820 }, { "epoch": 4.268582687865199, "grad_norm": 1.254332184791565, "learning_rate": 0.0004666225030574807, "loss": 3.538, "step": 62825 }, { "epoch": 4.268922407935861, "grad_norm": 1.2662129402160645, "learning_rate": 0.0004665800380486479, "loss": 3.6198, "step": 62830 }, { "epoch": 4.269262128006522, "grad_norm": 1.1402523517608643, "learning_rate": 0.0004665375730398152, "loss": 3.4414, "step": 62835 }, { "epoch": 4.269601848077184, "grad_norm": 1.1804009675979614, "learning_rate": 0.0004664951080309825, "loss": 3.423, "step": 62840 }, { "epoch": 4.269941568147846, "grad_norm": 1.383034586906433, "learning_rate": 0.00046645264302214975, "loss": 3.5897, "step": 62845 }, { "epoch": 4.2702812882185075, "grad_norm": 1.9614887237548828, "learning_rate": 0.00046641017801331703, "loss": 3.4527, "step": 62850 }, { "epoch": 4.27062100828917, "grad_norm": 1.2161967754364014, "learning_rate": 0.0004663677130044843, "loss": 3.5499, "step": 62855 }, { "epoch": 4.270960728359832, "grad_norm": 1.2732460498809814, "learning_rate": 0.0004663252479956516, "loss": 3.2972, "step": 62860 }, { "epoch": 4.271300448430493, "grad_norm": 1.234459638595581, "learning_rate": 0.0004662827829868189, "loss": 3.6532, "step": 62865 }, { "epoch": 4.271640168501155, "grad_norm": 1.1345535516738892, "learning_rate": 0.00046624031797798615, "loss": 3.3084, "step": 62870 }, { "epoch": 4.271979888571817, "grad_norm": 1.0276291370391846, "learning_rate": 0.0004661978529691534, "loss": 3.4645, "step": 62875 }, { "epoch": 4.272319608642478, "grad_norm": 1.4383373260498047, 
"learning_rate": 0.0004661553879603207, "loss": 3.104, "step": 62880 }, { "epoch": 4.27265932871314, "grad_norm": 1.5216503143310547, "learning_rate": 0.000466112922951488, "loss": 3.4042, "step": 62885 }, { "epoch": 4.272999048783802, "grad_norm": 1.7449954748153687, "learning_rate": 0.0004660704579426552, "loss": 3.5617, "step": 62890 }, { "epoch": 4.2733387688544635, "grad_norm": 1.490777611732483, "learning_rate": 0.00046602799293382255, "loss": 3.4106, "step": 62895 }, { "epoch": 4.273678488925126, "grad_norm": 1.1062932014465332, "learning_rate": 0.00046598552792498983, "loss": 3.1355, "step": 62900 }, { "epoch": 4.274018208995788, "grad_norm": 1.32659912109375, "learning_rate": 0.0004659430629161571, "loss": 3.3713, "step": 62905 }, { "epoch": 4.274357929066449, "grad_norm": 1.0946048498153687, "learning_rate": 0.00046590059790732434, "loss": 3.322, "step": 62910 }, { "epoch": 4.274697649137111, "grad_norm": 1.3185824155807495, "learning_rate": 0.0004658581328984917, "loss": 3.2707, "step": 62915 }, { "epoch": 4.275037369207773, "grad_norm": 0.9966199994087219, "learning_rate": 0.00046581566788965895, "loss": 3.5389, "step": 62920 }, { "epoch": 4.275377089278434, "grad_norm": 1.211066722869873, "learning_rate": 0.0004657732028808262, "loss": 3.3526, "step": 62925 }, { "epoch": 4.275716809349096, "grad_norm": 1.3469927310943604, "learning_rate": 0.0004657307378719935, "loss": 3.4676, "step": 62930 }, { "epoch": 4.276056529419758, "grad_norm": 1.2966041564941406, "learning_rate": 0.0004656882728631608, "loss": 3.2152, "step": 62935 }, { "epoch": 4.2763962494904195, "grad_norm": 1.2371505498886108, "learning_rate": 0.000465645807854328, "loss": 3.4293, "step": 62940 }, { "epoch": 4.276735969561082, "grad_norm": 1.7468621730804443, "learning_rate": 0.0004656033428454953, "loss": 3.3049, "step": 62945 }, { "epoch": 4.277075689631744, "grad_norm": 1.2237616777420044, "learning_rate": 0.00046556087783666264, "loss": 3.443, "step": 62950 }, { "epoch": 
4.277415409702405, "grad_norm": 1.1973766088485718, "learning_rate": 0.00046551841282782986, "loss": 3.4296, "step": 62955 }, { "epoch": 4.277755129773067, "grad_norm": 1.3021925687789917, "learning_rate": 0.00046547594781899714, "loss": 3.325, "step": 62960 }, { "epoch": 4.278094849843729, "grad_norm": 1.2427996397018433, "learning_rate": 0.0004654334828101645, "loss": 3.5635, "step": 62965 }, { "epoch": 4.27843456991439, "grad_norm": 1.5121124982833862, "learning_rate": 0.0004653910178013317, "loss": 3.249, "step": 62970 }, { "epoch": 4.278774289985052, "grad_norm": 1.1963225603103638, "learning_rate": 0.000465348552792499, "loss": 3.4137, "step": 62975 }, { "epoch": 4.279114010055714, "grad_norm": 1.323372483253479, "learning_rate": 0.00046530608778366626, "loss": 3.5522, "step": 62980 }, { "epoch": 4.2794537301263755, "grad_norm": 1.0513854026794434, "learning_rate": 0.00046526362277483354, "loss": 3.3406, "step": 62985 }, { "epoch": 4.279793450197038, "grad_norm": 0.9803798198699951, "learning_rate": 0.0004652211577660008, "loss": 3.3322, "step": 62990 }, { "epoch": 4.2801331702677, "grad_norm": 1.1901655197143555, "learning_rate": 0.0004651786927571681, "loss": 3.0698, "step": 62995 }, { "epoch": 4.280472890338361, "grad_norm": 1.5067336559295654, "learning_rate": 0.0004651362277483354, "loss": 3.4263, "step": 63000 }, { "epoch": 4.280812610409023, "grad_norm": 1.0991334915161133, "learning_rate": 0.00046509376273950266, "loss": 3.6039, "step": 63005 }, { "epoch": 4.281152330479685, "grad_norm": null, "learning_rate": 0.0004650597907324365, "loss": 3.5803, "step": 63010 }, { "epoch": 4.281492050550346, "grad_norm": 1.2036000490188599, "learning_rate": 0.00046501732572360377, "loss": 3.4672, "step": 63015 }, { "epoch": 4.281831770621008, "grad_norm": 1.485710620880127, "learning_rate": 0.00046497486071477105, "loss": 3.6161, "step": 63020 }, { "epoch": 4.28217149069167, "grad_norm": 1.1794077157974243, "learning_rate": 0.0004649323957059383, "loss": 3.4687, 
"step": 63025 }, { "epoch": 4.2825112107623315, "grad_norm": 1.4556739330291748, "learning_rate": 0.0004648899306971056, "loss": 3.3764, "step": 63030 }, { "epoch": 4.282850930832994, "grad_norm": 1.2190824747085571, "learning_rate": 0.00046484746568827283, "loss": 3.5529, "step": 63035 }, { "epoch": 4.283190650903656, "grad_norm": 1.3667316436767578, "learning_rate": 0.00046480500067944017, "loss": 3.3488, "step": 63040 }, { "epoch": 4.283530370974317, "grad_norm": 1.3193769454956055, "learning_rate": 0.00046476253567060745, "loss": 3.4897, "step": 63045 }, { "epoch": 4.283870091044979, "grad_norm": 1.4435317516326904, "learning_rate": 0.00046472007066177467, "loss": 3.413, "step": 63050 }, { "epoch": 4.284209811115641, "grad_norm": 1.15815269947052, "learning_rate": 0.000464677605652942, "loss": 3.5532, "step": 63055 }, { "epoch": 4.284549531186302, "grad_norm": 1.2995517253875732, "learning_rate": 0.0004646351406441093, "loss": 3.5192, "step": 63060 }, { "epoch": 4.284889251256964, "grad_norm": 1.3764938116073608, "learning_rate": 0.0004645926756352765, "loss": 3.5965, "step": 63065 }, { "epoch": 4.285228971327626, "grad_norm": 1.152827262878418, "learning_rate": 0.0004645502106264438, "loss": 3.3741, "step": 63070 }, { "epoch": 4.2855686913982876, "grad_norm": 1.7899612188339233, "learning_rate": 0.0004645077456176111, "loss": 3.6323, "step": 63075 }, { "epoch": 4.28590841146895, "grad_norm": 1.4074734449386597, "learning_rate": 0.00046446528060877835, "loss": 3.5834, "step": 63080 }, { "epoch": 4.286248131539612, "grad_norm": 1.2692443132400513, "learning_rate": 0.00046442281559994563, "loss": 3.3179, "step": 63085 }, { "epoch": 4.286587851610273, "grad_norm": 1.1628663539886475, "learning_rate": 0.00046438035059111297, "loss": 3.3083, "step": 63090 }, { "epoch": 4.286927571680935, "grad_norm": 1.4604487419128418, "learning_rate": 0.0004643378855822802, "loss": 2.9114, "step": 63095 }, { "epoch": 4.287267291751597, "grad_norm": 1.4891375303268433, 
"learning_rate": 0.0004642954205734475, "loss": 3.4413, "step": 63100 }, { "epoch": 4.287607011822258, "grad_norm": 1.109410047531128, "learning_rate": 0.00046425295556461475, "loss": 3.2138, "step": 63105 }, { "epoch": 4.28794673189292, "grad_norm": 1.721307396888733, "learning_rate": 0.0004642104905557821, "loss": 3.4474, "step": 63110 }, { "epoch": 4.288286451963582, "grad_norm": 1.1375139951705933, "learning_rate": 0.0004641680255469493, "loss": 3.3852, "step": 63115 }, { "epoch": 4.288626172034244, "grad_norm": 1.7823206186294556, "learning_rate": 0.0004641255605381166, "loss": 3.4725, "step": 63120 }, { "epoch": 4.288965892104906, "grad_norm": 1.599578857421875, "learning_rate": 0.00046408309552928393, "loss": 3.7392, "step": 63125 }, { "epoch": 4.289305612175568, "grad_norm": 1.5431157350540161, "learning_rate": 0.00046404063052045115, "loss": 3.3455, "step": 63130 }, { "epoch": 4.289645332246229, "grad_norm": 1.1283290386199951, "learning_rate": 0.00046399816551161843, "loss": 3.4281, "step": 63135 }, { "epoch": 4.289985052316891, "grad_norm": 1.3596291542053223, "learning_rate": 0.0004639557005027857, "loss": 3.2878, "step": 63140 }, { "epoch": 4.290324772387553, "grad_norm": 1.132075309753418, "learning_rate": 0.000463913235493953, "loss": 3.4841, "step": 63145 }, { "epoch": 4.290664492458214, "grad_norm": 1.3650346994400024, "learning_rate": 0.0004638707704851203, "loss": 3.6186, "step": 63150 }, { "epoch": 4.291004212528876, "grad_norm": 1.5499463081359863, "learning_rate": 0.00046382830547628755, "loss": 3.4963, "step": 63155 }, { "epoch": 4.291343932599538, "grad_norm": 1.21547269821167, "learning_rate": 0.00046378584046745483, "loss": 3.5087, "step": 63160 }, { "epoch": 4.2916836526702, "grad_norm": 1.5163811445236206, "learning_rate": 0.0004637433754586221, "loss": 3.1248, "step": 63165 }, { "epoch": 4.292023372740862, "grad_norm": 1.5668355226516724, "learning_rate": 0.0004637009104497894, "loss": 3.497, "step": 63170 }, { "epoch": 
4.292363092811524, "grad_norm": 1.178851842880249, "learning_rate": 0.0004636584454409566, "loss": 3.3985, "step": 63175 }, { "epoch": 4.292702812882185, "grad_norm": 1.3933796882629395, "learning_rate": 0.00046361598043212395, "loss": 3.3449, "step": 63180 }, { "epoch": 4.293042532952847, "grad_norm": 1.3657363653182983, "learning_rate": 0.00046357351542329123, "loss": 3.3587, "step": 63185 }, { "epoch": 4.293382253023509, "grad_norm": 1.5643951892852783, "learning_rate": 0.00046353105041445846, "loss": 3.4683, "step": 63190 }, { "epoch": 4.29372197309417, "grad_norm": 0.9634292125701904, "learning_rate": 0.0004634885854056258, "loss": 3.5882, "step": 63195 }, { "epoch": 4.294061693164832, "grad_norm": 1.5004756450653076, "learning_rate": 0.0004634461203967931, "loss": 3.1536, "step": 63200 }, { "epoch": 4.294401413235494, "grad_norm": 1.5302306413650513, "learning_rate": 0.0004634036553879603, "loss": 3.4189, "step": 63205 }, { "epoch": 4.294741133306156, "grad_norm": 1.3412508964538574, "learning_rate": 0.0004633611903791276, "loss": 3.4012, "step": 63210 }, { "epoch": 4.295080853376818, "grad_norm": 1.3403722047805786, "learning_rate": 0.0004633187253702949, "loss": 3.4116, "step": 63215 }, { "epoch": 4.29542057344748, "grad_norm": 1.393913984298706, "learning_rate": 0.00046327626036146214, "loss": 3.4966, "step": 63220 }, { "epoch": 4.295760293518141, "grad_norm": 1.4737869501113892, "learning_rate": 0.0004632337953526294, "loss": 3.3974, "step": 63225 }, { "epoch": 4.296100013588803, "grad_norm": 1.4036048650741577, "learning_rate": 0.00046319133034379676, "loss": 3.4143, "step": 63230 }, { "epoch": 4.296439733659464, "grad_norm": 1.587098240852356, "learning_rate": 0.000463148865334964, "loss": 3.1928, "step": 63235 }, { "epoch": 4.296779453730126, "grad_norm": 1.5859569311141968, "learning_rate": 0.00046310640032613126, "loss": 3.3018, "step": 63240 }, { "epoch": 4.297119173800788, "grad_norm": 1.0732978582382202, "learning_rate": 0.00046306393531729854, 
"loss": 3.474, "step": 63245 }, { "epoch": 4.2974588938714495, "grad_norm": 1.0463718175888062, "learning_rate": 0.0004630214703084658, "loss": 3.5692, "step": 63250 }, { "epoch": 4.297798613942112, "grad_norm": 1.3691983222961426, "learning_rate": 0.0004629790052996331, "loss": 3.4642, "step": 63255 }, { "epoch": 4.298138334012774, "grad_norm": 1.3291528224945068, "learning_rate": 0.0004629365402908004, "loss": 3.3526, "step": 63260 }, { "epoch": 4.298478054083435, "grad_norm": 1.4298009872436523, "learning_rate": 0.00046289407528196766, "loss": 3.6693, "step": 63265 }, { "epoch": 4.298817774154097, "grad_norm": 1.307428002357483, "learning_rate": 0.00046285161027313494, "loss": 3.1688, "step": 63270 }, { "epoch": 4.299157494224759, "grad_norm": 1.3523882627487183, "learning_rate": 0.0004628091452643022, "loss": 3.5951, "step": 63275 }, { "epoch": 4.29949721429542, "grad_norm": 1.1524620056152344, "learning_rate": 0.00046276668025546956, "loss": 3.4976, "step": 63280 }, { "epoch": 4.299836934366082, "grad_norm": 1.3550256490707397, "learning_rate": 0.0004627242152466368, "loss": 3.3866, "step": 63285 }, { "epoch": 4.300176654436744, "grad_norm": 1.2754970788955688, "learning_rate": 0.00046268175023780406, "loss": 3.5024, "step": 63290 }, { "epoch": 4.3005163745074055, "grad_norm": 1.3502808809280396, "learning_rate": 0.00046263928522897134, "loss": 3.5418, "step": 63295 }, { "epoch": 4.300856094578068, "grad_norm": 1.574785828590393, "learning_rate": 0.0004625968202201386, "loss": 3.2892, "step": 63300 }, { "epoch": 4.30119581464873, "grad_norm": 1.2126362323760986, "learning_rate": 0.0004625543552113059, "loss": 3.5488, "step": 63305 }, { "epoch": 4.301535534719391, "grad_norm": 1.7388122081756592, "learning_rate": 0.0004625118902024732, "loss": 3.1248, "step": 63310 }, { "epoch": 4.301875254790053, "grad_norm": 1.3620100021362305, "learning_rate": 0.00046246942519364046, "loss": 3.3951, "step": 63315 }, { "epoch": 4.302214974860715, "grad_norm": 
1.2572104930877686, "learning_rate": 0.00046242696018480774, "loss": 3.6028, "step": 63320 }, { "epoch": 4.302554694931376, "grad_norm": 1.0714646577835083, "learning_rate": 0.000462384495175975, "loss": 3.306, "step": 63325 }, { "epoch": 4.302894415002038, "grad_norm": 1.1261545419692993, "learning_rate": 0.00046234203016714225, "loss": 3.3908, "step": 63330 }, { "epoch": 4.3032341350727, "grad_norm": 1.4868459701538086, "learning_rate": 0.0004622995651583096, "loss": 3.2952, "step": 63335 }, { "epoch": 4.3035738551433615, "grad_norm": 1.7908024787902832, "learning_rate": 0.00046225710014947686, "loss": 3.241, "step": 63340 }, { "epoch": 4.303913575214024, "grad_norm": 1.312086820602417, "learning_rate": 0.0004622146351406441, "loss": 3.0486, "step": 63345 }, { "epoch": 4.304253295284686, "grad_norm": 1.2014628648757935, "learning_rate": 0.0004621721701318114, "loss": 3.4292, "step": 63350 }, { "epoch": 4.304593015355347, "grad_norm": 4.160722255706787, "learning_rate": 0.0004621297051229787, "loss": 3.1752, "step": 63355 }, { "epoch": 4.304932735426009, "grad_norm": 0.9342489838600159, "learning_rate": 0.00046208724011414593, "loss": 3.7316, "step": 63360 }, { "epoch": 4.305272455496671, "grad_norm": 1.5888255834579468, "learning_rate": 0.0004620447751053132, "loss": 3.5479, "step": 63365 }, { "epoch": 4.305612175567332, "grad_norm": 1.4398101568222046, "learning_rate": 0.00046200231009648054, "loss": 3.4381, "step": 63370 }, { "epoch": 4.305951895637994, "grad_norm": 1.3756790161132812, "learning_rate": 0.00046195984508764777, "loss": 3.4762, "step": 63375 }, { "epoch": 4.306291615708656, "grad_norm": 1.3620630502700806, "learning_rate": 0.00046191738007881505, "loss": 3.1369, "step": 63380 }, { "epoch": 4.306631335779318, "grad_norm": 1.2598527669906616, "learning_rate": 0.0004618749150699824, "loss": 3.257, "step": 63385 }, { "epoch": 4.30697105584998, "grad_norm": 1.405390977859497, "learning_rate": 0.0004618324500611496, "loss": 3.3792, "step": 63390 }, { 
"epoch": 4.307310775920642, "grad_norm": 1.2563350200653076, "learning_rate": 0.0004617899850523169, "loss": 3.4149, "step": 63395 }, { "epoch": 4.307650495991303, "grad_norm": 1.180651307106018, "learning_rate": 0.00046174752004348417, "loss": 3.5656, "step": 63400 }, { "epoch": 4.307990216061965, "grad_norm": 1.2731667757034302, "learning_rate": 0.00046170505503465145, "loss": 3.207, "step": 63405 }, { "epoch": 4.308329936132627, "grad_norm": 1.161744236946106, "learning_rate": 0.00046166259002581873, "loss": 3.404, "step": 63410 }, { "epoch": 4.308669656203288, "grad_norm": 1.1306183338165283, "learning_rate": 0.000461620125016986, "loss": 3.5175, "step": 63415 }, { "epoch": 4.30900937627395, "grad_norm": 1.3364665508270264, "learning_rate": 0.0004615776600081533, "loss": 3.4686, "step": 63420 }, { "epoch": 4.309349096344612, "grad_norm": 1.0978672504425049, "learning_rate": 0.00046153519499932057, "loss": 3.5132, "step": 63425 }, { "epoch": 4.309688816415274, "grad_norm": 1.2370350360870361, "learning_rate": 0.00046149272999048785, "loss": 3.5204, "step": 63430 }, { "epoch": 4.310028536485936, "grad_norm": 1.0641427040100098, "learning_rate": 0.0004614502649816551, "loss": 3.4917, "step": 63435 }, { "epoch": 4.310368256556598, "grad_norm": 1.3029541969299316, "learning_rate": 0.0004614077999728224, "loss": 3.4051, "step": 63440 }, { "epoch": 4.310707976627259, "grad_norm": 1.292567253112793, "learning_rate": 0.0004613653349639897, "loss": 3.1948, "step": 63445 }, { "epoch": 4.311047696697921, "grad_norm": 1.2960752248764038, "learning_rate": 0.00046132286995515697, "loss": 3.5143, "step": 63450 }, { "epoch": 4.311387416768583, "grad_norm": 1.1724880933761597, "learning_rate": 0.00046128040494632425, "loss": 3.6897, "step": 63455 }, { "epoch": 4.311727136839244, "grad_norm": 1.6634289026260376, "learning_rate": 0.00046123793993749153, "loss": 3.4978, "step": 63460 }, { "epoch": 4.312066856909906, "grad_norm": 1.3170921802520752, "learning_rate": 
0.0004611954749286588, "loss": 3.2764, "step": 63465 }, { "epoch": 4.312406576980568, "grad_norm": 1.3798694610595703, "learning_rate": 0.00046115300991982604, "loss": 3.4371, "step": 63470 }, { "epoch": 4.31274629705123, "grad_norm": 1.5344914197921753, "learning_rate": 0.00046111054491099337, "loss": 3.499, "step": 63475 }, { "epoch": 4.313086017121892, "grad_norm": 1.4317978620529175, "learning_rate": 0.00046106807990216065, "loss": 3.469, "step": 63480 }, { "epoch": 4.313425737192554, "grad_norm": 1.3035799264907837, "learning_rate": 0.0004610256148933279, "loss": 3.325, "step": 63485 }, { "epoch": 4.313765457263215, "grad_norm": 1.3360426425933838, "learning_rate": 0.0004609831498844952, "loss": 3.4451, "step": 63490 }, { "epoch": 4.314105177333877, "grad_norm": 1.2862215042114258, "learning_rate": 0.0004609406848756625, "loss": 3.2692, "step": 63495 }, { "epoch": 4.314444897404539, "grad_norm": 1.0657470226287842, "learning_rate": 0.0004608982198668297, "loss": 3.6263, "step": 63500 }, { "epoch": 4.3147846174752, "grad_norm": 1.2840368747711182, "learning_rate": 0.000460855754857997, "loss": 3.4963, "step": 63505 }, { "epoch": 4.315124337545862, "grad_norm": 1.3302656412124634, "learning_rate": 0.00046081328984916433, "loss": 3.5065, "step": 63510 }, { "epoch": 4.3154640576165235, "grad_norm": 1.3758337497711182, "learning_rate": 0.00046077082484033156, "loss": 3.633, "step": 63515 }, { "epoch": 4.315803777687186, "grad_norm": 1.4231865406036377, "learning_rate": 0.00046072835983149884, "loss": 3.2968, "step": 63520 }, { "epoch": 4.316143497757848, "grad_norm": 1.0393003225326538, "learning_rate": 0.00046068589482266617, "loss": 3.4048, "step": 63525 }, { "epoch": 4.316483217828509, "grad_norm": 1.3695831298828125, "learning_rate": 0.0004606434298138334, "loss": 3.1624, "step": 63530 }, { "epoch": 4.316822937899171, "grad_norm": 1.620760440826416, "learning_rate": 0.0004606009648050007, "loss": 3.4836, "step": 63535 }, { "epoch": 4.317162657969833, 
"grad_norm": 1.1874439716339111, "learning_rate": 0.000460558499796168, "loss": 3.3762, "step": 63540 }, { "epoch": 4.317502378040494, "grad_norm": 1.2800967693328857, "learning_rate": 0.00046051603478733524, "loss": 3.4195, "step": 63545 }, { "epoch": 4.317842098111156, "grad_norm": 1.0873597860336304, "learning_rate": 0.0004604735697785025, "loss": 3.3513, "step": 63550 }, { "epoch": 4.318181818181818, "grad_norm": 1.2821474075317383, "learning_rate": 0.0004604311047696698, "loss": 3.5673, "step": 63555 }, { "epoch": 4.3185215382524795, "grad_norm": 1.3070149421691895, "learning_rate": 0.0004603886397608371, "loss": 3.5198, "step": 63560 }, { "epoch": 4.318861258323142, "grad_norm": 1.3244353532791138, "learning_rate": 0.00046034617475200436, "loss": 3.4029, "step": 63565 }, { "epoch": 4.319200978393804, "grad_norm": 1.27135169506073, "learning_rate": 0.00046030370974317164, "loss": 3.4092, "step": 63570 }, { "epoch": 4.319540698464465, "grad_norm": 1.1785807609558105, "learning_rate": 0.0004602612447343389, "loss": 3.2278, "step": 63575 }, { "epoch": 4.319880418535127, "grad_norm": 1.4408326148986816, "learning_rate": 0.0004602187797255062, "loss": 3.5492, "step": 63580 }, { "epoch": 4.320220138605789, "grad_norm": 1.111560583114624, "learning_rate": 0.0004601763147166735, "loss": 3.5384, "step": 63585 }, { "epoch": 4.32055985867645, "grad_norm": 1.3733376264572144, "learning_rate": 0.0004601338497078407, "loss": 3.2695, "step": 63590 }, { "epoch": 4.320899578747112, "grad_norm": 1.191686749458313, "learning_rate": 0.00046009138469900804, "loss": 3.2061, "step": 63595 }, { "epoch": 4.321239298817774, "grad_norm": 1.2554473876953125, "learning_rate": 0.0004600489196901753, "loss": 3.4276, "step": 63600 }, { "epoch": 4.3215790188884355, "grad_norm": 1.3840694427490234, "learning_rate": 0.00046000645468134254, "loss": 3.4785, "step": 63605 }, { "epoch": 4.321918738959098, "grad_norm": 1.526422142982483, "learning_rate": 0.0004599639896725099, "loss": 3.3143, 
"step": 63610 }, { "epoch": 4.32225845902976, "grad_norm": 1.1810191869735718, "learning_rate": 0.00045992152466367716, "loss": 3.3688, "step": 63615 }, { "epoch": 4.322598179100421, "grad_norm": 1.1663683652877808, "learning_rate": 0.00045987905965484444, "loss": 3.3886, "step": 63620 }, { "epoch": 4.322937899171083, "grad_norm": 1.1709576845169067, "learning_rate": 0.00045983659464601166, "loss": 3.327, "step": 63625 }, { "epoch": 4.323277619241745, "grad_norm": 1.2971194982528687, "learning_rate": 0.000459794129637179, "loss": 3.5361, "step": 63630 }, { "epoch": 4.323617339312406, "grad_norm": 1.4131428003311157, "learning_rate": 0.0004597516646283463, "loss": 3.1172, "step": 63635 }, { "epoch": 4.323957059383068, "grad_norm": 1.0279704332351685, "learning_rate": 0.0004597091996195135, "loss": 3.4614, "step": 63640 }, { "epoch": 4.32429677945373, "grad_norm": 1.3219482898712158, "learning_rate": 0.00045966673461068084, "loss": 3.4696, "step": 63645 }, { "epoch": 4.3246364995243916, "grad_norm": 1.1444714069366455, "learning_rate": 0.0004596242696018481, "loss": 3.3991, "step": 63650 }, { "epoch": 4.324976219595054, "grad_norm": 1.3215612173080444, "learning_rate": 0.00045958180459301534, "loss": 3.3225, "step": 63655 }, { "epoch": 4.325315939665716, "grad_norm": 1.3266704082489014, "learning_rate": 0.0004595393395841826, "loss": 3.2226, "step": 63660 }, { "epoch": 4.325655659736377, "grad_norm": 1.468458890914917, "learning_rate": 0.00045949687457534996, "loss": 3.6887, "step": 63665 }, { "epoch": 4.325995379807039, "grad_norm": 1.1829838752746582, "learning_rate": 0.0004594544095665172, "loss": 3.4088, "step": 63670 }, { "epoch": 4.326335099877701, "grad_norm": 1.072967529296875, "learning_rate": 0.00045941194455768447, "loss": 3.44, "step": 63675 }, { "epoch": 4.326674819948362, "grad_norm": 1.3880598545074463, "learning_rate": 0.0004593694795488518, "loss": 3.6878, "step": 63680 }, { "epoch": 4.327014540019024, "grad_norm": 1.0746303796768188, 
"learning_rate": 0.000459327014540019, "loss": 3.4257, "step": 63685 }, { "epoch": 4.327354260089686, "grad_norm": 1.1338608264923096, "learning_rate": 0.0004592845495311863, "loss": 3.5192, "step": 63690 }, { "epoch": 4.327693980160348, "grad_norm": 1.2600938081741333, "learning_rate": 0.0004592420845223536, "loss": 3.4723, "step": 63695 }, { "epoch": 4.32803370023101, "grad_norm": 1.2213491201400757, "learning_rate": 0.00045919961951352087, "loss": 3.2007, "step": 63700 }, { "epoch": 4.328373420301672, "grad_norm": 1.2344661951065063, "learning_rate": 0.00045915715450468815, "loss": 3.3449, "step": 63705 }, { "epoch": 4.328713140372333, "grad_norm": 1.1075787544250488, "learning_rate": 0.0004591146894958554, "loss": 3.4057, "step": 63710 }, { "epoch": 4.329052860442995, "grad_norm": 1.149979591369629, "learning_rate": 0.0004590722244870227, "loss": 3.0884, "step": 63715 }, { "epoch": 4.329392580513657, "grad_norm": 1.0782521963119507, "learning_rate": 0.00045902975947819, "loss": 3.3962, "step": 63720 }, { "epoch": 4.329732300584318, "grad_norm": 1.146954894065857, "learning_rate": 0.00045898729446935727, "loss": 3.1246, "step": 63725 }, { "epoch": 4.33007202065498, "grad_norm": 1.1545759439468384, "learning_rate": 0.0004589448294605245, "loss": 3.2386, "step": 63730 }, { "epoch": 4.330411740725642, "grad_norm": 1.525982141494751, "learning_rate": 0.0004589023644516918, "loss": 3.0275, "step": 63735 }, { "epoch": 4.330751460796304, "grad_norm": 1.183767557144165, "learning_rate": 0.0004588598994428591, "loss": 3.2861, "step": 63740 }, { "epoch": 4.331091180866966, "grad_norm": 1.6642348766326904, "learning_rate": 0.00045881743443402633, "loss": 3.0507, "step": 63745 }, { "epoch": 4.331430900937628, "grad_norm": 1.2137526273727417, "learning_rate": 0.00045877496942519367, "loss": 3.5584, "step": 63750 }, { "epoch": 4.331770621008289, "grad_norm": 1.4041911363601685, "learning_rate": 0.00045873250441636095, "loss": 3.2207, "step": 63755 }, { "epoch": 
4.332110341078951, "grad_norm": 1.23543381690979, "learning_rate": 0.00045869003940752817, "loss": 3.2651, "step": 63760 }, { "epoch": 4.332450061149613, "grad_norm": 1.3730084896087646, "learning_rate": 0.00045864757439869545, "loss": 3.1873, "step": 63765 }, { "epoch": 4.332789781220274, "grad_norm": 0.9998545050621033, "learning_rate": 0.0004586051093898628, "loss": 3.3412, "step": 63770 }, { "epoch": 4.333129501290936, "grad_norm": 1.7711031436920166, "learning_rate": 0.00045856264438103, "loss": 3.3521, "step": 63775 }, { "epoch": 4.333469221361598, "grad_norm": 1.1917164325714111, "learning_rate": 0.0004585201793721973, "loss": 3.1531, "step": 63780 }, { "epoch": 4.33380894143226, "grad_norm": 1.2262145280838013, "learning_rate": 0.0004584777143633646, "loss": 3.8422, "step": 63785 }, { "epoch": 4.334148661502922, "grad_norm": 1.0253173112869263, "learning_rate": 0.0004584352493545319, "loss": 3.2944, "step": 63790 }, { "epoch": 4.334488381573584, "grad_norm": 1.5113691091537476, "learning_rate": 0.00045839278434569913, "loss": 3.3053, "step": 63795 }, { "epoch": 4.334828101644245, "grad_norm": 1.4820194244384766, "learning_rate": 0.0004583503193368664, "loss": 3.0725, "step": 63800 }, { "epoch": 4.335167821714907, "grad_norm": 1.3057631254196167, "learning_rate": 0.00045830785432803375, "loss": 3.4731, "step": 63805 }, { "epoch": 4.335507541785569, "grad_norm": 1.571965217590332, "learning_rate": 0.000458265389319201, "loss": 3.0762, "step": 63810 }, { "epoch": 4.33584726185623, "grad_norm": 1.2855002880096436, "learning_rate": 0.00045822292431036825, "loss": 3.6508, "step": 63815 }, { "epoch": 4.336186981926892, "grad_norm": 1.4002394676208496, "learning_rate": 0.0004581804593015356, "loss": 3.371, "step": 63820 }, { "epoch": 4.336526701997554, "grad_norm": 1.2932125329971313, "learning_rate": 0.0004581379942927028, "loss": 3.7407, "step": 63825 }, { "epoch": 4.336866422068216, "grad_norm": 1.3269288539886475, "learning_rate": 0.0004580955292838701, "loss": 
3.5594, "step": 63830 }, { "epoch": 4.337206142138878, "grad_norm": 1.3436599969863892, "learning_rate": 0.00045805306427503743, "loss": 3.2985, "step": 63835 }, { "epoch": 4.33754586220954, "grad_norm": 1.134859323501587, "learning_rate": 0.00045801059926620465, "loss": 3.3822, "step": 63840 }, { "epoch": 4.337885582280201, "grad_norm": 1.2749603986740112, "learning_rate": 0.00045796813425737193, "loss": 3.4347, "step": 63845 }, { "epoch": 4.338225302350863, "grad_norm": 1.3001760244369507, "learning_rate": 0.0004579256692485392, "loss": 2.9329, "step": 63850 }, { "epoch": 4.338565022421525, "grad_norm": 0.9457477927207947, "learning_rate": 0.0004578832042397065, "loss": 3.3462, "step": 63855 }, { "epoch": 4.338904742492186, "grad_norm": 1.2139887809753418, "learning_rate": 0.0004578407392308738, "loss": 3.3889, "step": 63860 }, { "epoch": 4.339244462562848, "grad_norm": 1.7245569229125977, "learning_rate": 0.00045779827422204105, "loss": 3.6427, "step": 63865 }, { "epoch": 4.33958418263351, "grad_norm": 1.45416259765625, "learning_rate": 0.00045775580921320833, "loss": 3.4984, "step": 63870 }, { "epoch": 4.339923902704172, "grad_norm": 1.3321548700332642, "learning_rate": 0.0004577133442043756, "loss": 3.4605, "step": 63875 }, { "epoch": 4.340263622774834, "grad_norm": 1.1927133798599243, "learning_rate": 0.0004576708791955429, "loss": 3.6093, "step": 63880 }, { "epoch": 4.340603342845496, "grad_norm": 1.3580251932144165, "learning_rate": 0.0004576284141867101, "loss": 3.5891, "step": 63885 }, { "epoch": 4.340943062916157, "grad_norm": 1.7003302574157715, "learning_rate": 0.00045758594917787745, "loss": 3.4154, "step": 63890 }, { "epoch": 4.341282782986819, "grad_norm": 0.9843859672546387, "learning_rate": 0.00045754348416904473, "loss": 3.3401, "step": 63895 }, { "epoch": 4.341622503057481, "grad_norm": 1.3408585786819458, "learning_rate": 0.00045750101916021196, "loss": 3.281, "step": 63900 }, { "epoch": 4.341962223128142, "grad_norm": 1.1890534162521362, 
"learning_rate": 0.0004574585541513793, "loss": 3.2132, "step": 63905 }, { "epoch": 4.342301943198804, "grad_norm": 1.2871164083480835, "learning_rate": 0.0004574160891425466, "loss": 3.279, "step": 63910 }, { "epoch": 4.3426416632694655, "grad_norm": 1.2853131294250488, "learning_rate": 0.0004573736241337138, "loss": 3.4137, "step": 63915 }, { "epoch": 4.342981383340128, "grad_norm": 1.2402336597442627, "learning_rate": 0.0004573311591248811, "loss": 3.4175, "step": 63920 }, { "epoch": 4.34332110341079, "grad_norm": 1.1396288871765137, "learning_rate": 0.0004572886941160484, "loss": 3.6661, "step": 63925 }, { "epoch": 4.343660823481451, "grad_norm": 1.1958917379379272, "learning_rate": 0.00045724622910721564, "loss": 3.6197, "step": 63930 }, { "epoch": 4.344000543552113, "grad_norm": 1.2349541187286377, "learning_rate": 0.0004572037640983829, "loss": 3.5895, "step": 63935 }, { "epoch": 4.344340263622775, "grad_norm": 1.361986756324768, "learning_rate": 0.00045716129908955025, "loss": 3.3786, "step": 63940 }, { "epoch": 4.344679983693436, "grad_norm": 1.5317833423614502, "learning_rate": 0.0004571188340807175, "loss": 3.2561, "step": 63945 }, { "epoch": 4.345019703764098, "grad_norm": 1.744939923286438, "learning_rate": 0.00045707636907188476, "loss": 3.3956, "step": 63950 }, { "epoch": 4.34535942383476, "grad_norm": 1.188004493713379, "learning_rate": 0.00045703390406305204, "loss": 3.2421, "step": 63955 }, { "epoch": 4.345699143905422, "grad_norm": 1.2710442543029785, "learning_rate": 0.0004569914390542194, "loss": 3.2857, "step": 63960 }, { "epoch": 4.346038863976084, "grad_norm": 1.7691570520401, "learning_rate": 0.0004569489740453866, "loss": 3.3017, "step": 63965 }, { "epoch": 4.346378584046746, "grad_norm": 1.3206063508987427, "learning_rate": 0.0004569065090365539, "loss": 3.0038, "step": 63970 }, { "epoch": 4.346718304117407, "grad_norm": 1.3135253190994263, "learning_rate": 0.0004568640440277212, "loss": 3.5775, "step": 63975 }, { "epoch": 
4.347058024188069, "grad_norm": 1.3866416215896606, "learning_rate": 0.00045682157901888844, "loss": 3.2498, "step": 63980 }, { "epoch": 4.347397744258731, "grad_norm": 1.09800124168396, "learning_rate": 0.0004567791140100557, "loss": 3.4769, "step": 63985 }, { "epoch": 4.347737464329392, "grad_norm": 1.4942185878753662, "learning_rate": 0.000456736649001223, "loss": 3.4589, "step": 63990 }, { "epoch": 4.348077184400054, "grad_norm": 1.4462953805923462, "learning_rate": 0.0004566941839923903, "loss": 3.5231, "step": 63995 }, { "epoch": 4.348416904470716, "grad_norm": 1.2463899850845337, "learning_rate": 0.00045665171898355756, "loss": 3.1964, "step": 64000 }, { "epoch": 4.348756624541378, "grad_norm": 1.533568263053894, "learning_rate": 0.00045660925397472484, "loss": 3.3263, "step": 64005 }, { "epoch": 4.34909634461204, "grad_norm": 1.5768295526504517, "learning_rate": 0.0004565667889658921, "loss": 3.5577, "step": 64010 }, { "epoch": 4.349436064682702, "grad_norm": 1.5218302011489868, "learning_rate": 0.0004565243239570594, "loss": 3.1909, "step": 64015 }, { "epoch": 4.349775784753363, "grad_norm": 1.223602294921875, "learning_rate": 0.0004564818589482267, "loss": 3.554, "step": 64020 }, { "epoch": 4.350115504824025, "grad_norm": 1.537448525428772, "learning_rate": 0.0004564393939393939, "loss": 3.2449, "step": 64025 }, { "epoch": 4.350455224894687, "grad_norm": 1.340606689453125, "learning_rate": 0.00045639692893056124, "loss": 3.645, "step": 64030 }, { "epoch": 4.350794944965348, "grad_norm": 1.280060052871704, "learning_rate": 0.0004563544639217285, "loss": 3.1679, "step": 64035 }, { "epoch": 4.35113466503601, "grad_norm": 1.2926832437515259, "learning_rate": 0.00045631199891289575, "loss": 3.6009, "step": 64040 }, { "epoch": 4.351474385106672, "grad_norm": 1.0413792133331299, "learning_rate": 0.0004562695339040631, "loss": 3.3755, "step": 64045 }, { "epoch": 4.351814105177334, "grad_norm": 1.0906150341033936, "learning_rate": 0.00045622706889523036, "loss": 
3.0922, "step": 64050 }, { "epoch": 4.352153825247996, "grad_norm": 1.149782657623291, "learning_rate": 0.0004561846038863976, "loss": 3.1939, "step": 64055 }, { "epoch": 4.352493545318658, "grad_norm": 1.4710172414779663, "learning_rate": 0.00045614213887756487, "loss": 3.3768, "step": 64060 }, { "epoch": 4.352833265389319, "grad_norm": 1.227387547492981, "learning_rate": 0.0004560996738687322, "loss": 3.6489, "step": 64065 }, { "epoch": 4.353172985459981, "grad_norm": 1.2158452272415161, "learning_rate": 0.00045605720885989943, "loss": 3.4036, "step": 64070 }, { "epoch": 4.353512705530643, "grad_norm": 1.3211612701416016, "learning_rate": 0.0004560147438510667, "loss": 3.568, "step": 64075 }, { "epoch": 4.353852425601304, "grad_norm": 1.1435081958770752, "learning_rate": 0.00045597227884223404, "loss": 3.4192, "step": 64080 }, { "epoch": 4.354192145671966, "grad_norm": 1.6544950008392334, "learning_rate": 0.00045592981383340127, "loss": 3.2582, "step": 64085 }, { "epoch": 4.354531865742628, "grad_norm": 1.2195384502410889, "learning_rate": 0.00045588734882456855, "loss": 3.2743, "step": 64090 }, { "epoch": 4.35487158581329, "grad_norm": 1.1012030839920044, "learning_rate": 0.00045584488381573583, "loss": 3.5105, "step": 64095 }, { "epoch": 4.355211305883952, "grad_norm": 1.1098185777664185, "learning_rate": 0.0004558024188069031, "loss": 3.2837, "step": 64100 }, { "epoch": 4.355551025954614, "grad_norm": 1.2665977478027344, "learning_rate": 0.0004557599537980704, "loss": 3.4428, "step": 64105 }, { "epoch": 4.355890746025275, "grad_norm": 1.2061131000518799, "learning_rate": 0.00045571748878923767, "loss": 3.4271, "step": 64110 }, { "epoch": 4.356230466095937, "grad_norm": 1.2660436630249023, "learning_rate": 0.00045567502378040495, "loss": 3.2026, "step": 64115 }, { "epoch": 4.356570186166599, "grad_norm": 1.2204899787902832, "learning_rate": 0.00045563255877157223, "loss": 3.6735, "step": 64120 }, { "epoch": 4.35690990623726, "grad_norm": 1.2471054792404175, 
"learning_rate": 0.0004555900937627395, "loss": 3.4318, "step": 64125 }, { "epoch": 4.357249626307922, "grad_norm": 1.2171412706375122, "learning_rate": 0.00045554762875390684, "loss": 3.3283, "step": 64130 }, { "epoch": 4.357589346378584, "grad_norm": 1.146413803100586, "learning_rate": 0.00045550516374507407, "loss": 3.3248, "step": 64135 }, { "epoch": 4.357929066449246, "grad_norm": 1.0951852798461914, "learning_rate": 0.00045546269873624135, "loss": 3.352, "step": 64140 }, { "epoch": 4.358268786519908, "grad_norm": 1.1986087560653687, "learning_rate": 0.00045542023372740863, "loss": 3.5463, "step": 64145 }, { "epoch": 4.35860850659057, "grad_norm": 1.1434698104858398, "learning_rate": 0.0004553777687185759, "loss": 3.1642, "step": 64150 }, { "epoch": 4.358948226661231, "grad_norm": 1.154373049736023, "learning_rate": 0.0004553353037097432, "loss": 3.5619, "step": 64155 }, { "epoch": 4.359287946731893, "grad_norm": 1.8378145694732666, "learning_rate": 0.00045529283870091047, "loss": 3.4129, "step": 64160 }, { "epoch": 4.359627666802555, "grad_norm": 1.3307522535324097, "learning_rate": 0.00045525037369207775, "loss": 3.2799, "step": 64165 }, { "epoch": 4.359967386873216, "grad_norm": 1.5460044145584106, "learning_rate": 0.00045520790868324503, "loss": 3.3195, "step": 64170 }, { "epoch": 4.360307106943878, "grad_norm": 1.2039064168930054, "learning_rate": 0.0004551654436744123, "loss": 3.4214, "step": 64175 }, { "epoch": 4.36064682701454, "grad_norm": 1.3189266920089722, "learning_rate": 0.00045512297866557954, "loss": 3.2918, "step": 64180 }, { "epoch": 4.360986547085202, "grad_norm": 1.254732608795166, "learning_rate": 0.00045508051365674687, "loss": 3.6534, "step": 64185 }, { "epoch": 4.361326267155864, "grad_norm": 1.1969455480575562, "learning_rate": 0.00045503804864791415, "loss": 3.3938, "step": 64190 }, { "epoch": 4.361665987226525, "grad_norm": 1.360477089881897, "learning_rate": 0.0004549955836390814, "loss": 3.4594, "step": 64195 }, { "epoch": 
4.362005707297187, "grad_norm": 1.5614341497421265, "learning_rate": 0.0004549531186302487, "loss": 3.2285, "step": 64200 }, { "epoch": 4.362345427367849, "grad_norm": 1.5152113437652588, "learning_rate": 0.000454910653621416, "loss": 3.498, "step": 64205 }, { "epoch": 4.36268514743851, "grad_norm": 1.7999820709228516, "learning_rate": 0.0004548681886125832, "loss": 3.2543, "step": 64210 }, { "epoch": 4.363024867509172, "grad_norm": 1.5500895977020264, "learning_rate": 0.0004548257236037505, "loss": 3.5233, "step": 64215 }, { "epoch": 4.363364587579834, "grad_norm": 1.0455741882324219, "learning_rate": 0.00045478325859491783, "loss": 3.5014, "step": 64220 }, { "epoch": 4.3637043076504956, "grad_norm": 1.2554012537002563, "learning_rate": 0.00045474079358608506, "loss": 3.4133, "step": 64225 }, { "epoch": 4.364044027721158, "grad_norm": 1.2119853496551514, "learning_rate": 0.00045469832857725234, "loss": 3.4519, "step": 64230 }, { "epoch": 4.36438374779182, "grad_norm": 1.5312557220458984, "learning_rate": 0.00045465586356841967, "loss": 3.3336, "step": 64235 }, { "epoch": 4.364723467862481, "grad_norm": 1.1753767728805542, "learning_rate": 0.0004546133985595869, "loss": 3.4644, "step": 64240 }, { "epoch": 4.365063187933143, "grad_norm": 1.2299270629882812, "learning_rate": 0.0004545709335507542, "loss": 3.587, "step": 64245 }, { "epoch": 4.365402908003805, "grad_norm": 1.689468502998352, "learning_rate": 0.00045452846854192146, "loss": 3.3046, "step": 64250 }, { "epoch": 4.365742628074466, "grad_norm": 1.3188291788101196, "learning_rate": 0.00045448600353308874, "loss": 3.1993, "step": 64255 }, { "epoch": 4.366082348145128, "grad_norm": 1.3819942474365234, "learning_rate": 0.000454443538524256, "loss": 3.3188, "step": 64260 }, { "epoch": 4.36642206821579, "grad_norm": 1.1309895515441895, "learning_rate": 0.0004544010735154233, "loss": 3.0379, "step": 64265 }, { "epoch": 4.366761788286452, "grad_norm": 1.0935214757919312, "learning_rate": 0.0004543586085065906, 
"loss": 3.1745, "step": 64270 }, { "epoch": 4.367101508357114, "grad_norm": 1.144679069519043, "learning_rate": 0.00045431614349775786, "loss": 3.5766, "step": 64275 }, { "epoch": 4.367441228427776, "grad_norm": 1.397840976715088, "learning_rate": 0.00045427367848892514, "loss": 3.3584, "step": 64280 }, { "epoch": 4.367780948498437, "grad_norm": 1.2018007040023804, "learning_rate": 0.00045423121348009236, "loss": 3.3766, "step": 64285 }, { "epoch": 4.368120668569099, "grad_norm": 1.8521246910095215, "learning_rate": 0.0004541887484712597, "loss": 3.3322, "step": 64290 }, { "epoch": 4.368460388639761, "grad_norm": 1.228774070739746, "learning_rate": 0.000454146283462427, "loss": 3.4398, "step": 64295 }, { "epoch": 4.368800108710422, "grad_norm": 1.2993155717849731, "learning_rate": 0.00045410381845359426, "loss": 3.5725, "step": 64300 }, { "epoch": 4.369139828781084, "grad_norm": 1.272113561630249, "learning_rate": 0.00045406135344476154, "loss": 3.7686, "step": 64305 }, { "epoch": 4.369479548851746, "grad_norm": 1.3187203407287598, "learning_rate": 0.0004540188884359288, "loss": 3.3553, "step": 64310 }, { "epoch": 4.369819268922408, "grad_norm": 1.0988737344741821, "learning_rate": 0.0004539764234270961, "loss": 3.1408, "step": 64315 }, { "epoch": 4.37015898899307, "grad_norm": 1.3957562446594238, "learning_rate": 0.0004539339584182633, "loss": 3.6302, "step": 64320 }, { "epoch": 4.370498709063732, "grad_norm": 1.1111254692077637, "learning_rate": 0.00045389149340943066, "loss": 3.2521, "step": 64325 }, { "epoch": 4.370838429134393, "grad_norm": 0.9597349166870117, "learning_rate": 0.00045384902840059794, "loss": 3.7221, "step": 64330 }, { "epoch": 4.371178149205055, "grad_norm": 1.7701176404953003, "learning_rate": 0.00045380656339176516, "loss": 3.521, "step": 64335 }, { "epoch": 4.371517869275717, "grad_norm": 1.33388352394104, "learning_rate": 0.0004537640983829325, "loss": 3.0721, "step": 64340 }, { "epoch": 4.371857589346378, "grad_norm": 1.108649492263794, 
"learning_rate": 0.0004537216333740998, "loss": 3.861, "step": 64345 }, { "epoch": 4.37219730941704, "grad_norm": 1.2270365953445435, "learning_rate": 0.000453679168365267, "loss": 3.2552, "step": 64350 }, { "epoch": 4.372537029487702, "grad_norm": 1.2935701608657837, "learning_rate": 0.0004536367033564343, "loss": 3.5882, "step": 64355 }, { "epoch": 4.372876749558364, "grad_norm": 1.4198607206344604, "learning_rate": 0.0004535942383476016, "loss": 3.4514, "step": 64360 }, { "epoch": 4.373216469629026, "grad_norm": 1.4214929342269897, "learning_rate": 0.00045355177333876884, "loss": 3.5543, "step": 64365 }, { "epoch": 4.373556189699688, "grad_norm": 1.2934333086013794, "learning_rate": 0.0004535093083299361, "loss": 3.3796, "step": 64370 }, { "epoch": 4.373895909770349, "grad_norm": 1.3739275932312012, "learning_rate": 0.00045346684332110346, "loss": 3.6083, "step": 64375 }, { "epoch": 4.374235629841011, "grad_norm": 1.410956859588623, "learning_rate": 0.0004534243783122707, "loss": 3.549, "step": 64380 }, { "epoch": 4.374575349911673, "grad_norm": 1.1791142225265503, "learning_rate": 0.00045338191330343796, "loss": 3.0571, "step": 64385 }, { "epoch": 4.374915069982334, "grad_norm": 1.2884199619293213, "learning_rate": 0.0004533394482946053, "loss": 3.2255, "step": 64390 }, { "epoch": 4.375254790052996, "grad_norm": 1.5308183431625366, "learning_rate": 0.0004532969832857725, "loss": 3.5526, "step": 64395 }, { "epoch": 4.375594510123658, "grad_norm": 1.666492223739624, "learning_rate": 0.0004532545182769398, "loss": 3.2412, "step": 64400 }, { "epoch": 4.37593423019432, "grad_norm": 1.403631329536438, "learning_rate": 0.0004532120532681071, "loss": 3.4048, "step": 64405 }, { "epoch": 4.376273950264982, "grad_norm": 1.2409473657608032, "learning_rate": 0.00045316958825927437, "loss": 3.2475, "step": 64410 }, { "epoch": 4.376613670335644, "grad_norm": 1.1527588367462158, "learning_rate": 0.00045312712325044165, "loss": 3.3959, "step": 64415 }, { "epoch": 
4.376953390406305, "grad_norm": 1.1447079181671143, "learning_rate": 0.0004530846582416089, "loss": 3.5658, "step": 64420 }, { "epoch": 4.377293110476967, "grad_norm": 1.6811726093292236, "learning_rate": 0.0004530421932327762, "loss": 3.3659, "step": 64425 }, { "epoch": 4.377632830547629, "grad_norm": 1.323678970336914, "learning_rate": 0.0004529997282239435, "loss": 3.6983, "step": 64430 }, { "epoch": 4.37797255061829, "grad_norm": 1.0534276962280273, "learning_rate": 0.00045295726321511077, "loss": 3.6609, "step": 64435 }, { "epoch": 4.378312270688952, "grad_norm": 1.2135955095291138, "learning_rate": 0.000452914798206278, "loss": 3.2708, "step": 64440 }, { "epoch": 4.378651990759614, "grad_norm": 1.1325528621673584, "learning_rate": 0.0004528723331974453, "loss": 3.262, "step": 64445 }, { "epoch": 4.378991710830276, "grad_norm": 1.6381337642669678, "learning_rate": 0.0004528298681886126, "loss": 3.7449, "step": 64450 }, { "epoch": 4.379331430900938, "grad_norm": 1.0785359144210815, "learning_rate": 0.00045278740317977983, "loss": 3.2727, "step": 64455 }, { "epoch": 4.3796711509716, "grad_norm": 1.0891966819763184, "learning_rate": 0.00045274493817094717, "loss": 3.2706, "step": 64460 }, { "epoch": 4.380010871042261, "grad_norm": 1.1647273302078247, "learning_rate": 0.00045270247316211445, "loss": 3.4507, "step": 64465 }, { "epoch": 4.380350591112923, "grad_norm": 1.2365117073059082, "learning_rate": 0.0004526600081532817, "loss": 3.2993, "step": 64470 }, { "epoch": 4.380690311183585, "grad_norm": 1.1265767812728882, "learning_rate": 0.00045261754314444895, "loss": 3.3602, "step": 64475 }, { "epoch": 4.381030031254246, "grad_norm": 1.6113567352294922, "learning_rate": 0.0004525750781356163, "loss": 3.4509, "step": 64480 }, { "epoch": 4.381369751324908, "grad_norm": 1.3959686756134033, "learning_rate": 0.00045253261312678357, "loss": 3.4269, "step": 64485 }, { "epoch": 4.38170947139557, "grad_norm": 1.3102927207946777, "learning_rate": 0.0004524901481179508, 
"loss": 3.4794, "step": 64490 }, { "epoch": 4.382049191466232, "grad_norm": 1.6108393669128418, "learning_rate": 0.0004524476831091181, "loss": 3.4414, "step": 64495 }, { "epoch": 4.382388911536894, "grad_norm": 1.727401852607727, "learning_rate": 0.0004524052181002854, "loss": 3.1519, "step": 64500 }, { "epoch": 4.382728631607556, "grad_norm": 1.31441068649292, "learning_rate": 0.00045236275309145263, "loss": 3.5416, "step": 64505 }, { "epoch": 4.383068351678217, "grad_norm": 1.0787036418914795, "learning_rate": 0.0004523202880826199, "loss": 3.3478, "step": 64510 }, { "epoch": 4.383408071748879, "grad_norm": 1.1312806606292725, "learning_rate": 0.00045227782307378725, "loss": 3.4644, "step": 64515 }, { "epoch": 4.383747791819541, "grad_norm": 1.3956886529922485, "learning_rate": 0.00045223535806495447, "loss": 3.4683, "step": 64520 }, { "epoch": 4.384087511890202, "grad_norm": 1.4447689056396484, "learning_rate": 0.00045219289305612175, "loss": 3.7367, "step": 64525 }, { "epoch": 4.384427231960864, "grad_norm": 1.246535301208496, "learning_rate": 0.0004521504280472891, "loss": 3.6569, "step": 64530 }, { "epoch": 4.384766952031526, "grad_norm": 1.2158994674682617, "learning_rate": 0.0004521079630384563, "loss": 3.3809, "step": 64535 }, { "epoch": 4.385106672102188, "grad_norm": 1.3298544883728027, "learning_rate": 0.0004520654980296236, "loss": 3.5597, "step": 64540 }, { "epoch": 4.38544639217285, "grad_norm": 1.3226954936981201, "learning_rate": 0.0004520230330207909, "loss": 3.2228, "step": 64545 }, { "epoch": 4.385786112243512, "grad_norm": 1.6637846231460571, "learning_rate": 0.00045198056801195815, "loss": 3.5832, "step": 64550 }, { "epoch": 4.386125832314173, "grad_norm": 1.1051650047302246, "learning_rate": 0.00045193810300312543, "loss": 3.3944, "step": 64555 }, { "epoch": 4.386465552384835, "grad_norm": 1.141703486442566, "learning_rate": 0.0004518956379942927, "loss": 3.3466, "step": 64560 }, { "epoch": 4.386805272455497, "grad_norm": 1.2292919158935547, 
"learning_rate": 0.00045185317298546, "loss": 3.5266, "step": 64565 }, { "epoch": 4.387144992526158, "grad_norm": 1.2385609149932861, "learning_rate": 0.0004518107079766273, "loss": 3.4088, "step": 64570 }, { "epoch": 4.38748471259682, "grad_norm": 1.376251459121704, "learning_rate": 0.00045176824296779455, "loss": 3.3705, "step": 64575 }, { "epoch": 4.3878244326674825, "grad_norm": 1.3009361028671265, "learning_rate": 0.0004517257779589618, "loss": 3.5495, "step": 64580 }, { "epoch": 4.388164152738144, "grad_norm": 0.9987362623214722, "learning_rate": 0.0004516833129501291, "loss": 3.4379, "step": 64585 }, { "epoch": 4.388503872808806, "grad_norm": 1.1664029359817505, "learning_rate": 0.0004516408479412964, "loss": 3.3919, "step": 64590 }, { "epoch": 4.388843592879467, "grad_norm": 1.2089207172393799, "learning_rate": 0.0004515983829324636, "loss": 3.4858, "step": 64595 }, { "epoch": 4.389183312950129, "grad_norm": 1.1677607297897339, "learning_rate": 0.00045155591792363095, "loss": 3.3425, "step": 64600 }, { "epoch": 4.389523033020791, "grad_norm": 1.2474614381790161, "learning_rate": 0.00045151345291479823, "loss": 3.5936, "step": 64605 }, { "epoch": 4.389862753091452, "grad_norm": 1.4989535808563232, "learning_rate": 0.00045147098790596546, "loss": 3.607, "step": 64610 }, { "epoch": 4.390202473162114, "grad_norm": 1.2342724800109863, "learning_rate": 0.00045142852289713274, "loss": 3.6061, "step": 64615 }, { "epoch": 4.390542193232776, "grad_norm": 1.1673249006271362, "learning_rate": 0.0004513860578883001, "loss": 3.3621, "step": 64620 }, { "epoch": 4.390881913303438, "grad_norm": 1.4052814245224, "learning_rate": 0.0004513435928794673, "loss": 3.3624, "step": 64625 }, { "epoch": 4.3912216333741, "grad_norm": 1.173413634300232, "learning_rate": 0.0004513011278706346, "loss": 3.2558, "step": 64630 }, { "epoch": 4.391561353444762, "grad_norm": 1.1685497760772705, "learning_rate": 0.0004512586628618019, "loss": 3.2554, "step": 64635 }, { "epoch": 
4.391901073515423, "grad_norm": 1.2501739263534546, "learning_rate": 0.0004512161978529692, "loss": 3.5773, "step": 64640 }, { "epoch": 4.392240793586085, "grad_norm": 1.207245111465454, "learning_rate": 0.0004511737328441364, "loss": 3.2879, "step": 64645 }, { "epoch": 4.392580513656747, "grad_norm": 1.3792619705200195, "learning_rate": 0.0004511312678353037, "loss": 3.3108, "step": 64650 }, { "epoch": 4.392920233727408, "grad_norm": 1.4400241374969482, "learning_rate": 0.00045108880282647103, "loss": 3.4239, "step": 64655 }, { "epoch": 4.39325995379807, "grad_norm": 1.2538111209869385, "learning_rate": 0.00045104633781763826, "loss": 3.6218, "step": 64660 }, { "epoch": 4.393599673868732, "grad_norm": 1.3753559589385986, "learning_rate": 0.00045100387280880554, "loss": 3.234, "step": 64665 }, { "epoch": 4.393939393939394, "grad_norm": 1.0301401615142822, "learning_rate": 0.0004509614077999729, "loss": 3.3532, "step": 64670 }, { "epoch": 4.394279114010056, "grad_norm": 1.1696653366088867, "learning_rate": 0.0004509189427911401, "loss": 3.2123, "step": 64675 }, { "epoch": 4.394618834080718, "grad_norm": 1.1065990924835205, "learning_rate": 0.0004508764777823074, "loss": 3.559, "step": 64680 }, { "epoch": 4.394958554151379, "grad_norm": 1.3586900234222412, "learning_rate": 0.0004508340127734747, "loss": 3.479, "step": 64685 }, { "epoch": 4.395298274222041, "grad_norm": 1.3521026372909546, "learning_rate": 0.00045079154776464194, "loss": 3.4682, "step": 64690 }, { "epoch": 4.395637994292703, "grad_norm": 1.5378221273422241, "learning_rate": 0.0004507490827558092, "loss": 3.329, "step": 64695 }, { "epoch": 4.395977714363364, "grad_norm": 1.3107917308807373, "learning_rate": 0.0004507066177469765, "loss": 3.1454, "step": 64700 }, { "epoch": 4.396317434434026, "grad_norm": 1.9928629398345947, "learning_rate": 0.0004506641527381438, "loss": 3.3933, "step": 64705 }, { "epoch": 4.396657154504688, "grad_norm": 1.3464674949645996, "learning_rate": 0.00045062168772931106, 
"loss": 3.2347, "step": 64710 }, { "epoch": 4.39699687457535, "grad_norm": 1.0925263166427612, "learning_rate": 0.00045057922272047834, "loss": 3.3144, "step": 64715 }, { "epoch": 4.397336594646012, "grad_norm": 1.4196672439575195, "learning_rate": 0.0004505367577116456, "loss": 3.2043, "step": 64720 }, { "epoch": 4.397676314716674, "grad_norm": 1.2341102361679077, "learning_rate": 0.0004504942927028129, "loss": 3.3345, "step": 64725 }, { "epoch": 4.398016034787335, "grad_norm": 1.2782267332077026, "learning_rate": 0.0004504518276939802, "loss": 3.3121, "step": 64730 }, { "epoch": 4.398355754857997, "grad_norm": 0.8981735110282898, "learning_rate": 0.0004504093626851474, "loss": 3.5284, "step": 64735 }, { "epoch": 4.398695474928659, "grad_norm": 1.5687329769134521, "learning_rate": 0.00045036689767631474, "loss": 3.2189, "step": 64740 }, { "epoch": 4.39903519499932, "grad_norm": 1.4641715288162231, "learning_rate": 0.000450324432667482, "loss": 3.3876, "step": 64745 }, { "epoch": 4.399374915069982, "grad_norm": 1.1824615001678467, "learning_rate": 0.00045028196765864925, "loss": 3.1333, "step": 64750 }, { "epoch": 4.399714635140644, "grad_norm": 1.2007158994674683, "learning_rate": 0.0004502395026498166, "loss": 3.4539, "step": 64755 }, { "epoch": 4.400054355211306, "grad_norm": 1.3727654218673706, "learning_rate": 0.00045019703764098386, "loss": 3.3454, "step": 64760 }, { "epoch": 4.400394075281968, "grad_norm": 1.2901829481124878, "learning_rate": 0.0004501545726321511, "loss": 3.2006, "step": 64765 }, { "epoch": 4.40073379535263, "grad_norm": 1.3468644618988037, "learning_rate": 0.00045011210762331837, "loss": 3.2259, "step": 64770 }, { "epoch": 4.401073515423291, "grad_norm": 1.2090046405792236, "learning_rate": 0.0004500696426144857, "loss": 3.3622, "step": 64775 }, { "epoch": 4.401413235493953, "grad_norm": 1.24351167678833, "learning_rate": 0.00045002717760565293, "loss": 3.2626, "step": 64780 }, { "epoch": 4.401752955564615, "grad_norm": 1.2962099313735962, 
"learning_rate": 0.0004499847125968202, "loss": 3.1368, "step": 64785 }, { "epoch": 4.402092675635276, "grad_norm": 1.5789453983306885, "learning_rate": 0.00044994224758798754, "loss": 3.4479, "step": 64790 }, { "epoch": 4.402432395705938, "grad_norm": 1.435905933380127, "learning_rate": 0.00044989978257915477, "loss": 3.3618, "step": 64795 }, { "epoch": 4.4027721157766, "grad_norm": 1.2945948839187622, "learning_rate": 0.00044985731757032205, "loss": 3.1138, "step": 64800 }, { "epoch": 4.403111835847262, "grad_norm": 1.621772289276123, "learning_rate": 0.00044981485256148933, "loss": 3.3278, "step": 64805 }, { "epoch": 4.403451555917924, "grad_norm": 1.578446865081787, "learning_rate": 0.00044977238755265666, "loss": 3.3831, "step": 64810 }, { "epoch": 4.403791275988586, "grad_norm": 1.5082499980926514, "learning_rate": 0.0004497299225438239, "loss": 3.3624, "step": 64815 }, { "epoch": 4.404130996059247, "grad_norm": 1.2348593473434448, "learning_rate": 0.00044968745753499117, "loss": 3.2769, "step": 64820 }, { "epoch": 4.404470716129909, "grad_norm": 1.0223720073699951, "learning_rate": 0.0004496449925261585, "loss": 3.2323, "step": 64825 }, { "epoch": 4.404810436200571, "grad_norm": 1.1426403522491455, "learning_rate": 0.00044960252751732573, "loss": 3.7787, "step": 64830 }, { "epoch": 4.405150156271232, "grad_norm": 1.193690299987793, "learning_rate": 0.000449560062508493, "loss": 3.6531, "step": 64835 }, { "epoch": 4.405489876341894, "grad_norm": 1.2240623235702515, "learning_rate": 0.0004495175974996603, "loss": 3.346, "step": 64840 }, { "epoch": 4.4058295964125564, "grad_norm": 1.3518245220184326, "learning_rate": 0.00044947513249082757, "loss": 3.1495, "step": 64845 }, { "epoch": 4.406169316483218, "grad_norm": 1.1374671459197998, "learning_rate": 0.00044943266748199485, "loss": 3.3657, "step": 64850 }, { "epoch": 4.40650903655388, "grad_norm": 1.1521775722503662, "learning_rate": 0.00044939020247316213, "loss": 3.3067, "step": 64855 }, { "epoch": 
4.406848756624542, "grad_norm": 1.3482309579849243, "learning_rate": 0.0004493477374643294, "loss": 3.2063, "step": 64860 }, { "epoch": 4.407188476695203, "grad_norm": 1.2419545650482178, "learning_rate": 0.0004493052724554967, "loss": 3.5438, "step": 64865 }, { "epoch": 4.407528196765865, "grad_norm": 1.3998719453811646, "learning_rate": 0.00044926280744666397, "loss": 3.2762, "step": 64870 }, { "epoch": 4.407867916836526, "grad_norm": 1.6229709386825562, "learning_rate": 0.0004492203424378312, "loss": 3.4442, "step": 64875 }, { "epoch": 4.408207636907188, "grad_norm": 1.344210147857666, "learning_rate": 0.00044917787742899853, "loss": 3.5703, "step": 64880 }, { "epoch": 4.40854735697785, "grad_norm": 1.0519505739212036, "learning_rate": 0.0004491354124201658, "loss": 3.1886, "step": 64885 }, { "epoch": 4.408887077048512, "grad_norm": 1.396669864654541, "learning_rate": 0.00044909294741133304, "loss": 3.3589, "step": 64890 }, { "epoch": 4.409226797119174, "grad_norm": 1.091509222984314, "learning_rate": 0.00044905048240250037, "loss": 3.3018, "step": 64895 }, { "epoch": 4.409566517189836, "grad_norm": 1.3347041606903076, "learning_rate": 0.00044900801739366765, "loss": 3.3032, "step": 64900 }, { "epoch": 4.409906237260497, "grad_norm": 1.223624587059021, "learning_rate": 0.0004489655523848349, "loss": 3.3448, "step": 64905 }, { "epoch": 4.410245957331159, "grad_norm": 1.3673651218414307, "learning_rate": 0.00044892308737600216, "loss": 3.4732, "step": 64910 }, { "epoch": 4.410585677401821, "grad_norm": 1.4178248643875122, "learning_rate": 0.0004488806223671695, "loss": 3.5211, "step": 64915 }, { "epoch": 4.410925397472482, "grad_norm": 1.7062280178070068, "learning_rate": 0.0004488381573583367, "loss": 3.1044, "step": 64920 }, { "epoch": 4.411265117543144, "grad_norm": 1.1981124877929688, "learning_rate": 0.000448795692349504, "loss": 3.1896, "step": 64925 }, { "epoch": 4.411604837613806, "grad_norm": 1.1239664554595947, "learning_rate": 0.00044875322734067133, 
"loss": 3.2128, "step": 64930 }, { "epoch": 4.411944557684468, "grad_norm": 1.3467012643814087, "learning_rate": 0.00044871076233183856, "loss": 3.5458, "step": 64935 }, { "epoch": 4.41228427775513, "grad_norm": 1.2487270832061768, "learning_rate": 0.00044866829732300584, "loss": 3.4022, "step": 64940 }, { "epoch": 4.412623997825792, "grad_norm": 1.1891930103302002, "learning_rate": 0.0004486258323141731, "loss": 3.3069, "step": 64945 }, { "epoch": 4.412963717896453, "grad_norm": 1.357880711555481, "learning_rate": 0.0004485833673053404, "loss": 3.3058, "step": 64950 }, { "epoch": 4.413303437967115, "grad_norm": 1.4649646282196045, "learning_rate": 0.0004485409022965077, "loss": 3.429, "step": 64955 }, { "epoch": 4.413643158037777, "grad_norm": 1.3555617332458496, "learning_rate": 0.00044849843728767496, "loss": 3.6442, "step": 64960 }, { "epoch": 4.413982878108438, "grad_norm": 0.9133481979370117, "learning_rate": 0.00044845597227884224, "loss": 3.2873, "step": 64965 }, { "epoch": 4.4143225981791, "grad_norm": 1.4348745346069336, "learning_rate": 0.0004484135072700095, "loss": 3.5249, "step": 64970 }, { "epoch": 4.414662318249762, "grad_norm": 1.0263819694519043, "learning_rate": 0.0004483710422611768, "loss": 3.3079, "step": 64975 }, { "epoch": 4.415002038320424, "grad_norm": 1.1842979192733765, "learning_rate": 0.00044832857725234413, "loss": 3.5033, "step": 64980 }, { "epoch": 4.415341758391086, "grad_norm": 1.3079991340637207, "learning_rate": 0.00044828611224351136, "loss": 3.5421, "step": 64985 }, { "epoch": 4.415681478461748, "grad_norm": 1.1804966926574707, "learning_rate": 0.00044824364723467864, "loss": 3.5327, "step": 64990 }, { "epoch": 4.416021198532409, "grad_norm": 1.1309504508972168, "learning_rate": 0.0004482011822258459, "loss": 3.4593, "step": 64995 }, { "epoch": 4.416360918603071, "grad_norm": 1.3590317964553833, "learning_rate": 0.0004481587172170132, "loss": 3.1295, "step": 65000 }, { "epoch": 4.416700638673733, "grad_norm": 
1.339996576309204, "learning_rate": 0.0004481162522081805, "loss": 3.0925, "step": 65005 }, { "epoch": 4.417040358744394, "grad_norm": 1.0873901844024658, "learning_rate": 0.00044807378719934776, "loss": 3.2659, "step": 65010 }, { "epoch": 4.417380078815056, "grad_norm": 1.3673666715621948, "learning_rate": 0.00044803132219051504, "loss": 3.3814, "step": 65015 }, { "epoch": 4.417719798885718, "grad_norm": 1.3280335664749146, "learning_rate": 0.0004479888571816823, "loss": 3.4117, "step": 65020 }, { "epoch": 4.41805951895638, "grad_norm": 1.3705114126205444, "learning_rate": 0.0004479463921728496, "loss": 3.2193, "step": 65025 }, { "epoch": 4.418399239027042, "grad_norm": 1.3806524276733398, "learning_rate": 0.0004479039271640168, "loss": 3.4996, "step": 65030 }, { "epoch": 4.418738959097704, "grad_norm": 1.3403689861297607, "learning_rate": 0.00044786146215518416, "loss": 3.4374, "step": 65035 }, { "epoch": 4.419078679168365, "grad_norm": 1.026471495628357, "learning_rate": 0.00044781899714635144, "loss": 3.2737, "step": 65040 }, { "epoch": 4.419418399239027, "grad_norm": 1.4471427202224731, "learning_rate": 0.00044777653213751866, "loss": 3.1187, "step": 65045 }, { "epoch": 4.419758119309689, "grad_norm": 1.3883447647094727, "learning_rate": 0.000447734067128686, "loss": 3.304, "step": 65050 }, { "epoch": 4.42009783938035, "grad_norm": 1.141686201095581, "learning_rate": 0.0004476916021198533, "loss": 3.6146, "step": 65055 }, { "epoch": 4.420437559451012, "grad_norm": 1.2085387706756592, "learning_rate": 0.0004476491371110205, "loss": 3.6311, "step": 65060 }, { "epoch": 4.420777279521674, "grad_norm": 1.1200387477874756, "learning_rate": 0.0004476066721021878, "loss": 3.4416, "step": 65065 }, { "epoch": 4.421116999592336, "grad_norm": 1.1282299757003784, "learning_rate": 0.0004475642070933551, "loss": 3.2167, "step": 65070 }, { "epoch": 4.421456719662998, "grad_norm": 1.4737255573272705, "learning_rate": 0.00044752174208452234, "loss": 3.2518, "step": 65075 }, { 
"epoch": 4.42179643973366, "grad_norm": 1.2310434579849243, "learning_rate": 0.0004474792770756896, "loss": 3.3519, "step": 65080 }, { "epoch": 4.422136159804321, "grad_norm": 1.2236567735671997, "learning_rate": 0.00044743681206685696, "loss": 3.5138, "step": 65085 }, { "epoch": 4.422475879874983, "grad_norm": 1.2728854417800903, "learning_rate": 0.0004473943470580242, "loss": 3.0815, "step": 65090 }, { "epoch": 4.422815599945645, "grad_norm": 1.4281402826309204, "learning_rate": 0.00044735188204919146, "loss": 3.4639, "step": 65095 }, { "epoch": 4.423155320016306, "grad_norm": 1.2335829734802246, "learning_rate": 0.00044730941704035874, "loss": 3.5532, "step": 65100 }, { "epoch": 4.423495040086968, "grad_norm": 1.6210728883743286, "learning_rate": 0.000447266952031526, "loss": 3.4708, "step": 65105 }, { "epoch": 4.42383476015763, "grad_norm": 1.0563323497772217, "learning_rate": 0.0004472244870226933, "loss": 3.5345, "step": 65110 }, { "epoch": 4.424174480228292, "grad_norm": 1.4110896587371826, "learning_rate": 0.0004471820220138606, "loss": 3.4346, "step": 65115 }, { "epoch": 4.424514200298954, "grad_norm": 1.6172219514846802, "learning_rate": 0.00044713955700502787, "loss": 3.3319, "step": 65120 }, { "epoch": 4.424853920369616, "grad_norm": 1.7906744480133057, "learning_rate": 0.00044709709199619515, "loss": 3.2926, "step": 65125 }, { "epoch": 4.425193640440277, "grad_norm": 1.2907520532608032, "learning_rate": 0.0004470546269873624, "loss": 3.1906, "step": 65130 }, { "epoch": 4.425533360510939, "grad_norm": 1.1878089904785156, "learning_rate": 0.00044701216197852965, "loss": 3.3037, "step": 65135 }, { "epoch": 4.425873080581601, "grad_norm": 1.356939435005188, "learning_rate": 0.000446969696969697, "loss": 3.2982, "step": 65140 }, { "epoch": 4.426212800652262, "grad_norm": 1.810360312461853, "learning_rate": 0.00044692723196086427, "loss": 3.4662, "step": 65145 }, { "epoch": 4.426552520722924, "grad_norm": 1.354878544807434, "learning_rate": 
0.00044688476695203155, "loss": 3.3836, "step": 65150 }, { "epoch": 4.4268922407935865, "grad_norm": 1.3521114587783813, "learning_rate": 0.0004468423019431988, "loss": 3.4152, "step": 65155 }, { "epoch": 4.427231960864248, "grad_norm": 1.4132287502288818, "learning_rate": 0.0004467998369343661, "loss": 3.8304, "step": 65160 }, { "epoch": 4.42757168093491, "grad_norm": 1.3660365343093872, "learning_rate": 0.0004467573719255334, "loss": 3.771, "step": 65165 }, { "epoch": 4.427911401005572, "grad_norm": 1.2859688997268677, "learning_rate": 0.0004467149069167006, "loss": 3.4917, "step": 65170 }, { "epoch": 4.428251121076233, "grad_norm": 1.3884944915771484, "learning_rate": 0.00044667244190786795, "loss": 3.2457, "step": 65175 }, { "epoch": 4.428590841146895, "grad_norm": 1.1892712116241455, "learning_rate": 0.0004466299768990352, "loss": 3.4787, "step": 65180 }, { "epoch": 4.428930561217557, "grad_norm": 1.2174630165100098, "learning_rate": 0.00044658751189020245, "loss": 3.4772, "step": 65185 }, { "epoch": 4.429270281288218, "grad_norm": 1.7575111389160156, "learning_rate": 0.0004465450468813698, "loss": 3.6009, "step": 65190 }, { "epoch": 4.42961000135888, "grad_norm": 1.269446611404419, "learning_rate": 0.00044650258187253707, "loss": 3.2144, "step": 65195 }, { "epoch": 4.4299497214295425, "grad_norm": 1.1601316928863525, "learning_rate": 0.0004464601168637043, "loss": 3.6508, "step": 65200 }, { "epoch": 4.430289441500204, "grad_norm": 1.352696180343628, "learning_rate": 0.00044641765185487157, "loss": 3.461, "step": 65205 }, { "epoch": 4.430629161570866, "grad_norm": 1.1199934482574463, "learning_rate": 0.0004463751868460389, "loss": 3.1238, "step": 65210 }, { "epoch": 4.430968881641528, "grad_norm": 1.8441596031188965, "learning_rate": 0.00044633272183720613, "loss": 3.6027, "step": 65215 }, { "epoch": 4.431308601712189, "grad_norm": 1.2307640314102173, "learning_rate": 0.0004462902568283734, "loss": 3.6297, "step": 65220 }, { "epoch": 4.431648321782851, 
"grad_norm": 1.3289424180984497, "learning_rate": 0.00044624779181954075, "loss": 3.4257, "step": 65225 }, { "epoch": 4.431988041853513, "grad_norm": 1.4034479856491089, "learning_rate": 0.00044620532681070797, "loss": 3.5319, "step": 65230 }, { "epoch": 4.432327761924174, "grad_norm": 1.628908634185791, "learning_rate": 0.00044616286180187525, "loss": 3.6153, "step": 65235 }, { "epoch": 4.432667481994836, "grad_norm": 1.6733571290969849, "learning_rate": 0.00044612039679304253, "loss": 3.3876, "step": 65240 }, { "epoch": 4.4330072020654985, "grad_norm": 1.3844099044799805, "learning_rate": 0.0004460779317842098, "loss": 3.3991, "step": 65245 }, { "epoch": 4.43334692213616, "grad_norm": 1.0431467294692993, "learning_rate": 0.0004460354667753771, "loss": 3.8149, "step": 65250 }, { "epoch": 4.433686642206822, "grad_norm": 1.4777071475982666, "learning_rate": 0.0004459930017665444, "loss": 3.4048, "step": 65255 }, { "epoch": 4.434026362277484, "grad_norm": 1.5278400182724, "learning_rate": 0.00044595053675771165, "loss": 3.2215, "step": 65260 }, { "epoch": 4.434366082348145, "grad_norm": 1.5574369430541992, "learning_rate": 0.00044590807174887893, "loss": 3.5768, "step": 65265 }, { "epoch": 4.434705802418807, "grad_norm": 1.2228035926818848, "learning_rate": 0.0004458656067400462, "loss": 3.5519, "step": 65270 }, { "epoch": 4.435045522489468, "grad_norm": 1.2569447755813599, "learning_rate": 0.00044582314173121344, "loss": 3.3722, "step": 65275 }, { "epoch": 4.43538524256013, "grad_norm": 1.3800157308578491, "learning_rate": 0.0004457806767223808, "loss": 3.2548, "step": 65280 }, { "epoch": 4.435724962630792, "grad_norm": 1.461359977722168, "learning_rate": 0.00044573821171354805, "loss": 3.7965, "step": 65285 }, { "epoch": 4.436064682701454, "grad_norm": 1.4640952348709106, "learning_rate": 0.0004456957467047153, "loss": 3.0447, "step": 65290 }, { "epoch": 4.436404402772116, "grad_norm": 1.6920634508132935, "learning_rate": 0.0004456532816958826, "loss": 3.2717, 
"step": 65295 }, { "epoch": 4.436744122842778, "grad_norm": 1.2345805168151855, "learning_rate": 0.0004456108166870499, "loss": 3.4473, "step": 65300 }, { "epoch": 4.437083842913439, "grad_norm": 1.2653669118881226, "learning_rate": 0.0004455683516782171, "loss": 3.5646, "step": 65305 }, { "epoch": 4.437423562984101, "grad_norm": 1.3721317052841187, "learning_rate": 0.00044552588666938445, "loss": 3.551, "step": 65310 }, { "epoch": 4.437763283054763, "grad_norm": 1.4830244779586792, "learning_rate": 0.00044548342166055173, "loss": 3.2744, "step": 65315 }, { "epoch": 4.438103003125424, "grad_norm": 1.0942796468734741, "learning_rate": 0.000445440956651719, "loss": 3.5867, "step": 65320 }, { "epoch": 4.438442723196086, "grad_norm": 1.1484558582305908, "learning_rate": 0.00044539849164288624, "loss": 3.2385, "step": 65325 }, { "epoch": 4.438782443266748, "grad_norm": 1.0989153385162354, "learning_rate": 0.0004453560266340536, "loss": 3.35, "step": 65330 }, { "epoch": 4.43912216333741, "grad_norm": 1.2311071157455444, "learning_rate": 0.00044531356162522085, "loss": 3.6712, "step": 65335 }, { "epoch": 4.439461883408072, "grad_norm": 1.3550888299942017, "learning_rate": 0.0004452710966163881, "loss": 3.2516, "step": 65340 }, { "epoch": 4.439801603478734, "grad_norm": 1.1953017711639404, "learning_rate": 0.0004452286316075554, "loss": 3.2965, "step": 65345 }, { "epoch": 4.440141323549395, "grad_norm": 1.232203483581543, "learning_rate": 0.0004451861665987227, "loss": 3.511, "step": 65350 }, { "epoch": 4.440481043620057, "grad_norm": 1.1375828981399536, "learning_rate": 0.0004451437015898899, "loss": 3.2368, "step": 65355 }, { "epoch": 4.440820763690719, "grad_norm": 1.475502848625183, "learning_rate": 0.0004451012365810572, "loss": 3.4209, "step": 65360 }, { "epoch": 4.44116048376138, "grad_norm": 1.3514881134033203, "learning_rate": 0.00044505877157222453, "loss": 3.5223, "step": 65365 }, { "epoch": 4.441500203832042, "grad_norm": 1.281400203704834, "learning_rate": 
0.00044501630656339176, "loss": 3.3519, "step": 65370 }, { "epoch": 4.441839923902704, "grad_norm": 1.4487839937210083, "learning_rate": 0.00044497384155455904, "loss": 3.2121, "step": 65375 }, { "epoch": 4.442179643973366, "grad_norm": 1.7481231689453125, "learning_rate": 0.0004449313765457264, "loss": 3.4592, "step": 65380 }, { "epoch": 4.442519364044028, "grad_norm": 1.3894222974777222, "learning_rate": 0.0004448889115368936, "loss": 3.1503, "step": 65385 }, { "epoch": 4.44285908411469, "grad_norm": 1.3075690269470215, "learning_rate": 0.0004448464465280609, "loss": 3.4777, "step": 65390 }, { "epoch": 4.443198804185351, "grad_norm": 1.5277104377746582, "learning_rate": 0.00044480398151922816, "loss": 3.4888, "step": 65395 }, { "epoch": 4.443538524256013, "grad_norm": 1.4215582609176636, "learning_rate": 0.00044476151651039544, "loss": 3.4798, "step": 65400 }, { "epoch": 4.443878244326675, "grad_norm": 1.405271053314209, "learning_rate": 0.0004447190515015627, "loss": 3.2755, "step": 65405 }, { "epoch": 4.444217964397336, "grad_norm": 1.2242265939712524, "learning_rate": 0.00044467658649273, "loss": 3.2698, "step": 65410 }, { "epoch": 4.444557684467998, "grad_norm": 1.3202377557754517, "learning_rate": 0.0004446341214838973, "loss": 3.3977, "step": 65415 }, { "epoch": 4.4448974045386604, "grad_norm": 1.1154980659484863, "learning_rate": 0.00044459165647506456, "loss": 3.4845, "step": 65420 }, { "epoch": 4.445237124609322, "grad_norm": 1.432619571685791, "learning_rate": 0.00044454919146623184, "loss": 3.553, "step": 65425 }, { "epoch": 4.445576844679984, "grad_norm": 1.3249238729476929, "learning_rate": 0.00044450672645739907, "loss": 3.4572, "step": 65430 }, { "epoch": 4.445916564750646, "grad_norm": 1.6157019138336182, "learning_rate": 0.0004444642614485664, "loss": 3.2767, "step": 65435 }, { "epoch": 4.446256284821307, "grad_norm": 1.00979483127594, "learning_rate": 0.0004444217964397337, "loss": 3.4107, "step": 65440 }, { "epoch": 4.446596004891969, 
"grad_norm": 1.4323503971099854, "learning_rate": 0.0004443793314309009, "loss": 3.3575, "step": 65445 }, { "epoch": 4.446935724962631, "grad_norm": 1.1418086290359497, "learning_rate": 0.00044433686642206824, "loss": 3.477, "step": 65450 }, { "epoch": 4.447275445033292, "grad_norm": 1.2675929069519043, "learning_rate": 0.0004442944014132355, "loss": 3.5708, "step": 65455 }, { "epoch": 4.447615165103954, "grad_norm": 1.390244960784912, "learning_rate": 0.00044425193640440275, "loss": 3.2419, "step": 65460 }, { "epoch": 4.4479548851746165, "grad_norm": 1.6478439569473267, "learning_rate": 0.00044420947139557003, "loss": 3.554, "step": 65465 }, { "epoch": 4.448294605245278, "grad_norm": 1.3790165185928345, "learning_rate": 0.00044416700638673736, "loss": 3.3373, "step": 65470 }, { "epoch": 4.44863432531594, "grad_norm": 1.4004533290863037, "learning_rate": 0.0004441245413779046, "loss": 3.2902, "step": 65475 }, { "epoch": 4.448974045386602, "grad_norm": 1.575722098350525, "learning_rate": 0.00044408207636907187, "loss": 3.2794, "step": 65480 }, { "epoch": 4.449313765457263, "grad_norm": 1.2940027713775635, "learning_rate": 0.0004440396113602392, "loss": 3.2258, "step": 65485 }, { "epoch": 4.449653485527925, "grad_norm": 1.2484757900238037, "learning_rate": 0.0004439971463514065, "loss": 3.4243, "step": 65490 }, { "epoch": 4.449993205598587, "grad_norm": 1.1840742826461792, "learning_rate": 0.0004439546813425737, "loss": 3.4437, "step": 65495 }, { "epoch": 4.450332925669248, "grad_norm": 1.0051836967468262, "learning_rate": 0.000443912216333741, "loss": 3.4361, "step": 65500 }, { "epoch": 4.45067264573991, "grad_norm": 1.1426255702972412, "learning_rate": 0.0004438697513249083, "loss": 3.2889, "step": 65505 }, { "epoch": 4.4510123658105725, "grad_norm": 1.1696586608886719, "learning_rate": 0.00044382728631607555, "loss": 3.3263, "step": 65510 }, { "epoch": 4.451352085881234, "grad_norm": 1.4118518829345703, "learning_rate": 0.00044378482130724283, "loss": 3.7308, 
"step": 65515 }, { "epoch": 4.451691805951896, "grad_norm": 1.173020601272583, "learning_rate": 0.00044374235629841016, "loss": 3.4743, "step": 65520 }, { "epoch": 4.452031526022558, "grad_norm": 1.3607755899429321, "learning_rate": 0.0004436998912895774, "loss": 3.5245, "step": 65525 }, { "epoch": 4.452371246093219, "grad_norm": 1.020055890083313, "learning_rate": 0.00044365742628074467, "loss": 2.9623, "step": 65530 }, { "epoch": 4.452710966163881, "grad_norm": 1.2762846946716309, "learning_rate": 0.000443614961271912, "loss": 3.2113, "step": 65535 }, { "epoch": 4.453050686234543, "grad_norm": 1.1469430923461914, "learning_rate": 0.00044357249626307923, "loss": 3.2799, "step": 65540 }, { "epoch": 4.453390406305204, "grad_norm": 1.4631808996200562, "learning_rate": 0.0004435300312542465, "loss": 3.5134, "step": 65545 }, { "epoch": 4.453730126375866, "grad_norm": 1.131528615951538, "learning_rate": 0.0004434875662454138, "loss": 3.331, "step": 65550 }, { "epoch": 4.454069846446528, "grad_norm": 1.082085371017456, "learning_rate": 0.00044344510123658107, "loss": 3.3561, "step": 65555 }, { "epoch": 4.45440956651719, "grad_norm": 0.9974183440208435, "learning_rate": 0.00044340263622774835, "loss": 3.3809, "step": 65560 }, { "epoch": 4.454749286587852, "grad_norm": 1.2337396144866943, "learning_rate": 0.00044336017121891563, "loss": 3.3261, "step": 65565 }, { "epoch": 4.455089006658513, "grad_norm": 1.4912112951278687, "learning_rate": 0.0004433177062100829, "loss": 3.6332, "step": 65570 }, { "epoch": 4.455428726729175, "grad_norm": 0.9330472946166992, "learning_rate": 0.0004432752412012502, "loss": 3.5684, "step": 65575 }, { "epoch": 4.455768446799837, "grad_norm": 1.141942024230957, "learning_rate": 0.00044323277619241747, "loss": 3.3278, "step": 65580 }, { "epoch": 4.456108166870498, "grad_norm": 1.5701098442077637, "learning_rate": 0.0004431903111835847, "loss": 3.1641, "step": 65585 }, { "epoch": 4.45644788694116, "grad_norm": 1.5102763175964355, "learning_rate": 
0.00044314784617475203, "loss": 3.316, "step": 65590 }, { "epoch": 4.456787607011822, "grad_norm": 1.1626360416412354, "learning_rate": 0.0004431053811659193, "loss": 3.4584, "step": 65595 }, { "epoch": 4.457127327082484, "grad_norm": 1.4515724182128906, "learning_rate": 0.00044306291615708654, "loss": 3.4474, "step": 65600 }, { "epoch": 4.457467047153146, "grad_norm": 1.335540771484375, "learning_rate": 0.00044302045114825387, "loss": 3.4646, "step": 65605 }, { "epoch": 4.457806767223808, "grad_norm": 1.6879113912582397, "learning_rate": 0.00044297798613942115, "loss": 3.4207, "step": 65610 }, { "epoch": 4.458146487294469, "grad_norm": 1.5427623987197876, "learning_rate": 0.0004429355211305884, "loss": 3.3798, "step": 65615 }, { "epoch": 4.458486207365131, "grad_norm": 1.2437824010849, "learning_rate": 0.00044289305612175566, "loss": 3.5977, "step": 65620 }, { "epoch": 4.458825927435793, "grad_norm": 1.3976783752441406, "learning_rate": 0.000442850591112923, "loss": 3.1359, "step": 65625 }, { "epoch": 4.459165647506454, "grad_norm": 1.545943021774292, "learning_rate": 0.0004428081261040902, "loss": 3.4148, "step": 65630 }, { "epoch": 4.459505367577116, "grad_norm": 1.5573322772979736, "learning_rate": 0.0004427656610952575, "loss": 3.3533, "step": 65635 }, { "epoch": 4.459845087647778, "grad_norm": 1.162153720855713, "learning_rate": 0.00044272319608642483, "loss": 3.641, "step": 65640 }, { "epoch": 4.46018480771844, "grad_norm": 1.5355583429336548, "learning_rate": 0.00044268073107759206, "loss": 3.3214, "step": 65645 }, { "epoch": 4.460524527789102, "grad_norm": 1.1931493282318115, "learning_rate": 0.00044263826606875934, "loss": 3.2927, "step": 65650 }, { "epoch": 4.460864247859764, "grad_norm": 1.6369003057479858, "learning_rate": 0.0004425958010599266, "loss": 3.3415, "step": 65655 }, { "epoch": 4.461203967930425, "grad_norm": 1.6896418333053589, "learning_rate": 0.00044255333605109395, "loss": 3.1417, "step": 65660 }, { "epoch": 4.461543688001087, 
"grad_norm": 1.1457656621932983, "learning_rate": 0.0004425108710422612, "loss": 3.3636, "step": 65665 }, { "epoch": 4.461883408071749, "grad_norm": 1.2373987436294556, "learning_rate": 0.00044246840603342846, "loss": 3.4218, "step": 65670 }, { "epoch": 4.46222312814241, "grad_norm": 1.498847484588623, "learning_rate": 0.0004424259410245958, "loss": 3.5793, "step": 65675 }, { "epoch": 4.462562848213072, "grad_norm": 0.9538022875785828, "learning_rate": 0.000442383476015763, "loss": 3.4345, "step": 65680 }, { "epoch": 4.462902568283734, "grad_norm": 1.4281748533248901, "learning_rate": 0.0004423410110069303, "loss": 3.4648, "step": 65685 }, { "epoch": 4.463242288354396, "grad_norm": 1.32038152217865, "learning_rate": 0.0004422985459980976, "loss": 3.524, "step": 65690 }, { "epoch": 4.463582008425058, "grad_norm": 1.537793755531311, "learning_rate": 0.00044225608098926486, "loss": 3.1064, "step": 65695 }, { "epoch": 4.46392172849572, "grad_norm": 1.3071922063827515, "learning_rate": 0.00044221361598043214, "loss": 3.2571, "step": 65700 }, { "epoch": 4.464261448566381, "grad_norm": 1.0834324359893799, "learning_rate": 0.0004421711509715994, "loss": 3.4399, "step": 65705 }, { "epoch": 4.464601168637043, "grad_norm": 1.2280709743499756, "learning_rate": 0.0004421286859627667, "loss": 3.2876, "step": 65710 }, { "epoch": 4.464940888707705, "grad_norm": 1.465113878250122, "learning_rate": 0.000442086220953934, "loss": 3.3957, "step": 65715 }, { "epoch": 4.465280608778366, "grad_norm": 1.5397124290466309, "learning_rate": 0.00044204375594510126, "loss": 3.4323, "step": 65720 }, { "epoch": 4.465620328849028, "grad_norm": 1.598325252532959, "learning_rate": 0.0004420012909362685, "loss": 3.3611, "step": 65725 }, { "epoch": 4.4659600489196905, "grad_norm": 1.2110744714736938, "learning_rate": 0.0004419588259274358, "loss": 3.2365, "step": 65730 }, { "epoch": 4.466299768990352, "grad_norm": 1.3236217498779297, "learning_rate": 0.0004419163609186031, "loss": 3.3395, "step": 
65735 }, { "epoch": 4.466639489061014, "grad_norm": 1.3183034658432007, "learning_rate": 0.0004418738959097703, "loss": 3.0762, "step": 65740 }, { "epoch": 4.466979209131676, "grad_norm": 1.3450924158096313, "learning_rate": 0.00044183143090093766, "loss": 3.3205, "step": 65745 }, { "epoch": 4.467318929202337, "grad_norm": 1.0088229179382324, "learning_rate": 0.00044178896589210494, "loss": 3.2895, "step": 65750 }, { "epoch": 4.467658649272999, "grad_norm": 1.1737381219863892, "learning_rate": 0.00044174650088327216, "loss": 3.3457, "step": 65755 }, { "epoch": 4.467998369343661, "grad_norm": 1.0963703393936157, "learning_rate": 0.00044170403587443944, "loss": 3.7216, "step": 65760 }, { "epoch": 4.468338089414322, "grad_norm": 1.1243270635604858, "learning_rate": 0.0004416615708656068, "loss": 3.3924, "step": 65765 }, { "epoch": 4.468677809484984, "grad_norm": 1.384751796722412, "learning_rate": 0.000441619105856774, "loss": 3.4852, "step": 65770 }, { "epoch": 4.4690175295556465, "grad_norm": 1.1117582321166992, "learning_rate": 0.0004415766408479413, "loss": 3.4956, "step": 65775 }, { "epoch": 4.469357249626308, "grad_norm": 1.0396593809127808, "learning_rate": 0.0004415341758391086, "loss": 3.1354, "step": 65780 }, { "epoch": 4.46969696969697, "grad_norm": 1.238250970840454, "learning_rate": 0.00044149171083027584, "loss": 3.2459, "step": 65785 }, { "epoch": 4.470036689767632, "grad_norm": 1.5982269048690796, "learning_rate": 0.0004414492458214431, "loss": 3.3397, "step": 65790 }, { "epoch": 4.470376409838293, "grad_norm": 1.238472819328308, "learning_rate": 0.0004414067808126104, "loss": 3.5301, "step": 65795 }, { "epoch": 4.470716129908955, "grad_norm": 1.2896510362625122, "learning_rate": 0.0004413643158037777, "loss": 3.3317, "step": 65800 }, { "epoch": 4.471055849979617, "grad_norm": 1.508818507194519, "learning_rate": 0.00044132185079494496, "loss": 3.4702, "step": 65805 }, { "epoch": 4.471395570050278, "grad_norm": 1.2464262247085571, "learning_rate": 
0.00044127938578611224, "loss": 3.5316, "step": 65810 }, { "epoch": 4.47173529012094, "grad_norm": 1.137633204460144, "learning_rate": 0.0004412369207772795, "loss": 3.3129, "step": 65815 }, { "epoch": 4.4720750101916025, "grad_norm": 1.4849166870117188, "learning_rate": 0.0004411944557684468, "loss": 3.3729, "step": 65820 }, { "epoch": 4.472414730262264, "grad_norm": 1.5302817821502686, "learning_rate": 0.0004411519907596141, "loss": 3.313, "step": 65825 }, { "epoch": 4.472754450332926, "grad_norm": 1.3100262880325317, "learning_rate": 0.0004411095257507814, "loss": 3.354, "step": 65830 }, { "epoch": 4.473094170403588, "grad_norm": 1.3345777988433838, "learning_rate": 0.00044106706074194865, "loss": 3.1921, "step": 65835 }, { "epoch": 4.473433890474249, "grad_norm": 1.2249996662139893, "learning_rate": 0.0004410245957331159, "loss": 3.4763, "step": 65840 }, { "epoch": 4.473773610544911, "grad_norm": 1.26399564743042, "learning_rate": 0.0004409821307242832, "loss": 3.1622, "step": 65845 }, { "epoch": 4.474113330615573, "grad_norm": 1.2803853750228882, "learning_rate": 0.0004409396657154505, "loss": 3.1581, "step": 65850 }, { "epoch": 4.474453050686234, "grad_norm": 2.2945988178253174, "learning_rate": 0.00044089720070661777, "loss": 3.1553, "step": 65855 }, { "epoch": 4.474792770756896, "grad_norm": 1.0877143144607544, "learning_rate": 0.00044085473569778505, "loss": 3.1877, "step": 65860 }, { "epoch": 4.4751324908275585, "grad_norm": 1.2125128507614136, "learning_rate": 0.0004408122706889523, "loss": 3.4046, "step": 65865 }, { "epoch": 4.47547221089822, "grad_norm": 1.121749758720398, "learning_rate": 0.0004407698056801196, "loss": 3.3398, "step": 65870 }, { "epoch": 4.475811930968882, "grad_norm": 1.6879500150680542, "learning_rate": 0.0004407273406712869, "loss": 3.4991, "step": 65875 }, { "epoch": 4.476151651039544, "grad_norm": 1.2349728345870972, "learning_rate": 0.0004406848756624541, "loss": 3.378, "step": 65880 }, { "epoch": 4.476491371110205, "grad_norm": 
1.606169581413269, "learning_rate": 0.00044064241065362145, "loss": 3.1925, "step": 65885 }, { "epoch": 4.476831091180867, "grad_norm": 1.2812409400939941, "learning_rate": 0.0004405999456447887, "loss": 3.6042, "step": 65890 }, { "epoch": 4.477170811251529, "grad_norm": 1.287366509437561, "learning_rate": 0.00044055748063595595, "loss": 3.4309, "step": 65895 }, { "epoch": 4.47751053132219, "grad_norm": 1.4906041622161865, "learning_rate": 0.0004405150156271233, "loss": 3.3026, "step": 65900 }, { "epoch": 4.477850251392852, "grad_norm": 1.666921854019165, "learning_rate": 0.00044047255061829057, "loss": 3.3499, "step": 65905 }, { "epoch": 4.4781899714635145, "grad_norm": 1.1748632192611694, "learning_rate": 0.0004404300856094578, "loss": 3.24, "step": 65910 }, { "epoch": 4.478529691534176, "grad_norm": 1.0894657373428345, "learning_rate": 0.00044038762060062507, "loss": 3.5849, "step": 65915 }, { "epoch": 4.478869411604838, "grad_norm": 1.3669590950012207, "learning_rate": 0.0004403451555917924, "loss": 3.2268, "step": 65920 }, { "epoch": 4.4792091316755, "grad_norm": 1.335460901260376, "learning_rate": 0.00044030269058295963, "loss": 3.4343, "step": 65925 }, { "epoch": 4.479548851746161, "grad_norm": 1.2276053428649902, "learning_rate": 0.0004402602255741269, "loss": 3.3641, "step": 65930 }, { "epoch": 4.479888571816823, "grad_norm": 1.8997151851654053, "learning_rate": 0.00044021776056529425, "loss": 3.3485, "step": 65935 }, { "epoch": 4.480228291887485, "grad_norm": 1.067787528038025, "learning_rate": 0.00044017529555646147, "loss": 3.3596, "step": 65940 }, { "epoch": 4.480568011958146, "grad_norm": 1.782170295715332, "learning_rate": 0.00044013283054762875, "loss": 3.2958, "step": 65945 }, { "epoch": 4.480907732028808, "grad_norm": 1.221746802330017, "learning_rate": 0.00044009036553879603, "loss": 3.4481, "step": 65950 }, { "epoch": 4.4812474520994705, "grad_norm": 1.4416263103485107, "learning_rate": 0.0004400479005299633, "loss": 3.5006, "step": 65955 }, { 
"epoch": 4.481587172170132, "grad_norm": 1.4606362581253052, "learning_rate": 0.0004400054355211306, "loss": 3.482, "step": 65960 }, { "epoch": 4.481926892240794, "grad_norm": 1.1523858308792114, "learning_rate": 0.0004399629705122979, "loss": 3.3268, "step": 65965 }, { "epoch": 4.482266612311455, "grad_norm": 1.1941874027252197, "learning_rate": 0.00043992050550346515, "loss": 3.1771, "step": 65970 }, { "epoch": 4.482606332382117, "grad_norm": 1.2420696020126343, "learning_rate": 0.00043987804049463243, "loss": 3.3355, "step": 65975 }, { "epoch": 4.482946052452779, "grad_norm": 1.272029995918274, "learning_rate": 0.0004398355754857997, "loss": 3.357, "step": 65980 }, { "epoch": 4.48328577252344, "grad_norm": 1.6207479238510132, "learning_rate": 0.00043979311047696694, "loss": 3.5506, "step": 65985 }, { "epoch": 4.483625492594102, "grad_norm": 0.9432995915412903, "learning_rate": 0.0004397506454681343, "loss": 3.317, "step": 65990 }, { "epoch": 4.483965212664764, "grad_norm": 1.17629873752594, "learning_rate": 0.00043970818045930155, "loss": 3.2814, "step": 65995 }, { "epoch": 4.484304932735426, "grad_norm": 1.4419777393341064, "learning_rate": 0.00043966571545046883, "loss": 3.3202, "step": 66000 }, { "epoch": 4.484644652806088, "grad_norm": 1.5214406251907349, "learning_rate": 0.0004396232504416361, "loss": 3.5156, "step": 66005 }, { "epoch": 4.48498437287675, "grad_norm": 1.3351914882659912, "learning_rate": 0.0004395807854328034, "loss": 3.2144, "step": 66010 }, { "epoch": 4.485324092947411, "grad_norm": 1.3291962146759033, "learning_rate": 0.0004395383204239707, "loss": 3.5461, "step": 66015 }, { "epoch": 4.485663813018073, "grad_norm": 1.1586024761199951, "learning_rate": 0.0004394958554151379, "loss": 3.34, "step": 66020 }, { "epoch": 4.486003533088735, "grad_norm": 1.2301808595657349, "learning_rate": 0.00043945339040630523, "loss": 3.283, "step": 66025 }, { "epoch": 4.486343253159396, "grad_norm": 1.2328884601593018, "learning_rate": 0.0004394109253974725, 
"loss": 3.3074, "step": 66030 }, { "epoch": 4.486682973230058, "grad_norm": 1.5186207294464111, "learning_rate": 0.00043936846038863974, "loss": 3.3046, "step": 66035 }, { "epoch": 4.4870226933007205, "grad_norm": 1.2852452993392944, "learning_rate": 0.0004393259953798071, "loss": 3.5305, "step": 66040 }, { "epoch": 4.487362413371382, "grad_norm": 1.5636378526687622, "learning_rate": 0.00043928353037097435, "loss": 3.4777, "step": 66045 }, { "epoch": 4.487702133442044, "grad_norm": 1.2313247919082642, "learning_rate": 0.0004392410653621416, "loss": 3.4995, "step": 66050 }, { "epoch": 4.488041853512706, "grad_norm": 1.181322455406189, "learning_rate": 0.00043919860035330886, "loss": 3.2527, "step": 66055 }, { "epoch": 4.488381573583367, "grad_norm": 1.1798088550567627, "learning_rate": 0.0004391561353444762, "loss": 3.3343, "step": 66060 }, { "epoch": 4.488721293654029, "grad_norm": 1.1979377269744873, "learning_rate": 0.0004391136703356434, "loss": 3.4731, "step": 66065 }, { "epoch": 4.489061013724691, "grad_norm": 1.6287294626235962, "learning_rate": 0.0004390712053268107, "loss": 3.59, "step": 66070 }, { "epoch": 4.489400733795352, "grad_norm": 1.4827334880828857, "learning_rate": 0.00043902874031797803, "loss": 3.3804, "step": 66075 }, { "epoch": 4.489740453866014, "grad_norm": 1.3676730394363403, "learning_rate": 0.00043898627530914526, "loss": 3.1742, "step": 66080 }, { "epoch": 4.4900801739366765, "grad_norm": 1.5140042304992676, "learning_rate": 0.00043894381030031254, "loss": 3.3346, "step": 66085 }, { "epoch": 4.490419894007338, "grad_norm": 1.4747191667556763, "learning_rate": 0.0004389013452914798, "loss": 3.5083, "step": 66090 }, { "epoch": 4.490759614078, "grad_norm": 1.4334732294082642, "learning_rate": 0.0004388588802826471, "loss": 3.161, "step": 66095 }, { "epoch": 4.491099334148662, "grad_norm": 1.4362187385559082, "learning_rate": 0.0004388164152738144, "loss": 3.2609, "step": 66100 }, { "epoch": 4.491439054219323, "grad_norm": 
1.1318550109863281, "learning_rate": 0.00043877395026498166, "loss": 3.3588, "step": 66105 }, { "epoch": 4.491778774289985, "grad_norm": 1.6432381868362427, "learning_rate": 0.00043873148525614894, "loss": 3.3055, "step": 66110 }, { "epoch": 4.492118494360647, "grad_norm": 1.3103276491165161, "learning_rate": 0.0004386890202473162, "loss": 3.3769, "step": 66115 }, { "epoch": 4.492458214431308, "grad_norm": 1.1612088680267334, "learning_rate": 0.0004386465552384835, "loss": 3.6423, "step": 66120 }, { "epoch": 4.49279793450197, "grad_norm": 0.896953821182251, "learning_rate": 0.0004386040902296507, "loss": 3.3788, "step": 66125 }, { "epoch": 4.4931376545726325, "grad_norm": 1.472713828086853, "learning_rate": 0.00043856162522081806, "loss": 3.3481, "step": 66130 }, { "epoch": 4.493477374643294, "grad_norm": 1.4582467079162598, "learning_rate": 0.00043851916021198534, "loss": 3.2072, "step": 66135 }, { "epoch": 4.493817094713956, "grad_norm": 1.1843140125274658, "learning_rate": 0.00043847669520315257, "loss": 3.444, "step": 66140 }, { "epoch": 4.494156814784618, "grad_norm": 1.2419170141220093, "learning_rate": 0.0004384342301943199, "loss": 3.4263, "step": 66145 }, { "epoch": 4.494496534855279, "grad_norm": 1.168869137763977, "learning_rate": 0.0004383917651854872, "loss": 3.4521, "step": 66150 }, { "epoch": 4.494836254925941, "grad_norm": 1.040709137916565, "learning_rate": 0.0004383493001766544, "loss": 3.3448, "step": 66155 }, { "epoch": 4.495175974996603, "grad_norm": 1.368626356124878, "learning_rate": 0.00043830683516782174, "loss": 3.5859, "step": 66160 }, { "epoch": 4.495515695067264, "grad_norm": 1.4113024473190308, "learning_rate": 0.000438264370158989, "loss": 3.3075, "step": 66165 }, { "epoch": 4.495855415137926, "grad_norm": 1.6027354001998901, "learning_rate": 0.0004382219051501563, "loss": 3.2449, "step": 66170 }, { "epoch": 4.4961951352085885, "grad_norm": 1.5478187799453735, "learning_rate": 0.00043817944014132353, "loss": 3.4238, "step": 66175 }, { 
"epoch": 4.49653485527925, "grad_norm": 1.2291650772094727, "learning_rate": 0.00043813697513249086, "loss": 3.3139, "step": 66180 }, { "epoch": 4.496874575349912, "grad_norm": 1.305246353149414, "learning_rate": 0.00043809451012365814, "loss": 3.2582, "step": 66185 }, { "epoch": 4.497214295420574, "grad_norm": 1.2623406648635864, "learning_rate": 0.00043805204511482537, "loss": 3.6428, "step": 66190 }, { "epoch": 4.497554015491235, "grad_norm": 1.1486842632293701, "learning_rate": 0.0004380095801059927, "loss": 3.4989, "step": 66195 }, { "epoch": 4.497893735561897, "grad_norm": 1.6312412023544312, "learning_rate": 0.00043796711509716, "loss": 3.4279, "step": 66200 }, { "epoch": 4.498233455632559, "grad_norm": 1.5772476196289062, "learning_rate": 0.0004379246500883272, "loss": 3.2234, "step": 66205 }, { "epoch": 4.49857317570322, "grad_norm": 1.0412981510162354, "learning_rate": 0.0004378821850794945, "loss": 3.54, "step": 66210 }, { "epoch": 4.498912895773882, "grad_norm": 1.1969448328018188, "learning_rate": 0.0004378397200706618, "loss": 3.3146, "step": 66215 }, { "epoch": 4.4992526158445445, "grad_norm": 1.1269813776016235, "learning_rate": 0.00043779725506182905, "loss": 3.3538, "step": 66220 }, { "epoch": 4.499592335915206, "grad_norm": 1.669057011604309, "learning_rate": 0.00043775479005299633, "loss": 3.5399, "step": 66225 }, { "epoch": 4.499932055985868, "grad_norm": 1.1539942026138306, "learning_rate": 0.00043771232504416366, "loss": 3.3702, "step": 66230 }, { "epoch": 4.500271776056529, "grad_norm": 1.3414689302444458, "learning_rate": 0.0004376698600353309, "loss": 3.2682, "step": 66235 }, { "epoch": 4.500611496127191, "grad_norm": 1.4259454011917114, "learning_rate": 0.00043762739502649817, "loss": 3.147, "step": 66240 }, { "epoch": 4.500951216197853, "grad_norm": 1.5548386573791504, "learning_rate": 0.00043758493001766545, "loss": 3.4102, "step": 66245 }, { "epoch": 4.501290936268514, "grad_norm": 1.3012155294418335, "learning_rate": 
0.00043754246500883273, "loss": 3.392, "step": 66250 }, { "epoch": 4.501630656339176, "grad_norm": 1.2072272300720215, "learning_rate": 0.0004375, "loss": 3.1091, "step": 66255 }, { "epoch": 4.501970376409838, "grad_norm": 1.0580180883407593, "learning_rate": 0.0004374575349911673, "loss": 3.362, "step": 66260 }, { "epoch": 4.5023100964805, "grad_norm": 1.132215142250061, "learning_rate": 0.00043741506998233457, "loss": 3.2417, "step": 66265 }, { "epoch": 4.502649816551162, "grad_norm": 1.4146593809127808, "learning_rate": 0.00043737260497350185, "loss": 3.2533, "step": 66270 }, { "epoch": 4.502989536621824, "grad_norm": 1.3612900972366333, "learning_rate": 0.00043733013996466913, "loss": 3.476, "step": 66275 }, { "epoch": 4.503329256692485, "grad_norm": 1.5152772665023804, "learning_rate": 0.00043728767495583636, "loss": 3.3961, "step": 66280 }, { "epoch": 4.503668976763147, "grad_norm": 1.245917558670044, "learning_rate": 0.0004372452099470037, "loss": 3.5954, "step": 66285 }, { "epoch": 4.504008696833809, "grad_norm": 1.347381830215454, "learning_rate": 0.00043720274493817097, "loss": 3.5324, "step": 66290 }, { "epoch": 4.50434841690447, "grad_norm": 1.462282419204712, "learning_rate": 0.0004371602799293382, "loss": 3.3765, "step": 66295 }, { "epoch": 4.504688136975132, "grad_norm": 1.1182411909103394, "learning_rate": 0.00043711781492050553, "loss": 3.4385, "step": 66300 }, { "epoch": 4.5050278570457944, "grad_norm": 1.068278431892395, "learning_rate": 0.0004370753499116728, "loss": 3.486, "step": 66305 }, { "epoch": 4.505367577116456, "grad_norm": 1.416312575340271, "learning_rate": 0.00043703288490284004, "loss": 3.1971, "step": 66310 }, { "epoch": 4.505707297187118, "grad_norm": 1.3678183555603027, "learning_rate": 0.0004369904198940073, "loss": 3.333, "step": 66315 }, { "epoch": 4.50604701725778, "grad_norm": 1.3626506328582764, "learning_rate": 0.00043694795488517465, "loss": 3.3866, "step": 66320 }, { "epoch": 4.506386737328441, "grad_norm": 
1.2888485193252563, "learning_rate": 0.0004369054898763419, "loss": 3.5757, "step": 66325 }, { "epoch": 4.506726457399103, "grad_norm": 1.3858978748321533, "learning_rate": 0.00043686302486750916, "loss": 3.3436, "step": 66330 }, { "epoch": 4.507066177469765, "grad_norm": 1.1203644275665283, "learning_rate": 0.0004368205598586765, "loss": 3.4947, "step": 66335 }, { "epoch": 4.507405897540426, "grad_norm": 2.0392682552337646, "learning_rate": 0.0004367780948498437, "loss": 2.9223, "step": 66340 }, { "epoch": 4.507745617611088, "grad_norm": 1.318495512008667, "learning_rate": 0.000436735629841011, "loss": 3.6611, "step": 66345 }, { "epoch": 4.5080853376817505, "grad_norm": 1.2729641199111938, "learning_rate": 0.0004366931648321783, "loss": 3.3041, "step": 66350 }, { "epoch": 4.508425057752412, "grad_norm": 1.2456473112106323, "learning_rate": 0.0004366506998233456, "loss": 3.394, "step": 66355 }, { "epoch": 4.508764777823074, "grad_norm": 1.0109889507293701, "learning_rate": 0.00043660823481451284, "loss": 3.5113, "step": 66360 }, { "epoch": 4.509104497893736, "grad_norm": 1.292382001876831, "learning_rate": 0.0004365657698056801, "loss": 3.723, "step": 66365 }, { "epoch": 4.509444217964397, "grad_norm": 1.6722544431686401, "learning_rate": 0.00043652330479684745, "loss": 3.4244, "step": 66370 }, { "epoch": 4.509783938035059, "grad_norm": 1.1697906255722046, "learning_rate": 0.0004364808397880147, "loss": 3.222, "step": 66375 }, { "epoch": 4.510123658105721, "grad_norm": 1.2380136251449585, "learning_rate": 0.00043643837477918196, "loss": 3.4012, "step": 66380 }, { "epoch": 4.510463378176382, "grad_norm": 1.381269931793213, "learning_rate": 0.0004363959097703493, "loss": 3.5848, "step": 66385 }, { "epoch": 4.510803098247044, "grad_norm": 1.37139892578125, "learning_rate": 0.0004363534447615165, "loss": 3.3916, "step": 66390 }, { "epoch": 4.5111428183177065, "grad_norm": 1.139977216720581, "learning_rate": 0.0004363109797526838, "loss": 3.2656, "step": 66395 }, { 
"epoch": 4.511482538388368, "grad_norm": 1.79293692111969, "learning_rate": 0.0004362685147438511, "loss": 3.2395, "step": 66400 }, { "epoch": 4.51182225845903, "grad_norm": 1.585837721824646, "learning_rate": 0.00043622604973501836, "loss": 3.4461, "step": 66405 }, { "epoch": 4.512161978529692, "grad_norm": 1.7228378057479858, "learning_rate": 0.00043618358472618564, "loss": 3.3599, "step": 66410 }, { "epoch": 4.512501698600353, "grad_norm": 1.167283058166504, "learning_rate": 0.0004361411197173529, "loss": 3.3604, "step": 66415 }, { "epoch": 4.512841418671015, "grad_norm": 1.4128283262252808, "learning_rate": 0.0004360986547085202, "loss": 3.3532, "step": 66420 }, { "epoch": 4.513181138741677, "grad_norm": 1.376621961593628, "learning_rate": 0.0004360561896996875, "loss": 3.2359, "step": 66425 }, { "epoch": 4.513520858812338, "grad_norm": 1.3917226791381836, "learning_rate": 0.00043601372469085476, "loss": 3.2214, "step": 66430 }, { "epoch": 4.513860578883, "grad_norm": 1.2406479120254517, "learning_rate": 0.000435971259682022, "loss": 3.5386, "step": 66435 }, { "epoch": 4.5142002989536625, "grad_norm": 1.5444687604904175, "learning_rate": 0.0004359287946731893, "loss": 3.4791, "step": 66440 }, { "epoch": 4.514540019024324, "grad_norm": 1.2129372358322144, "learning_rate": 0.0004358863296643566, "loss": 3.3053, "step": 66445 }, { "epoch": 4.514879739094986, "grad_norm": 1.4181277751922607, "learning_rate": 0.0004358438646555238, "loss": 3.4986, "step": 66450 }, { "epoch": 4.515219459165648, "grad_norm": 1.315279245376587, "learning_rate": 0.00043580139964669116, "loss": 3.3042, "step": 66455 }, { "epoch": 4.515559179236309, "grad_norm": 1.024491310119629, "learning_rate": 0.00043575893463785844, "loss": 3.3555, "step": 66460 }, { "epoch": 4.515898899306971, "grad_norm": 1.5383292436599731, "learning_rate": 0.00043571646962902566, "loss": 3.4799, "step": 66465 }, { "epoch": 4.516238619377633, "grad_norm": 1.2763065099716187, "learning_rate": 
0.00043567400462019294, "loss": 3.415, "step": 66470 }, { "epoch": 4.516578339448294, "grad_norm": 1.3690873384475708, "learning_rate": 0.0004356315396113603, "loss": 3.0825, "step": 66475 }, { "epoch": 4.516918059518956, "grad_norm": 1.281368374824524, "learning_rate": 0.0004355890746025275, "loss": 3.1436, "step": 66480 }, { "epoch": 4.5172577795896185, "grad_norm": 1.214491367340088, "learning_rate": 0.0004355466095936948, "loss": 3.3445, "step": 66485 }, { "epoch": 4.51759749966028, "grad_norm": 1.1752866506576538, "learning_rate": 0.0004355041445848621, "loss": 3.4288, "step": 66490 }, { "epoch": 4.517937219730942, "grad_norm": 1.2723462581634521, "learning_rate": 0.00043546167957602934, "loss": 3.4124, "step": 66495 }, { "epoch": 4.518276939801604, "grad_norm": 1.3025944232940674, "learning_rate": 0.0004354192145671966, "loss": 3.5278, "step": 66500 }, { "epoch": 4.518616659872265, "grad_norm": 1.0669792890548706, "learning_rate": 0.0004353767495583639, "loss": 3.2146, "step": 66505 }, { "epoch": 4.518956379942927, "grad_norm": 1.3953256607055664, "learning_rate": 0.0004353342845495312, "loss": 3.528, "step": 66510 }, { "epoch": 4.519296100013589, "grad_norm": 1.3945294618606567, "learning_rate": 0.00043529181954069846, "loss": 3.4062, "step": 66515 }, { "epoch": 4.51963582008425, "grad_norm": 1.163270354270935, "learning_rate": 0.00043524935453186574, "loss": 3.4275, "step": 66520 }, { "epoch": 4.519975540154912, "grad_norm": 1.10746431350708, "learning_rate": 0.0004352068895230331, "loss": 3.4447, "step": 66525 }, { "epoch": 4.5203152602255745, "grad_norm": 1.075298547744751, "learning_rate": 0.0004351644245142003, "loss": 3.4375, "step": 66530 }, { "epoch": 4.520654980296236, "grad_norm": 1.0999126434326172, "learning_rate": 0.0004351219595053676, "loss": 3.1794, "step": 66535 }, { "epoch": 4.520994700366898, "grad_norm": 2.437659978866577, "learning_rate": 0.00043507949449653487, "loss": 3.5126, "step": 66540 }, { "epoch": 4.52133442043756, "grad_norm": 
1.1533176898956299, "learning_rate": 0.00043503702948770215, "loss": 3.3422, "step": 66545 }, { "epoch": 4.521674140508221, "grad_norm": 1.3625106811523438, "learning_rate": 0.0004349945644788694, "loss": 3.3855, "step": 66550 }, { "epoch": 4.522013860578883, "grad_norm": 1.1932308673858643, "learning_rate": 0.0004349520994700367, "loss": 3.3495, "step": 66555 }, { "epoch": 4.522353580649545, "grad_norm": 1.4938688278198242, "learning_rate": 0.000434909634461204, "loss": 3.3089, "step": 66560 }, { "epoch": 4.522693300720206, "grad_norm": 1.2059990167617798, "learning_rate": 0.00043486716945237127, "loss": 3.4225, "step": 66565 }, { "epoch": 4.523033020790868, "grad_norm": 1.122338891029358, "learning_rate": 0.00043482470444353855, "loss": 3.6155, "step": 66570 }, { "epoch": 4.5233727408615305, "grad_norm": 1.5197409391403198, "learning_rate": 0.00043478223943470577, "loss": 3.1418, "step": 66575 }, { "epoch": 4.523712460932192, "grad_norm": 1.106939673423767, "learning_rate": 0.0004347397744258731, "loss": 3.4011, "step": 66580 }, { "epoch": 4.524052181002854, "grad_norm": 1.3026679754257202, "learning_rate": 0.0004346973094170404, "loss": 3.2912, "step": 66585 }, { "epoch": 4.524391901073516, "grad_norm": 1.037402868270874, "learning_rate": 0.0004346548444082076, "loss": 3.5833, "step": 66590 }, { "epoch": 4.524731621144177, "grad_norm": 0.9756584167480469, "learning_rate": 0.00043461237939937495, "loss": 3.3315, "step": 66595 }, { "epoch": 4.525071341214839, "grad_norm": 1.253584384918213, "learning_rate": 0.0004345699143905422, "loss": 3.5957, "step": 66600 }, { "epoch": 4.525411061285501, "grad_norm": 1.6219290494918823, "learning_rate": 0.00043452744938170945, "loss": 3.5157, "step": 66605 }, { "epoch": 4.525750781356162, "grad_norm": 1.3429605960845947, "learning_rate": 0.00043448498437287673, "loss": 3.3653, "step": 66610 }, { "epoch": 4.5260905014268245, "grad_norm": 1.2837388515472412, "learning_rate": 0.00043444251936404407, "loss": 3.3848, "step": 66615 
}, { "epoch": 4.5264302214974865, "grad_norm": 1.2395422458648682, "learning_rate": 0.0004344000543552113, "loss": 3.208, "step": 66620 }, { "epoch": 4.526769941568148, "grad_norm": 0.9186977744102478, "learning_rate": 0.00043435758934637857, "loss": 3.1547, "step": 66625 }, { "epoch": 4.52710966163881, "grad_norm": 1.8649065494537354, "learning_rate": 0.0004343151243375459, "loss": 3.2641, "step": 66630 }, { "epoch": 4.527449381709472, "grad_norm": 1.4788283109664917, "learning_rate": 0.00043427265932871313, "loss": 3.3241, "step": 66635 }, { "epoch": 4.527789101780133, "grad_norm": 1.1150208711624146, "learning_rate": 0.0004342301943198804, "loss": 3.2423, "step": 66640 }, { "epoch": 4.528128821850795, "grad_norm": 1.3126847743988037, "learning_rate": 0.0004341877293110477, "loss": 3.4145, "step": 66645 }, { "epoch": 4.528468541921457, "grad_norm": 1.1369072198867798, "learning_rate": 0.00043414526430221497, "loss": 3.394, "step": 66650 }, { "epoch": 4.528808261992118, "grad_norm": 1.7043060064315796, "learning_rate": 0.00043410279929338225, "loss": 3.4751, "step": 66655 }, { "epoch": 4.5291479820627805, "grad_norm": 6.809563159942627, "learning_rate": 0.00043406033428454953, "loss": 3.2887, "step": 66660 }, { "epoch": 4.5294877021334425, "grad_norm": 1.348393440246582, "learning_rate": 0.0004340178692757168, "loss": 3.3608, "step": 66665 }, { "epoch": 4.529827422204104, "grad_norm": 1.2954281568527222, "learning_rate": 0.0004339754042668841, "loss": 3.5009, "step": 66670 }, { "epoch": 4.530167142274766, "grad_norm": 1.3485450744628906, "learning_rate": 0.0004339329392580514, "loss": 3.67, "step": 66675 }, { "epoch": 4.530506862345427, "grad_norm": 1.2878555059432983, "learning_rate": 0.0004338904742492186, "loss": 3.5668, "step": 66680 }, { "epoch": 4.530846582416089, "grad_norm": 1.3591365814208984, "learning_rate": 0.00043384800924038593, "loss": 3.485, "step": 66685 }, { "epoch": 4.531186302486751, "grad_norm": 1.3986878395080566, "learning_rate": 
0.0004338055442315532, "loss": 3.3756, "step": 66690 }, { "epoch": 4.531526022557412, "grad_norm": 1.1958180665969849, "learning_rate": 0.0004337630792227205, "loss": 3.392, "step": 66695 }, { "epoch": 4.531865742628074, "grad_norm": 1.085035800933838, "learning_rate": 0.0004337206142138878, "loss": 3.3128, "step": 66700 }, { "epoch": 4.5322054626987365, "grad_norm": 1.8398066759109497, "learning_rate": 0.00043367814920505505, "loss": 3.4859, "step": 66705 }, { "epoch": 4.532545182769398, "grad_norm": 1.2513765096664429, "learning_rate": 0.00043363568419622233, "loss": 3.4177, "step": 66710 }, { "epoch": 4.53288490284006, "grad_norm": 1.3792545795440674, "learning_rate": 0.0004335932191873896, "loss": 3.322, "step": 66715 }, { "epoch": 4.533224622910722, "grad_norm": 1.7527636289596558, "learning_rate": 0.0004335507541785569, "loss": 3.5023, "step": 66720 }, { "epoch": 4.533564342981383, "grad_norm": 1.3636993169784546, "learning_rate": 0.0004335082891697242, "loss": 3.3637, "step": 66725 }, { "epoch": 4.533904063052045, "grad_norm": 1.1737921237945557, "learning_rate": 0.0004334658241608914, "loss": 3.2871, "step": 66730 }, { "epoch": 4.534243783122707, "grad_norm": 1.0852701663970947, "learning_rate": 0.00043342335915205873, "loss": 3.4215, "step": 66735 }, { "epoch": 4.534583503193368, "grad_norm": 1.1495317220687866, "learning_rate": 0.000433380894143226, "loss": 3.3906, "step": 66740 }, { "epoch": 4.53492322326403, "grad_norm": 1.2884387969970703, "learning_rate": 0.00043333842913439324, "loss": 3.5447, "step": 66745 }, { "epoch": 4.5352629433346925, "grad_norm": 1.5792795419692993, "learning_rate": 0.0004332959641255606, "loss": 3.3917, "step": 66750 }, { "epoch": 4.535602663405354, "grad_norm": 1.5489674806594849, "learning_rate": 0.00043325349911672785, "loss": 3.2701, "step": 66755 }, { "epoch": 4.535942383476016, "grad_norm": 1.1288083791732788, "learning_rate": 0.0004332110341078951, "loss": 3.4154, "step": 66760 }, { "epoch": 4.536282103546678, 
"grad_norm": 1.074362874031067, "learning_rate": 0.00043316856909906236, "loss": 3.2136, "step": 66765 }, { "epoch": 4.536621823617339, "grad_norm": 1.185722827911377, "learning_rate": 0.0004331261040902297, "loss": 3.3068, "step": 66770 }, { "epoch": 4.536961543688001, "grad_norm": 1.3354748487472534, "learning_rate": 0.0004330836390813969, "loss": 3.5224, "step": 66775 }, { "epoch": 4.537301263758663, "grad_norm": 1.375928282737732, "learning_rate": 0.0004330411740725642, "loss": 3.1319, "step": 66780 }, { "epoch": 4.537640983829324, "grad_norm": 1.5698498487472534, "learning_rate": 0.00043299870906373153, "loss": 3.5776, "step": 66785 }, { "epoch": 4.537980703899986, "grad_norm": 1.4279965162277222, "learning_rate": 0.00043295624405489876, "loss": 3.4579, "step": 66790 }, { "epoch": 4.5383204239706485, "grad_norm": 1.5345234870910645, "learning_rate": 0.00043291377904606604, "loss": 3.4556, "step": 66795 }, { "epoch": 4.53866014404131, "grad_norm": 1.681439757347107, "learning_rate": 0.0004328713140372333, "loss": 3.5042, "step": 66800 }, { "epoch": 4.538999864111972, "grad_norm": 1.288156509399414, "learning_rate": 0.0004328288490284006, "loss": 3.5038, "step": 66805 }, { "epoch": 4.539339584182634, "grad_norm": 1.1293562650680542, "learning_rate": 0.0004327863840195679, "loss": 3.8091, "step": 66810 }, { "epoch": 4.539679304253295, "grad_norm": 1.8483216762542725, "learning_rate": 0.00043274391901073516, "loss": 3.622, "step": 66815 }, { "epoch": 4.540019024323957, "grad_norm": 1.1729626655578613, "learning_rate": 0.00043270145400190244, "loss": 3.4741, "step": 66820 }, { "epoch": 4.540358744394619, "grad_norm": 1.1255602836608887, "learning_rate": 0.0004326589889930697, "loss": 3.5362, "step": 66825 }, { "epoch": 4.54069846446528, "grad_norm": 1.0612390041351318, "learning_rate": 0.000432616523984237, "loss": 3.3543, "step": 66830 }, { "epoch": 4.541038184535942, "grad_norm": 1.6433699131011963, "learning_rate": 0.0004325740589754042, "loss": 3.4667, "step": 
66835 }, { "epoch": 4.5413779046066045, "grad_norm": 1.2859869003295898, "learning_rate": 0.00043253159396657156, "loss": 3.3566, "step": 66840 }, { "epoch": 4.541717624677266, "grad_norm": 1.8663712739944458, "learning_rate": 0.00043248912895773884, "loss": 3.2565, "step": 66845 }, { "epoch": 4.542057344747928, "grad_norm": 1.372674584388733, "learning_rate": 0.00043244666394890607, "loss": 3.5418, "step": 66850 }, { "epoch": 4.54239706481859, "grad_norm": 1.263771414756775, "learning_rate": 0.0004324041989400734, "loss": 3.3657, "step": 66855 }, { "epoch": 4.542736784889251, "grad_norm": 1.0267239809036255, "learning_rate": 0.0004323617339312407, "loss": 3.4303, "step": 66860 }, { "epoch": 4.543076504959913, "grad_norm": 2.1821274757385254, "learning_rate": 0.00043231926892240796, "loss": 3.4603, "step": 66865 }, { "epoch": 4.543416225030575, "grad_norm": 1.6453986167907715, "learning_rate": 0.0004322768039135752, "loss": 3.3733, "step": 66870 }, { "epoch": 4.543755945101236, "grad_norm": 1.2520709037780762, "learning_rate": 0.0004322343389047425, "loss": 3.468, "step": 66875 }, { "epoch": 4.5440956651718984, "grad_norm": 1.2969334125518799, "learning_rate": 0.0004321918738959098, "loss": 3.5407, "step": 66880 }, { "epoch": 4.54443538524256, "grad_norm": 1.4975364208221436, "learning_rate": 0.00043214940888707703, "loss": 3.3129, "step": 66885 }, { "epoch": 4.544775105313222, "grad_norm": 2.1521947383880615, "learning_rate": 0.00043210694387824436, "loss": 3.2219, "step": 66890 }, { "epoch": 4.545114825383884, "grad_norm": 1.2699077129364014, "learning_rate": 0.00043206447886941164, "loss": 3.264, "step": 66895 }, { "epoch": 4.545454545454545, "grad_norm": 1.3657559156417847, "learning_rate": 0.00043202201386057887, "loss": 3.2437, "step": 66900 }, { "epoch": 4.545794265525207, "grad_norm": 1.3611665964126587, "learning_rate": 0.00043197954885174615, "loss": 3.3088, "step": 66905 }, { "epoch": 4.546133985595869, "grad_norm": 1.4733532667160034, "learning_rate": 
0.0004319370838429135, "loss": 3.539, "step": 66910 }, { "epoch": 4.54647370566653, "grad_norm": 1.0689005851745605, "learning_rate": 0.0004318946188340807, "loss": 3.4022, "step": 66915 }, { "epoch": 4.546813425737192, "grad_norm": 1.1042914390563965, "learning_rate": 0.000431852153825248, "loss": 3.3692, "step": 66920 }, { "epoch": 4.5471531458078545, "grad_norm": 1.261460304260254, "learning_rate": 0.0004318096888164153, "loss": 3.5415, "step": 66925 }, { "epoch": 4.547492865878516, "grad_norm": 1.166162371635437, "learning_rate": 0.00043176722380758255, "loss": 3.554, "step": 66930 }, { "epoch": 4.547832585949178, "grad_norm": 1.1752808094024658, "learning_rate": 0.00043172475879874983, "loss": 3.2655, "step": 66935 }, { "epoch": 4.54817230601984, "grad_norm": 1.7545901536941528, "learning_rate": 0.0004316822937899171, "loss": 3.2907, "step": 66940 }, { "epoch": 4.548512026090501, "grad_norm": 1.2409642934799194, "learning_rate": 0.0004316398287810844, "loss": 3.4654, "step": 66945 }, { "epoch": 4.548851746161163, "grad_norm": 1.5255258083343506, "learning_rate": 0.00043159736377225167, "loss": 3.5689, "step": 66950 }, { "epoch": 4.549191466231825, "grad_norm": 1.5680112838745117, "learning_rate": 0.00043155489876341895, "loss": 3.42, "step": 66955 }, { "epoch": 4.549531186302486, "grad_norm": 1.2929433584213257, "learning_rate": 0.00043151243375458623, "loss": 3.4745, "step": 66960 }, { "epoch": 4.549870906373148, "grad_norm": 1.3235926628112793, "learning_rate": 0.0004314699687457535, "loss": 3.2435, "step": 66965 }, { "epoch": 4.5502106264438105, "grad_norm": 1.1779048442840576, "learning_rate": 0.0004314275037369208, "loss": 3.3843, "step": 66970 }, { "epoch": 4.550550346514472, "grad_norm": 1.2179476022720337, "learning_rate": 0.000431385038728088, "loss": 3.2837, "step": 66975 }, { "epoch": 4.550890066585134, "grad_norm": 1.2053143978118896, "learning_rate": 0.00043134257371925535, "loss": 3.28, "step": 66980 }, { "epoch": 4.551229786655796, "grad_norm": 
1.2146910429000854, "learning_rate": 0.00043130010871042263, "loss": 3.441, "step": 66985 }, { "epoch": 4.551569506726457, "grad_norm": 1.5026872158050537, "learning_rate": 0.00043125764370158986, "loss": 3.2994, "step": 66990 }, { "epoch": 4.551909226797119, "grad_norm": 1.2609468698501587, "learning_rate": 0.0004312151786927572, "loss": 3.476, "step": 66995 }, { "epoch": 4.552248946867781, "grad_norm": 1.458293080329895, "learning_rate": 0.00043117271368392447, "loss": 3.2211, "step": 67000 }, { "epoch": 4.552588666938442, "grad_norm": 1.168839931488037, "learning_rate": 0.0004311302486750917, "loss": 3.3277, "step": 67005 }, { "epoch": 4.552928387009104, "grad_norm": 1.055415153503418, "learning_rate": 0.00043108778366625903, "loss": 3.317, "step": 67010 }, { "epoch": 4.5532681070797665, "grad_norm": 1.2351831197738647, "learning_rate": 0.0004310453186574263, "loss": 3.3037, "step": 67015 }, { "epoch": 4.553607827150428, "grad_norm": 1.4064040184020996, "learning_rate": 0.00043100285364859354, "loss": 3.2945, "step": 67020 }, { "epoch": 4.55394754722109, "grad_norm": 1.42375910282135, "learning_rate": 0.0004309603886397608, "loss": 3.506, "step": 67025 }, { "epoch": 4.554287267291752, "grad_norm": 1.358492374420166, "learning_rate": 0.00043091792363092815, "loss": 3.4338, "step": 67030 }, { "epoch": 4.554626987362413, "grad_norm": 1.4049084186553955, "learning_rate": 0.00043087545862209543, "loss": 3.2361, "step": 67035 }, { "epoch": 4.554966707433075, "grad_norm": 1.1358164548873901, "learning_rate": 0.00043083299361326266, "loss": 3.4803, "step": 67040 }, { "epoch": 4.555306427503737, "grad_norm": 1.419797420501709, "learning_rate": 0.00043079052860443, "loss": 3.3429, "step": 67045 }, { "epoch": 4.555646147574398, "grad_norm": 1.240077257156372, "learning_rate": 0.00043074806359559727, "loss": 3.2145, "step": 67050 }, { "epoch": 4.55598586764506, "grad_norm": 1.283929705619812, "learning_rate": 0.0004307055985867645, "loss": 3.4989, "step": 67055 }, { 
"epoch": 4.5563255877157225, "grad_norm": 1.2940301895141602, "learning_rate": 0.0004306631335779318, "loss": 3.4142, "step": 67060 }, { "epoch": 4.556665307786384, "grad_norm": 1.4592633247375488, "learning_rate": 0.0004306206685690991, "loss": 3.5871, "step": 67065 }, { "epoch": 4.557005027857046, "grad_norm": 1.5101051330566406, "learning_rate": 0.00043057820356026634, "loss": 3.3535, "step": 67070 }, { "epoch": 4.557344747927708, "grad_norm": 1.6990846395492554, "learning_rate": 0.0004305357385514336, "loss": 3.6996, "step": 67075 }, { "epoch": 4.557684467998369, "grad_norm": 1.471919298171997, "learning_rate": 0.00043049327354260095, "loss": 3.3296, "step": 67080 }, { "epoch": 4.558024188069031, "grad_norm": 1.162171721458435, "learning_rate": 0.0004304508085337682, "loss": 3.5729, "step": 67085 }, { "epoch": 4.558363908139693, "grad_norm": 1.0569926500320435, "learning_rate": 0.00043040834352493546, "loss": 3.3607, "step": 67090 }, { "epoch": 4.558703628210354, "grad_norm": 1.0586563348770142, "learning_rate": 0.00043036587851610274, "loss": 3.405, "step": 67095 }, { "epoch": 4.559043348281016, "grad_norm": 1.4362351894378662, "learning_rate": 0.00043032341350727, "loss": 3.0628, "step": 67100 }, { "epoch": 4.5593830683516785, "grad_norm": 1.362104058265686, "learning_rate": 0.0004302809484984373, "loss": 3.5393, "step": 67105 }, { "epoch": 4.55972278842234, "grad_norm": 1.3201793432235718, "learning_rate": 0.0004302384834896046, "loss": 3.2816, "step": 67110 }, { "epoch": 4.560062508493002, "grad_norm": 1.1501531600952148, "learning_rate": 0.00043019601848077186, "loss": 3.3098, "step": 67115 }, { "epoch": 4.560402228563664, "grad_norm": 1.4109349250793457, "learning_rate": 0.00043015355347193914, "loss": 3.5216, "step": 67120 }, { "epoch": 4.560741948634325, "grad_norm": 1.4021438360214233, "learning_rate": 0.0004301110884631064, "loss": 3.262, "step": 67125 }, { "epoch": 4.561081668704987, "grad_norm": 1.0898222923278809, "learning_rate": 
0.00043006862345427364, "loss": 3.3653, "step": 67130 }, { "epoch": 4.561421388775649, "grad_norm": 6.999413013458252, "learning_rate": 0.000430026158445441, "loss": 3.2164, "step": 67135 }, { "epoch": 4.56176110884631, "grad_norm": 1.0734236240386963, "learning_rate": 0.00042998369343660826, "loss": 3.4421, "step": 67140 }, { "epoch": 4.562100828916972, "grad_norm": 1.2671923637390137, "learning_rate": 0.0004299412284277755, "loss": 3.4184, "step": 67145 }, { "epoch": 4.5624405489876345, "grad_norm": 1.4027090072631836, "learning_rate": 0.0004298987634189428, "loss": 3.4573, "step": 67150 }, { "epoch": 4.562780269058296, "grad_norm": 1.1710959672927856, "learning_rate": 0.0004298562984101101, "loss": 3.4, "step": 67155 }, { "epoch": 4.563119989128958, "grad_norm": 1.433611512184143, "learning_rate": 0.0004298138334012773, "loss": 3.4801, "step": 67160 }, { "epoch": 4.56345970919962, "grad_norm": 1.379614233970642, "learning_rate": 0.0004297713683924446, "loss": 3.4678, "step": 67165 }, { "epoch": 4.563799429270281, "grad_norm": 1.1027411222457886, "learning_rate": 0.00042972890338361194, "loss": 3.522, "step": 67170 }, { "epoch": 4.564139149340943, "grad_norm": 1.2232415676116943, "learning_rate": 0.00042968643837477916, "loss": 3.4242, "step": 67175 }, { "epoch": 4.564478869411605, "grad_norm": 1.5452721118927002, "learning_rate": 0.00042964397336594644, "loss": 3.4343, "step": 67180 }, { "epoch": 4.564818589482266, "grad_norm": 1.7685108184814453, "learning_rate": 0.0004296015083571138, "loss": 3.1064, "step": 67185 }, { "epoch": 4.5651583095529285, "grad_norm": 1.4787286520004272, "learning_rate": 0.000429559043348281, "loss": 3.4306, "step": 67190 }, { "epoch": 4.5654980296235905, "grad_norm": 1.2628142833709717, "learning_rate": 0.0004295165783394483, "loss": 3.2983, "step": 67195 }, { "epoch": 4.565837749694252, "grad_norm": 1.3399722576141357, "learning_rate": 0.00042947411333061556, "loss": 3.4125, "step": 67200 }, { "epoch": 4.566177469764914, 
"grad_norm": 1.4586849212646484, "learning_rate": 0.0004294316483217829, "loss": 3.455, "step": 67205 }, { "epoch": 4.566517189835576, "grad_norm": 1.330333948135376, "learning_rate": 0.0004293891833129501, "loss": 3.5016, "step": 67210 }, { "epoch": 4.566856909906237, "grad_norm": 1.1651047468185425, "learning_rate": 0.0004293467183041174, "loss": 3.1121, "step": 67215 }, { "epoch": 4.567196629976899, "grad_norm": 1.1726796627044678, "learning_rate": 0.00042930425329528474, "loss": 3.2222, "step": 67220 }, { "epoch": 4.567536350047561, "grad_norm": 1.2955880165100098, "learning_rate": 0.00042926178828645196, "loss": 3.2068, "step": 67225 }, { "epoch": 4.567876070118222, "grad_norm": 1.304879903793335, "learning_rate": 0.00042921932327761924, "loss": 3.2621, "step": 67230 }, { "epoch": 4.5682157901888845, "grad_norm": 1.0319288969039917, "learning_rate": 0.0004291768582687866, "loss": 3.6433, "step": 67235 }, { "epoch": 4.5685555102595465, "grad_norm": 1.4006472826004028, "learning_rate": 0.0004291343932599538, "loss": 3.4539, "step": 67240 }, { "epoch": 4.568895230330208, "grad_norm": 1.1313897371292114, "learning_rate": 0.0004290919282511211, "loss": 3.3692, "step": 67245 }, { "epoch": 4.56923495040087, "grad_norm": 1.4645280838012695, "learning_rate": 0.00042904946324228836, "loss": 3.5846, "step": 67250 }, { "epoch": 4.569574670471532, "grad_norm": 1.2451039552688599, "learning_rate": 0.00042900699823345565, "loss": 3.2891, "step": 67255 }, { "epoch": 4.569914390542193, "grad_norm": 1.3945552110671997, "learning_rate": 0.0004289645332246229, "loss": 3.3239, "step": 67260 }, { "epoch": 4.570254110612855, "grad_norm": 1.857611060142517, "learning_rate": 0.0004289220682157902, "loss": 3.3181, "step": 67265 }, { "epoch": 4.570593830683517, "grad_norm": 1.2709530591964722, "learning_rate": 0.0004288796032069575, "loss": 3.293, "step": 67270 }, { "epoch": 4.570933550754178, "grad_norm": 1.3461885452270508, "learning_rate": 0.00042883713819812477, "loss": 3.3781, 
"step": 67275 }, { "epoch": 4.5712732708248405, "grad_norm": 1.1301556825637817, "learning_rate": 0.00042879467318929205, "loss": 3.1517, "step": 67280 }, { "epoch": 4.5716129908955025, "grad_norm": 1.3328585624694824, "learning_rate": 0.00042875220818045927, "loss": 3.3575, "step": 67285 }, { "epoch": 4.571952710966164, "grad_norm": 1.440502643585205, "learning_rate": 0.0004287097431716266, "loss": 3.537, "step": 67290 }, { "epoch": 4.572292431036826, "grad_norm": 1.4530595541000366, "learning_rate": 0.0004286672781627939, "loss": 3.2338, "step": 67295 }, { "epoch": 4.572632151107488, "grad_norm": 1.5762966871261597, "learning_rate": 0.0004286248131539611, "loss": 3.4944, "step": 67300 }, { "epoch": 4.572971871178149, "grad_norm": 1.4490418434143066, "learning_rate": 0.00042858234814512845, "loss": 3.2774, "step": 67305 }, { "epoch": 4.573311591248811, "grad_norm": 1.0334405899047852, "learning_rate": 0.0004285398831362957, "loss": 3.6675, "step": 67310 }, { "epoch": 4.573651311319473, "grad_norm": 1.2364827394485474, "learning_rate": 0.00042849741812746295, "loss": 3.43, "step": 67315 }, { "epoch": 4.573991031390134, "grad_norm": 1.232138752937317, "learning_rate": 0.00042845495311863023, "loss": 3.2775, "step": 67320 }, { "epoch": 4.5743307514607965, "grad_norm": 1.630894422531128, "learning_rate": 0.00042841248810979757, "loss": 3.4084, "step": 67325 }, { "epoch": 4.574670471531459, "grad_norm": 0.9937846660614014, "learning_rate": 0.0004283700231009648, "loss": 3.408, "step": 67330 }, { "epoch": 4.57501019160212, "grad_norm": 1.2591758966445923, "learning_rate": 0.00042832755809213207, "loss": 3.4162, "step": 67335 }, { "epoch": 4.575349911672782, "grad_norm": 0.9132426977157593, "learning_rate": 0.0004282850930832994, "loss": 2.9583, "step": 67340 }, { "epoch": 4.575689631743444, "grad_norm": 1.2702497243881226, "learning_rate": 0.00042824262807446663, "loss": 3.4241, "step": 67345 }, { "epoch": 4.576029351814105, "grad_norm": 1.3972028493881226, 
"learning_rate": 0.0004282001630656339, "loss": 3.4536, "step": 67350 }, { "epoch": 4.576369071884767, "grad_norm": 1.4484232664108276, "learning_rate": 0.0004281576980568012, "loss": 3.4021, "step": 67355 }, { "epoch": 4.576708791955428, "grad_norm": 1.303004503250122, "learning_rate": 0.00042811523304796847, "loss": 3.3997, "step": 67360 }, { "epoch": 4.57704851202609, "grad_norm": 1.05032479763031, "learning_rate": 0.00042807276803913575, "loss": 3.2822, "step": 67365 }, { "epoch": 4.5773882320967525, "grad_norm": 1.6488076448440552, "learning_rate": 0.00042803030303030303, "loss": 3.2471, "step": 67370 }, { "epoch": 4.577727952167414, "grad_norm": 1.2439477443695068, "learning_rate": 0.00042798783802147037, "loss": 3.2579, "step": 67375 }, { "epoch": 4.578067672238076, "grad_norm": 1.3124057054519653, "learning_rate": 0.0004279453730126376, "loss": 3.5442, "step": 67380 }, { "epoch": 4.578407392308738, "grad_norm": 1.2600352764129639, "learning_rate": 0.00042790290800380487, "loss": 3.5613, "step": 67385 }, { "epoch": 4.578747112379399, "grad_norm": 1.4427781105041504, "learning_rate": 0.00042786044299497215, "loss": 3.3521, "step": 67390 }, { "epoch": 4.579086832450061, "grad_norm": 1.0856189727783203, "learning_rate": 0.00042781797798613943, "loss": 3.2343, "step": 67395 }, { "epoch": 4.579426552520723, "grad_norm": 2.1447386741638184, "learning_rate": 0.0004277755129773067, "loss": 3.3924, "step": 67400 }, { "epoch": 4.579766272591384, "grad_norm": 1.2712830305099487, "learning_rate": 0.000427733047968474, "loss": 3.5584, "step": 67405 }, { "epoch": 4.580105992662046, "grad_norm": 1.4155449867248535, "learning_rate": 0.0004276905829596413, "loss": 3.6379, "step": 67410 }, { "epoch": 4.5804457127327085, "grad_norm": 1.2260178327560425, "learning_rate": 0.00042764811795080855, "loss": 3.3639, "step": 67415 }, { "epoch": 4.58078543280337, "grad_norm": 1.3969697952270508, "learning_rate": 0.00042760565294197583, "loss": 3.2463, "step": 67420 }, { "epoch": 
4.581125152874032, "grad_norm": 1.0109639167785645, "learning_rate": 0.00042756318793314306, "loss": 3.2871, "step": 67425 }, { "epoch": 4.581464872944694, "grad_norm": 1.2289880514144897, "learning_rate": 0.0004275207229243104, "loss": 3.2787, "step": 67430 }, { "epoch": 4.581804593015355, "grad_norm": 1.5577062368392944, "learning_rate": 0.0004274782579154777, "loss": 3.4996, "step": 67435 }, { "epoch": 4.582144313086017, "grad_norm": 1.7904459238052368, "learning_rate": 0.0004274357929066449, "loss": 3.1777, "step": 67440 }, { "epoch": 4.582484033156679, "grad_norm": 1.1810736656188965, "learning_rate": 0.00042739332789781223, "loss": 3.3687, "step": 67445 }, { "epoch": 4.58282375322734, "grad_norm": 1.5621263980865479, "learning_rate": 0.0004273508628889795, "loss": 3.3502, "step": 67450 }, { "epoch": 4.583163473298002, "grad_norm": 1.1563318967819214, "learning_rate": 0.00042730839788014674, "loss": 3.2251, "step": 67455 }, { "epoch": 4.5835031933686645, "grad_norm": 1.4008671045303345, "learning_rate": 0.000427265932871314, "loss": 3.5512, "step": 67460 }, { "epoch": 4.583842913439326, "grad_norm": 1.2902815341949463, "learning_rate": 0.00042722346786248135, "loss": 3.2413, "step": 67465 }, { "epoch": 4.584182633509988, "grad_norm": 1.0901670455932617, "learning_rate": 0.0004271810028536486, "loss": 3.3643, "step": 67470 }, { "epoch": 4.58452235358065, "grad_norm": 1.574519157409668, "learning_rate": 0.00042713853784481586, "loss": 3.4173, "step": 67475 }, { "epoch": 4.584862073651311, "grad_norm": 1.5427085161209106, "learning_rate": 0.0004270960728359832, "loss": 3.1397, "step": 67480 }, { "epoch": 4.585201793721973, "grad_norm": 1.077244520187378, "learning_rate": 0.0004270536078271504, "loss": 3.2054, "step": 67485 }, { "epoch": 4.585541513792635, "grad_norm": 2.01723575592041, "learning_rate": 0.0004270111428183177, "loss": 3.5799, "step": 67490 }, { "epoch": 4.585881233863296, "grad_norm": 1.405965805053711, "learning_rate": 0.000426968677809485, 
"loss": 3.343, "step": 67495 }, { "epoch": 4.5862209539339585, "grad_norm": 1.1991499662399292, "learning_rate": 0.00042692621280065226, "loss": 3.5089, "step": 67500 }, { "epoch": 4.5865606740046205, "grad_norm": 1.0082532167434692, "learning_rate": 0.00042688374779181954, "loss": 3.3456, "step": 67505 }, { "epoch": 4.586900394075282, "grad_norm": 1.402783989906311, "learning_rate": 0.0004268412827829868, "loss": 3.3871, "step": 67510 }, { "epoch": 4.587240114145944, "grad_norm": 1.0647666454315186, "learning_rate": 0.0004267988177741541, "loss": 3.4575, "step": 67515 }, { "epoch": 4.587579834216606, "grad_norm": 1.4548488855361938, "learning_rate": 0.0004267563527653214, "loss": 3.5396, "step": 67520 }, { "epoch": 4.587919554287267, "grad_norm": 1.3212416172027588, "learning_rate": 0.00042671388775648866, "loss": 3.5285, "step": 67525 }, { "epoch": 4.588259274357929, "grad_norm": 1.5070897340774536, "learning_rate": 0.0004266714227476559, "loss": 3.4623, "step": 67530 }, { "epoch": 4.588598994428591, "grad_norm": 1.195738434791565, "learning_rate": 0.0004266289577388232, "loss": 3.2865, "step": 67535 }, { "epoch": 4.588938714499252, "grad_norm": 1.493873119354248, "learning_rate": 0.0004265864927299905, "loss": 3.3544, "step": 67540 }, { "epoch": 4.5892784345699145, "grad_norm": 1.249869465827942, "learning_rate": 0.0004265440277211578, "loss": 3.4936, "step": 67545 }, { "epoch": 4.5896181546405765, "grad_norm": 1.5505404472351074, "learning_rate": 0.00042650156271232506, "loss": 3.2943, "step": 67550 }, { "epoch": 4.589957874711238, "grad_norm": 1.540766954421997, "learning_rate": 0.00042645909770349234, "loss": 3.3959, "step": 67555 }, { "epoch": 4.5902975947819, "grad_norm": 1.1609197854995728, "learning_rate": 0.0004264166326946596, "loss": 3.5997, "step": 67560 }, { "epoch": 4.590637314852561, "grad_norm": 1.3650147914886475, "learning_rate": 0.0004263741676858269, "loss": 3.1578, "step": 67565 }, { "epoch": 4.590977034923223, "grad_norm": 1.352498173713684, 
"learning_rate": 0.0004263317026769942, "loss": 3.2407, "step": 67570 }, { "epoch": 4.591316754993885, "grad_norm": 1.1492995023727417, "learning_rate": 0.00042628923766816146, "loss": 3.4676, "step": 67575 }, { "epoch": 4.591656475064546, "grad_norm": 1.3004448413848877, "learning_rate": 0.0004262467726593287, "loss": 3.3397, "step": 67580 }, { "epoch": 4.591996195135208, "grad_norm": 1.0763959884643555, "learning_rate": 0.000426204307650496, "loss": 3.4073, "step": 67585 }, { "epoch": 4.5923359152058705, "grad_norm": 1.161041498184204, "learning_rate": 0.0004261618426416633, "loss": 3.3435, "step": 67590 }, { "epoch": 4.592675635276532, "grad_norm": 1.3795573711395264, "learning_rate": 0.00042611937763283053, "loss": 3.0695, "step": 67595 }, { "epoch": 4.593015355347194, "grad_norm": 1.6350207328796387, "learning_rate": 0.00042607691262399786, "loss": 3.2542, "step": 67600 }, { "epoch": 4.593355075417856, "grad_norm": 1.5242068767547607, "learning_rate": 0.00042603444761516514, "loss": 3.4154, "step": 67605 }, { "epoch": 4.593694795488517, "grad_norm": 1.1068241596221924, "learning_rate": 0.00042599198260633237, "loss": 3.3398, "step": 67610 }, { "epoch": 4.594034515559179, "grad_norm": 1.340301752090454, "learning_rate": 0.00042594951759749965, "loss": 3.3491, "step": 67615 }, { "epoch": 4.594374235629841, "grad_norm": 1.4942848682403564, "learning_rate": 0.000425907052588667, "loss": 3.3527, "step": 67620 }, { "epoch": 4.594713955700502, "grad_norm": 1.4965431690216064, "learning_rate": 0.0004258645875798342, "loss": 3.3172, "step": 67625 }, { "epoch": 4.595053675771164, "grad_norm": 1.1140707731246948, "learning_rate": 0.0004258221225710015, "loss": 3.4623, "step": 67630 }, { "epoch": 4.5953933958418265, "grad_norm": 1.0527656078338623, "learning_rate": 0.0004257796575621688, "loss": 3.2525, "step": 67635 }, { "epoch": 4.595733115912488, "grad_norm": 1.3943392038345337, "learning_rate": 0.00042573719255333605, "loss": 3.1174, "step": 67640 }, { "epoch": 
4.59607283598315, "grad_norm": 1.3869236707687378, "learning_rate": 0.00042569472754450333, "loss": 3.6926, "step": 67645 }, { "epoch": 4.596412556053812, "grad_norm": 1.4597874879837036, "learning_rate": 0.0004256522625356706, "loss": 3.498, "step": 67650 }, { "epoch": 4.596752276124473, "grad_norm": 1.2781296968460083, "learning_rate": 0.0004256097975268379, "loss": 3.6292, "step": 67655 }, { "epoch": 4.597091996195135, "grad_norm": 1.511461853981018, "learning_rate": 0.00042556733251800517, "loss": 3.2311, "step": 67660 }, { "epoch": 4.597431716265797, "grad_norm": 1.2464343309402466, "learning_rate": 0.00042552486750917245, "loss": 3.309, "step": 67665 }, { "epoch": 4.597771436336458, "grad_norm": 1.0367358922958374, "learning_rate": 0.00042548240250033973, "loss": 3.5692, "step": 67670 }, { "epoch": 4.59811115640712, "grad_norm": 1.6437300443649292, "learning_rate": 0.000425439937491507, "loss": 3.3837, "step": 67675 }, { "epoch": 4.5984508764777825, "grad_norm": 1.177050232887268, "learning_rate": 0.0004253974724826743, "loss": 3.2494, "step": 67680 }, { "epoch": 4.598790596548444, "grad_norm": 1.5622187852859497, "learning_rate": 0.0004253550074738415, "loss": 3.155, "step": 67685 }, { "epoch": 4.599130316619106, "grad_norm": 1.3561221361160278, "learning_rate": 0.00042531254246500885, "loss": 3.3594, "step": 67690 }, { "epoch": 4.599470036689768, "grad_norm": 1.2153651714324951, "learning_rate": 0.00042527007745617613, "loss": 3.172, "step": 67695 }, { "epoch": 4.599809756760429, "grad_norm": 1.1823750734329224, "learning_rate": 0.00042522761244734336, "loss": 3.4911, "step": 67700 }, { "epoch": 4.600149476831091, "grad_norm": 1.429288387298584, "learning_rate": 0.0004251851474385107, "loss": 3.6249, "step": 67705 }, { "epoch": 4.600489196901753, "grad_norm": 1.476209044456482, "learning_rate": 0.00042514268242967797, "loss": 3.1577, "step": 67710 }, { "epoch": 4.600828916972414, "grad_norm": 1.4184880256652832, "learning_rate": 0.00042510021742084525, 
"loss": 3.1058, "step": 67715 }, { "epoch": 4.601168637043076, "grad_norm": 1.4260839223861694, "learning_rate": 0.0004250577524120125, "loss": 3.3534, "step": 67720 }, { "epoch": 4.6015083571137385, "grad_norm": 1.093066692352295, "learning_rate": 0.0004250152874031798, "loss": 3.2818, "step": 67725 }, { "epoch": 4.6018480771844, "grad_norm": 1.3363314867019653, "learning_rate": 0.0004249728223943471, "loss": 3.3016, "step": 67730 }, { "epoch": 4.602187797255062, "grad_norm": 1.1112589836120605, "learning_rate": 0.0004249303573855143, "loss": 3.3392, "step": 67735 }, { "epoch": 4.602527517325724, "grad_norm": 1.3744823932647705, "learning_rate": 0.00042488789237668165, "loss": 3.4862, "step": 67740 }, { "epoch": 4.602867237396385, "grad_norm": 1.1800525188446045, "learning_rate": 0.00042484542736784893, "loss": 3.3367, "step": 67745 }, { "epoch": 4.603206957467047, "grad_norm": 1.6330852508544922, "learning_rate": 0.00042480296235901616, "loss": 3.4715, "step": 67750 }, { "epoch": 4.603546677537709, "grad_norm": 1.5370501279830933, "learning_rate": 0.00042476049735018344, "loss": 3.188, "step": 67755 }, { "epoch": 4.60388639760837, "grad_norm": 1.4305609464645386, "learning_rate": 0.00042471803234135077, "loss": 3.5224, "step": 67760 }, { "epoch": 4.6042261176790324, "grad_norm": 1.3411091566085815, "learning_rate": 0.000424675567332518, "loss": 3.3807, "step": 67765 }, { "epoch": 4.6045658377496945, "grad_norm": 1.5728416442871094, "learning_rate": 0.0004246331023236853, "loss": 3.6143, "step": 67770 }, { "epoch": 4.604905557820356, "grad_norm": 1.3696120977401733, "learning_rate": 0.0004245906373148526, "loss": 3.4771, "step": 67775 }, { "epoch": 4.605245277891018, "grad_norm": 1.295021414756775, "learning_rate": 0.00042454817230601984, "loss": 3.4818, "step": 67780 }, { "epoch": 4.60558499796168, "grad_norm": 1.4273298978805542, "learning_rate": 0.0004245057072971871, "loss": 3.5333, "step": 67785 }, { "epoch": 4.605924718032341, "grad_norm": 1.355271577835083, 
"learning_rate": 0.0004244632422883544, "loss": 3.3244, "step": 67790 }, { "epoch": 4.606264438103003, "grad_norm": 0.8653645515441895, "learning_rate": 0.0004244207772795217, "loss": 3.4317, "step": 67795 }, { "epoch": 4.606604158173665, "grad_norm": 1.4311107397079468, "learning_rate": 0.00042437831227068896, "loss": 3.2672, "step": 67800 }, { "epoch": 4.606943878244326, "grad_norm": 1.1833993196487427, "learning_rate": 0.00042433584726185624, "loss": 3.4343, "step": 67805 }, { "epoch": 4.6072835983149885, "grad_norm": 1.2132259607315063, "learning_rate": 0.0004242933822530235, "loss": 3.2362, "step": 67810 }, { "epoch": 4.6076233183856505, "grad_norm": 1.10452401638031, "learning_rate": 0.0004242509172441908, "loss": 3.4557, "step": 67815 }, { "epoch": 4.607963038456312, "grad_norm": 0.9758850932121277, "learning_rate": 0.0004242084522353581, "loss": 3.0109, "step": 67820 }, { "epoch": 4.608302758526974, "grad_norm": 1.3429759740829468, "learning_rate": 0.0004241659872265253, "loss": 3.5173, "step": 67825 }, { "epoch": 4.608642478597636, "grad_norm": 1.2951148748397827, "learning_rate": 0.00042412352221769264, "loss": 3.7277, "step": 67830 }, { "epoch": 4.608982198668297, "grad_norm": 1.2952412366867065, "learning_rate": 0.0004240810572088599, "loss": 3.3669, "step": 67835 }, { "epoch": 4.609321918738959, "grad_norm": 1.255670428276062, "learning_rate": 0.00042403859220002714, "loss": 3.1777, "step": 67840 }, { "epoch": 4.609661638809621, "grad_norm": 1.5292384624481201, "learning_rate": 0.0004239961271911945, "loss": 3.3211, "step": 67845 }, { "epoch": 4.610001358880282, "grad_norm": 1.3951984643936157, "learning_rate": 0.00042395366218236176, "loss": 3.5081, "step": 67850 }, { "epoch": 4.6103410789509445, "grad_norm": 1.3968998193740845, "learning_rate": 0.000423911197173529, "loss": 3.3579, "step": 67855 }, { "epoch": 4.6106807990216065, "grad_norm": 1.3858577013015747, "learning_rate": 0.0004238687321646963, "loss": 3.3458, "step": 67860 }, { "epoch": 
4.611020519092268, "grad_norm": 1.3152986764907837, "learning_rate": 0.0004238262671558636, "loss": 3.2866, "step": 67865 }, { "epoch": 4.61136023916293, "grad_norm": 1.4210516214370728, "learning_rate": 0.0004237838021470308, "loss": 3.067, "step": 67870 }, { "epoch": 4.611699959233592, "grad_norm": 1.3991751670837402, "learning_rate": 0.0004237413371381981, "loss": 3.36, "step": 67875 }, { "epoch": 4.612039679304253, "grad_norm": 1.2586115598678589, "learning_rate": 0.00042369887212936544, "loss": 3.5336, "step": 67880 }, { "epoch": 4.612379399374915, "grad_norm": 0.9603835344314575, "learning_rate": 0.0004236564071205327, "loss": 3.3116, "step": 67885 }, { "epoch": 4.612719119445577, "grad_norm": 1.2203577756881714, "learning_rate": 0.00042361394211169994, "loss": 3.5573, "step": 67890 }, { "epoch": 4.613058839516238, "grad_norm": 1.5790786743164062, "learning_rate": 0.0004235714771028673, "loss": 3.2238, "step": 67895 }, { "epoch": 4.6133985595869005, "grad_norm": 1.568189024925232, "learning_rate": 0.00042352901209403456, "loss": 3.5454, "step": 67900 }, { "epoch": 4.613738279657563, "grad_norm": 0.9863575100898743, "learning_rate": 0.0004234865470852018, "loss": 3.4254, "step": 67905 }, { "epoch": 4.614077999728224, "grad_norm": 1.5108497142791748, "learning_rate": 0.00042344408207636906, "loss": 3.1768, "step": 67910 }, { "epoch": 4.614417719798886, "grad_norm": 1.457615852355957, "learning_rate": 0.0004234016170675364, "loss": 3.4324, "step": 67915 }, { "epoch": 4.614757439869548, "grad_norm": 1.172885775566101, "learning_rate": 0.0004233591520587036, "loss": 3.3877, "step": 67920 }, { "epoch": 4.615097159940209, "grad_norm": 1.2921262979507446, "learning_rate": 0.0004233166870498709, "loss": 3.69, "step": 67925 }, { "epoch": 4.615436880010871, "grad_norm": 1.1305201053619385, "learning_rate": 0.00042327422204103824, "loss": 3.2381, "step": 67930 }, { "epoch": 4.615776600081533, "grad_norm": 1.217028260231018, "learning_rate": 0.00042323175703220546, 
"loss": 3.2657, "step": 67935 }, { "epoch": 4.616116320152194, "grad_norm": 1.1457791328430176, "learning_rate": 0.00042318929202337274, "loss": 3.3405, "step": 67940 }, { "epoch": 4.6164560402228565, "grad_norm": 1.2020585536956787, "learning_rate": 0.00042314682701454, "loss": 3.5535, "step": 67945 }, { "epoch": 4.616795760293519, "grad_norm": 1.1184768676757812, "learning_rate": 0.0004231043620057073, "loss": 3.4705, "step": 67950 }, { "epoch": 4.61713548036418, "grad_norm": 1.2382675409317017, "learning_rate": 0.0004230618969968746, "loss": 3.5463, "step": 67955 }, { "epoch": 4.617475200434842, "grad_norm": 1.5053988695144653, "learning_rate": 0.00042301943198804186, "loss": 3.2243, "step": 67960 }, { "epoch": 4.617814920505504, "grad_norm": 1.162697672843933, "learning_rate": 0.00042297696697920914, "loss": 3.2323, "step": 67965 }, { "epoch": 4.618154640576165, "grad_norm": null, "learning_rate": 0.00042294299497214297, "loss": 3.3353, "step": 67970 }, { "epoch": 4.618494360646827, "grad_norm": 1.4680836200714111, "learning_rate": 0.00042290052996331025, "loss": 3.4969, "step": 67975 }, { "epoch": 4.618834080717489, "grad_norm": 1.1611990928649902, "learning_rate": 0.00042285806495447753, "loss": 3.1956, "step": 67980 }, { "epoch": 4.61917380078815, "grad_norm": 1.2597882747650146, "learning_rate": 0.00042281559994564476, "loss": 3.3534, "step": 67985 }, { "epoch": 4.6195135208588125, "grad_norm": 1.3250800371170044, "learning_rate": 0.0004227731349368121, "loss": 3.4446, "step": 67990 }, { "epoch": 4.619853240929475, "grad_norm": 1.4759095907211304, "learning_rate": 0.00042273066992797937, "loss": 3.5028, "step": 67995 }, { "epoch": 4.620192961000136, "grad_norm": 1.4364172220230103, "learning_rate": 0.0004226882049191466, "loss": 3.2313, "step": 68000 }, { "epoch": 4.620532681070798, "grad_norm": 1.3487545251846313, "learning_rate": 0.00042264573991031393, "loss": 3.5806, "step": 68005 }, { "epoch": 4.62087240114146, "grad_norm": 1.1797293424606323,
"learning_rate": 0.0004226032749014812, "loss": 3.3622, "step": 68010 }, { "epoch": 4.621212121212121, "grad_norm": 1.1183031797409058, "learning_rate": 0.00042256080989264844, "loss": 3.2279, "step": 68015 }, { "epoch": 4.621551841282783, "grad_norm": 1.3830100297927856, "learning_rate": 0.0004225183448838157, "loss": 3.3091, "step": 68020 }, { "epoch": 4.621891561353445, "grad_norm": 1.2398651838302612, "learning_rate": 0.00042247587987498305, "loss": 3.1958, "step": 68025 }, { "epoch": 4.622231281424106, "grad_norm": 1.3191308975219727, "learning_rate": 0.0004224334148661503, "loss": 3.2607, "step": 68030 }, { "epoch": 4.6225710014947685, "grad_norm": 1.3917973041534424, "learning_rate": 0.00042239094985731756, "loss": 3.3206, "step": 68035 }, { "epoch": 4.62291072156543, "grad_norm": 1.4813865423202515, "learning_rate": 0.0004223484848484849, "loss": 3.4266, "step": 68040 }, { "epoch": 4.623250441636092, "grad_norm": 1.4455782175064087, "learning_rate": 0.0004223060198396521, "loss": 3.1662, "step": 68045 }, { "epoch": 4.623590161706754, "grad_norm": 1.3071297407150269, "learning_rate": 0.0004222635548308194, "loss": 3.5306, "step": 68050 }, { "epoch": 4.623929881777415, "grad_norm": 1.2455047369003296, "learning_rate": 0.00042222108982198673, "loss": 3.2151, "step": 68055 }, { "epoch": 4.624269601848077, "grad_norm": 1.2805869579315186, "learning_rate": 0.00042217862481315396, "loss": 3.5136, "step": 68060 }, { "epoch": 4.624609321918739, "grad_norm": 1.2994681596755981, "learning_rate": 0.00042213615980432124, "loss": 3.5868, "step": 68065 }, { "epoch": 4.6249490419894, "grad_norm": 1.284737229347229, "learning_rate": 0.0004220936947954885, "loss": 3.1393, "step": 68070 }, { "epoch": 4.6252887620600625, "grad_norm": 1.0913960933685303, "learning_rate": 0.0004220512297866558, "loss": 3.4275, "step": 68075 }, { "epoch": 4.6256284821307245, "grad_norm": 1.42449951171875, "learning_rate": 0.0004220087647778231, "loss": 3.2622, "step": 68080 }, { "epoch": 
4.625968202201386, "grad_norm": 1.131047010421753, "learning_rate": 0.00042196629976899036, "loss": 3.3359, "step": 68085 }, { "epoch": 4.626307922272048, "grad_norm": 1.2682921886444092, "learning_rate": 0.0004219238347601577, "loss": 3.3013, "step": 68090 }, { "epoch": 4.62664764234271, "grad_norm": 1.1953707933425903, "learning_rate": 0.0004218813697513249, "loss": 3.5168, "step": 68095 }, { "epoch": 4.626987362413371, "grad_norm": 1.5360864400863647, "learning_rate": 0.0004218389047424922, "loss": 3.5584, "step": 68100 }, { "epoch": 4.627327082484033, "grad_norm": 1.3328548669815063, "learning_rate": 0.0004217964397336595, "loss": 3.2918, "step": 68105 }, { "epoch": 4.627666802554695, "grad_norm": 1.1568902730941772, "learning_rate": 0.00042175397472482676, "loss": 3.1915, "step": 68110 }, { "epoch": 4.628006522625356, "grad_norm": 1.1853952407836914, "learning_rate": 0.00042171150971599404, "loss": 3.4834, "step": 68115 }, { "epoch": 4.6283462426960185, "grad_norm": 1.284461498260498, "learning_rate": 0.0004216690447071613, "loss": 3.3716, "step": 68120 }, { "epoch": 4.6286859627666805, "grad_norm": 1.7202433347702026, "learning_rate": 0.0004216265796983286, "loss": 3.5278, "step": 68125 }, { "epoch": 4.629025682837342, "grad_norm": 1.138560175895691, "learning_rate": 0.0004215841146894959, "loss": 3.572, "step": 68130 }, { "epoch": 4.629365402908004, "grad_norm": 1.0360337495803833, "learning_rate": 0.00042154164968066316, "loss": 3.403, "step": 68135 }, { "epoch": 4.629705122978666, "grad_norm": 1.1495027542114258, "learning_rate": 0.0004214991846718304, "loss": 3.3064, "step": 68140 }, { "epoch": 4.630044843049327, "grad_norm": 1.0409923791885376, "learning_rate": 0.0004214567196629977, "loss": 3.2589, "step": 68145 }, { "epoch": 4.630384563119989, "grad_norm": 1.664011836051941, "learning_rate": 0.000421414254654165, "loss": 3.4385, "step": 68150 }, { "epoch": 4.630724283190651, "grad_norm": 1.2562161684036255, "learning_rate": 0.0004213717896453322, 
"loss": 3.2879, "step": 68155 }, { "epoch": 4.631064003261312, "grad_norm": 1.2372479438781738, "learning_rate": 0.00042132932463649956, "loss": 3.4419, "step": 68160 }, { "epoch": 4.6314037233319745, "grad_norm": 1.5012402534484863, "learning_rate": 0.00042128685962766684, "loss": 3.399, "step": 68165 }, { "epoch": 4.6317434434026366, "grad_norm": 1.1589722633361816, "learning_rate": 0.00042124439461883406, "loss": 3.3862, "step": 68170 }, { "epoch": 4.632083163473298, "grad_norm": 1.3083301782608032, "learning_rate": 0.00042120192961000134, "loss": 3.3775, "step": 68175 }, { "epoch": 4.63242288354396, "grad_norm": 1.0809690952301025, "learning_rate": 0.0004211594646011687, "loss": 3.355, "step": 68180 }, { "epoch": 4.632762603614622, "grad_norm": 1.3686374425888062, "learning_rate": 0.0004211169995923359, "loss": 3.3846, "step": 68185 }, { "epoch": 4.633102323685283, "grad_norm": 1.1865288019180298, "learning_rate": 0.0004210745345835032, "loss": 3.4064, "step": 68190 }, { "epoch": 4.633442043755945, "grad_norm": 1.180705189704895, "learning_rate": 0.0004210320695746705, "loss": 3.6057, "step": 68195 }, { "epoch": 4.633781763826607, "grad_norm": 1.1570978164672852, "learning_rate": 0.00042098960456583774, "loss": 3.2761, "step": 68200 }, { "epoch": 4.634121483897268, "grad_norm": 1.3547741174697876, "learning_rate": 0.000420947139557005, "loss": 3.4873, "step": 68205 }, { "epoch": 4.6344612039679305, "grad_norm": 1.3098580837249756, "learning_rate": 0.0004209046745481723, "loss": 3.1029, "step": 68210 }, { "epoch": 4.634800924038593, "grad_norm": 1.1643861532211304, "learning_rate": 0.0004208622095393396, "loss": 3.8123, "step": 68215 }, { "epoch": 4.635140644109254, "grad_norm": 1.2948821783065796, "learning_rate": 0.00042081974453050686, "loss": 3.3448, "step": 68220 }, { "epoch": 4.635480364179916, "grad_norm": 1.0666470527648926, "learning_rate": 0.00042077727952167414, "loss": 3.5156, "step": 68225 }, { "epoch": 4.635820084250578, "grad_norm": 
1.223716139793396, "learning_rate": 0.0004207348145128414, "loss": 3.2091, "step": 68230 }, { "epoch": 4.636159804321239, "grad_norm": 1.4181345701217651, "learning_rate": 0.0004206923495040087, "loss": 3.3131, "step": 68235 }, { "epoch": 4.636499524391901, "grad_norm": 1.2632840871810913, "learning_rate": 0.000420649884495176, "loss": 3.4714, "step": 68240 }, { "epoch": 4.636839244462563, "grad_norm": 1.2832276821136475, "learning_rate": 0.0004206074194863432, "loss": 3.47, "step": 68245 }, { "epoch": 4.637178964533224, "grad_norm": 2.122722625732422, "learning_rate": 0.00042056495447751054, "loss": 3.4128, "step": 68250 }, { "epoch": 4.6375186846038865, "grad_norm": 1.2061654329299927, "learning_rate": 0.0004205224894686778, "loss": 3.2874, "step": 68255 }, { "epoch": 4.637858404674548, "grad_norm": 1.0952832698822021, "learning_rate": 0.0004204800244598451, "loss": 3.4589, "step": 68260 }, { "epoch": 4.63819812474521, "grad_norm": 1.2918223142623901, "learning_rate": 0.0004204375594510124, "loss": 3.3011, "step": 68265 }, { "epoch": 4.638537844815872, "grad_norm": 1.2543445825576782, "learning_rate": 0.00042039509444217967, "loss": 3.1463, "step": 68270 }, { "epoch": 4.638877564886533, "grad_norm": 1.0747509002685547, "learning_rate": 0.00042035262943334695, "loss": 3.5441, "step": 68275 }, { "epoch": 4.639217284957195, "grad_norm": 1.4276514053344727, "learning_rate": 0.0004203186574262807, "loss": 3.5938, "step": 68280 }, { "epoch": 4.639557005027857, "grad_norm": 1.4347045421600342, "learning_rate": 0.000420276192417448, "loss": 3.1764, "step": 68285 }, { "epoch": 4.639896725098518, "grad_norm": 1.1782398223876953, "learning_rate": 0.00042023372740861533, "loss": 3.2561, "step": 68290 }, { "epoch": 4.64023644516918, "grad_norm": 1.5163309574127197, "learning_rate": 0.0004201912623997826, "loss": 3.5871, "step": 68295 }, { "epoch": 4.6405761652398425, "grad_norm": 1.3441983461380005, "learning_rate": 0.00042014879739094984, "loss": 3.1417, "step": 68300 }, { 
"epoch": 4.640915885310504, "grad_norm": 1.1645939350128174, "learning_rate": 0.00042010633238211717, "loss": 3.0747, "step": 68305 }, { "epoch": 4.641255605381166, "grad_norm": 1.2381670475006104, "learning_rate": 0.00042006386737328445, "loss": 3.3353, "step": 68310 }, { "epoch": 4.641595325451828, "grad_norm": 1.427199363708496, "learning_rate": 0.0004200214023644517, "loss": 3.3885, "step": 68315 }, { "epoch": 4.641935045522489, "grad_norm": 1.1721560955047607, "learning_rate": 0.000419978937355619, "loss": 3.4307, "step": 68320 }, { "epoch": 4.642274765593151, "grad_norm": 2.108952045440674, "learning_rate": 0.0004199364723467863, "loss": 3.2793, "step": 68325 }, { "epoch": 4.642614485663813, "grad_norm": 1.8271387815475464, "learning_rate": 0.0004198940073379535, "loss": 3.3979, "step": 68330 }, { "epoch": 4.642954205734474, "grad_norm": 1.0385849475860596, "learning_rate": 0.0004198515423291208, "loss": 3.3321, "step": 68335 }, { "epoch": 4.6432939258051364, "grad_norm": 1.17824387550354, "learning_rate": 0.00041980907732028813, "loss": 3.3851, "step": 68340 }, { "epoch": 4.6436336458757985, "grad_norm": 2.116868019104004, "learning_rate": 0.00041976661231145536, "loss": 3.0386, "step": 68345 }, { "epoch": 4.64397336594646, "grad_norm": 1.3731937408447266, "learning_rate": 0.00041972414730262264, "loss": 3.4223, "step": 68350 }, { "epoch": 4.644313086017122, "grad_norm": 1.303887963294983, "learning_rate": 0.00041968168229378997, "loss": 3.4408, "step": 68355 }, { "epoch": 4.644652806087784, "grad_norm": 1.3985164165496826, "learning_rate": 0.0004196392172849572, "loss": 3.3427, "step": 68360 }, { "epoch": 4.644992526158445, "grad_norm": 1.6956791877746582, "learning_rate": 0.0004195967522761245, "loss": 3.0354, "step": 68365 }, { "epoch": 4.645332246229107, "grad_norm": 1.4903699159622192, "learning_rate": 0.00041955428726729176, "loss": 3.5521, "step": 68370 }, { "epoch": 4.645671966299769, "grad_norm": 1.3185275793075562, "learning_rate": 
0.00041951182225845904, "loss": 3.5078, "step": 68375 }, { "epoch": 4.64601168637043, "grad_norm": 1.328587293624878, "learning_rate": 0.0004194693572496263, "loss": 3.2565, "step": 68380 }, { "epoch": 4.6463514064410925, "grad_norm": 1.1014965772628784, "learning_rate": 0.0004194268922407936, "loss": 3.3904, "step": 68385 }, { "epoch": 4.6466911265117545, "grad_norm": 1.2965971231460571, "learning_rate": 0.0004193844272319609, "loss": 3.2806, "step": 68390 }, { "epoch": 4.647030846582416, "grad_norm": 1.739089846611023, "learning_rate": 0.00041934196222312816, "loss": 3.4736, "step": 68395 }, { "epoch": 4.647370566653078, "grad_norm": 1.2776628732681274, "learning_rate": 0.00041929949721429544, "loss": 3.5307, "step": 68400 }, { "epoch": 4.64771028672374, "grad_norm": 1.6599762439727783, "learning_rate": 0.00041925703220546266, "loss": 3.4293, "step": 68405 }, { "epoch": 4.648050006794401, "grad_norm": 1.3349413871765137, "learning_rate": 0.00041921456719663, "loss": 3.368, "step": 68410 }, { "epoch": 4.648389726865063, "grad_norm": 1.5935142040252686, "learning_rate": 0.0004191721021877973, "loss": 3.3936, "step": 68415 }, { "epoch": 4.648729446935725, "grad_norm": 1.2064251899719238, "learning_rate": 0.0004191296371789645, "loss": 3.3176, "step": 68420 }, { "epoch": 4.649069167006386, "grad_norm": 1.2221242189407349, "learning_rate": 0.00041908717217013184, "loss": 3.282, "step": 68425 }, { "epoch": 4.6494088870770485, "grad_norm": 1.362034559249878, "learning_rate": 0.0004190447071612991, "loss": 3.3403, "step": 68430 }, { "epoch": 4.6497486071477105, "grad_norm": 1.4551408290863037, "learning_rate": 0.00041900224215246634, "loss": 3.4055, "step": 68435 }, { "epoch": 4.650088327218372, "grad_norm": 1.2771438360214233, "learning_rate": 0.0004189597771436336, "loss": 3.7681, "step": 68440 }, { "epoch": 4.650428047289034, "grad_norm": 1.166424036026001, "learning_rate": 0.00041891731213480096, "loss": 3.507, "step": 68445 }, { "epoch": 4.650767767359696, 
"grad_norm": 1.6945436000823975, "learning_rate": 0.0004188748471259682, "loss": 3.4367, "step": 68450 }, { "epoch": 4.651107487430357, "grad_norm": 1.0840407609939575, "learning_rate": 0.00041883238211713546, "loss": 3.3799, "step": 68455 }, { "epoch": 4.651447207501019, "grad_norm": 1.4301772117614746, "learning_rate": 0.0004187899171083028, "loss": 3.7017, "step": 68460 }, { "epoch": 4.651786927571681, "grad_norm": 1.0497372150421143, "learning_rate": 0.0004187474520994701, "loss": 3.2721, "step": 68465 }, { "epoch": 4.652126647642342, "grad_norm": 1.289397120475769, "learning_rate": 0.0004187049870906373, "loss": 3.4398, "step": 68470 }, { "epoch": 4.6524663677130045, "grad_norm": 1.4661353826522827, "learning_rate": 0.0004186625220818046, "loss": 3.2104, "step": 68475 }, { "epoch": 4.6528060877836666, "grad_norm": 1.2458369731903076, "learning_rate": 0.0004186200570729719, "loss": 3.1048, "step": 68480 }, { "epoch": 4.653145807854328, "grad_norm": 1.547571063041687, "learning_rate": 0.00041857759206413914, "loss": 3.5004, "step": 68485 }, { "epoch": 4.65348552792499, "grad_norm": 1.4596545696258545, "learning_rate": 0.0004185351270553064, "loss": 3.4613, "step": 68490 }, { "epoch": 4.653825247995652, "grad_norm": 1.1532047986984253, "learning_rate": 0.00041849266204647376, "loss": 3.2138, "step": 68495 }, { "epoch": 4.654164968066313, "grad_norm": 1.1660311222076416, "learning_rate": 0.000418450197037641, "loss": 3.4587, "step": 68500 }, { "epoch": 4.654504688136975, "grad_norm": 1.2185171842575073, "learning_rate": 0.00041840773202880826, "loss": 3.4574, "step": 68505 }, { "epoch": 4.654844408207637, "grad_norm": 1.4075249433517456, "learning_rate": 0.00041836526701997554, "loss": 3.3872, "step": 68510 }, { "epoch": 4.655184128278298, "grad_norm": 1.1633895635604858, "learning_rate": 0.0004183228020111428, "loss": 3.3273, "step": 68515 }, { "epoch": 4.6555238483489605, "grad_norm": 1.299965262413025, "learning_rate": 0.0004182803370023101, "loss": 3.4061, 
"step": 68520 }, { "epoch": 4.655863568419623, "grad_norm": 1.223435878753662, "learning_rate": 0.0004182378719934774, "loss": 3.2923, "step": 68525 }, { "epoch": 4.656203288490284, "grad_norm": 1.4776476621627808, "learning_rate": 0.00041819540698464466, "loss": 3.3825, "step": 68530 }, { "epoch": 4.656543008560946, "grad_norm": 1.3541865348815918, "learning_rate": 0.00041815294197581194, "loss": 3.5432, "step": 68535 }, { "epoch": 4.656882728631608, "grad_norm": 1.3623148202896118, "learning_rate": 0.0004181104769669792, "loss": 3.5094, "step": 68540 }, { "epoch": 4.657222448702269, "grad_norm": 1.5393682718276978, "learning_rate": 0.00041806801195814645, "loss": 3.4875, "step": 68545 }, { "epoch": 4.657562168772931, "grad_norm": 1.2621313333511353, "learning_rate": 0.0004180255469493138, "loss": 3.5432, "step": 68550 }, { "epoch": 4.657901888843593, "grad_norm": 1.1291247606277466, "learning_rate": 0.00041798308194048107, "loss": 3.2154, "step": 68555 }, { "epoch": 4.658241608914254, "grad_norm": 1.2620790004730225, "learning_rate": 0.0004179406169316483, "loss": 3.4384, "step": 68560 }, { "epoch": 4.6585813289849165, "grad_norm": 1.1366429328918457, "learning_rate": 0.0004178981519228156, "loss": 3.497, "step": 68565 }, { "epoch": 4.658921049055579, "grad_norm": 1.9633517265319824, "learning_rate": 0.0004178556869139829, "loss": 3.6099, "step": 68570 }, { "epoch": 4.65926076912624, "grad_norm": 1.2773265838623047, "learning_rate": 0.00041781322190515013, "loss": 3.2518, "step": 68575 }, { "epoch": 4.659600489196902, "grad_norm": 1.2887134552001953, "learning_rate": 0.00041777075689631747, "loss": 3.5731, "step": 68580 }, { "epoch": 4.659940209267564, "grad_norm": 1.2859770059585571, "learning_rate": 0.00041772829188748475, "loss": 3.545, "step": 68585 }, { "epoch": 4.660279929338225, "grad_norm": 1.3219596147537231, "learning_rate": 0.00041768582687865197, "loss": 3.3723, "step": 68590 }, { "epoch": 4.660619649408887, "grad_norm": 1.3571094274520874, 
"learning_rate": 0.00041764336186981925, "loss": 3.3951, "step": 68595 }, { "epoch": 4.660959369479549, "grad_norm": 1.1257271766662598, "learning_rate": 0.0004176008968609866, "loss": 2.9738, "step": 68600 }, { "epoch": 4.66129908955021, "grad_norm": 1.05497145652771, "learning_rate": 0.0004175669248539204, "loss": 3.3505, "step": 68605 }, { "epoch": 4.6616388096208725, "grad_norm": 1.3657325506210327, "learning_rate": 0.00041752445984508764, "loss": 3.4291, "step": 68610 }, { "epoch": 4.661978529691535, "grad_norm": 1.2815738916397095, "learning_rate": 0.0004174819948362549, "loss": 3.283, "step": 68615 }, { "epoch": 4.662318249762196, "grad_norm": 1.3725645542144775, "learning_rate": 0.00041743952982742225, "loss": 3.3201, "step": 68620 }, { "epoch": 4.662657969832858, "grad_norm": 1.1710309982299805, "learning_rate": 0.0004173970648185895, "loss": 3.3325, "step": 68625 }, { "epoch": 4.66299768990352, "grad_norm": 1.2635246515274048, "learning_rate": 0.00041735459980975676, "loss": 3.5515, "step": 68630 }, { "epoch": 4.663337409974181, "grad_norm": 1.6425410509109497, "learning_rate": 0.00041731213480092404, "loss": 3.3694, "step": 68635 }, { "epoch": 4.663677130044843, "grad_norm": 1.5875201225280762, "learning_rate": 0.0004172696697920913, "loss": 3.2602, "step": 68640 }, { "epoch": 4.664016850115505, "grad_norm": 1.1910820007324219, "learning_rate": 0.0004172272047832586, "loss": 3.3592, "step": 68645 }, { "epoch": 4.6643565701861665, "grad_norm": 1.5610673427581787, "learning_rate": 0.0004171847397744259, "loss": 3.2185, "step": 68650 }, { "epoch": 4.6646962902568285, "grad_norm": 1.181252360343933, "learning_rate": 0.00041714227476559316, "loss": 3.357, "step": 68655 }, { "epoch": 4.665036010327491, "grad_norm": 1.0835965871810913, "learning_rate": 0.00041709980975676044, "loss": 3.54, "step": 68660 }, { "epoch": 4.665375730398152, "grad_norm": 1.2166136503219604, "learning_rate": 0.0004170573447479277, "loss": 3.384, "step": 68665 }, { "epoch": 
4.665715450468814, "grad_norm": 1.3499289751052856, "learning_rate": 0.000417014879739095, "loss": 3.4153, "step": 68670 }, { "epoch": 4.666055170539476, "grad_norm": 1.1731830835342407, "learning_rate": 0.0004169724147302623, "loss": 3.5828, "step": 68675 }, { "epoch": 4.666394890610137, "grad_norm": 1.2316745519638062, "learning_rate": 0.00041692994972142956, "loss": 3.1736, "step": 68680 }, { "epoch": 4.666734610680799, "grad_norm": 1.4994800090789795, "learning_rate": 0.00041688748471259684, "loss": 3.4723, "step": 68685 }, { "epoch": 4.667074330751461, "grad_norm": 1.7296767234802246, "learning_rate": 0.0004168450197037641, "loss": 3.5263, "step": 68690 }, { "epoch": 4.6674140508221225, "grad_norm": 1.0712246894836426, "learning_rate": 0.0004168025546949314, "loss": 3.2244, "step": 68695 }, { "epoch": 4.6677537708927845, "grad_norm": 1.3820182085037231, "learning_rate": 0.0004167600896860987, "loss": 3.5461, "step": 68700 }, { "epoch": 4.668093490963447, "grad_norm": 1.4340407848358154, "learning_rate": 0.0004167176246772659, "loss": 3.3299, "step": 68705 }, { "epoch": 4.668433211034108, "grad_norm": 1.1834620237350464, "learning_rate": 0.00041667515966843324, "loss": 3.2049, "step": 68710 }, { "epoch": 4.66877293110477, "grad_norm": 1.2827101945877075, "learning_rate": 0.0004166326946596005, "loss": 3.3563, "step": 68715 }, { "epoch": 4.669112651175431, "grad_norm": 1.6175895929336548, "learning_rate": 0.00041659022965076774, "loss": 3.4519, "step": 68720 }, { "epoch": 4.669452371246093, "grad_norm": 1.4411605596542358, "learning_rate": 0.0004165477646419351, "loss": 3.4758, "step": 68725 }, { "epoch": 4.669792091316755, "grad_norm": 1.5445057153701782, "learning_rate": 0.00041650529963310236, "loss": 3.3881, "step": 68730 }, { "epoch": 4.670131811387416, "grad_norm": 1.3609673976898193, "learning_rate": 0.0004164628346242696, "loss": 3.3808, "step": 68735 }, { "epoch": 4.6704715314580785, "grad_norm": 1.0609923601150513, "learning_rate": 
0.00041642036961543686, "loss": 3.31, "step": 68740 }, { "epoch": 4.6708112515287405, "grad_norm": 1.3889559507369995, "learning_rate": 0.0004163779046066042, "loss": 3.228, "step": 68745 }, { "epoch": 4.671150971599402, "grad_norm": 1.6455681324005127, "learning_rate": 0.0004163354395977714, "loss": 3.497, "step": 68750 }, { "epoch": 4.671490691670064, "grad_norm": 1.7284038066864014, "learning_rate": 0.0004162929745889387, "loss": 3.3867, "step": 68755 }, { "epoch": 4.671830411740726, "grad_norm": 1.576343297958374, "learning_rate": 0.00041625050958010604, "loss": 3.4521, "step": 68760 }, { "epoch": 4.672170131811387, "grad_norm": 1.3240067958831787, "learning_rate": 0.00041620804457127326, "loss": 3.5599, "step": 68765 }, { "epoch": 4.672509851882049, "grad_norm": 1.4277241230010986, "learning_rate": 0.00041616557956244054, "loss": 3.3006, "step": 68770 }, { "epoch": 4.672849571952711, "grad_norm": 1.0938080549240112, "learning_rate": 0.0004161231145536078, "loss": 3.5976, "step": 68775 }, { "epoch": 4.673189292023372, "grad_norm": 1.220557451248169, "learning_rate": 0.0004160806495447751, "loss": 3.5389, "step": 68780 }, { "epoch": 4.6735290120940345, "grad_norm": 1.1196988821029663, "learning_rate": 0.0004160381845359424, "loss": 3.5041, "step": 68785 }, { "epoch": 4.673868732164697, "grad_norm": 1.05781888961792, "learning_rate": 0.00041599571952710966, "loss": 3.3262, "step": 68790 }, { "epoch": 4.674208452235358, "grad_norm": 1.2834898233413696, "learning_rate": 0.00041595325451827694, "loss": 3.3136, "step": 68795 }, { "epoch": 4.67454817230602, "grad_norm": 1.2319858074188232, "learning_rate": 0.0004159107895094442, "loss": 3.3287, "step": 68800 }, { "epoch": 4.674887892376682, "grad_norm": 1.361750602722168, "learning_rate": 0.0004158683245006115, "loss": 3.2361, "step": 68805 }, { "epoch": 4.675227612447343, "grad_norm": 1.1783576011657715, "learning_rate": 0.00041582585949177873, "loss": 3.5574, "step": 68810 }, { "epoch": 4.675567332518005, 
"grad_norm": 1.1673052310943604, "learning_rate": 0.00041578339448294606, "loss": 3.4255, "step": 68815 }, { "epoch": 4.675907052588667, "grad_norm": 1.3842289447784424, "learning_rate": 0.00041574092947411334, "loss": 3.4273, "step": 68820 }, { "epoch": 4.676246772659328, "grad_norm": 1.1893715858459473, "learning_rate": 0.00041569846446528057, "loss": 3.3199, "step": 68825 }, { "epoch": 4.6765864927299905, "grad_norm": 1.1108651161193848, "learning_rate": 0.0004156559994564479, "loss": 3.2433, "step": 68830 }, { "epoch": 4.676926212800653, "grad_norm": 1.2672244310379028, "learning_rate": 0.0004156135344476152, "loss": 3.4211, "step": 68835 }, { "epoch": 4.677265932871314, "grad_norm": 1.436928629875183, "learning_rate": 0.00041557106943878247, "loss": 3.1017, "step": 68840 }, { "epoch": 4.677605652941976, "grad_norm": 1.4459989070892334, "learning_rate": 0.00041552860442994975, "loss": 3.3302, "step": 68845 }, { "epoch": 4.677945373012638, "grad_norm": 1.239188313484192, "learning_rate": 0.000415486139421117, "loss": 3.3483, "step": 68850 }, { "epoch": 4.678285093083299, "grad_norm": 1.216795563697815, "learning_rate": 0.0004154436744122843, "loss": 3.3482, "step": 68855 }, { "epoch": 4.678624813153961, "grad_norm": 1.4768844842910767, "learning_rate": 0.00041540120940345153, "loss": 3.2657, "step": 68860 }, { "epoch": 4.678964533224623, "grad_norm": 1.6454050540924072, "learning_rate": 0.00041535874439461887, "loss": 3.1935, "step": 68865 }, { "epoch": 4.679304253295284, "grad_norm": 1.1873493194580078, "learning_rate": 0.00041531627938578615, "loss": 3.1546, "step": 68870 }, { "epoch": 4.6796439733659465, "grad_norm": 1.4631335735321045, "learning_rate": 0.00041527381437695337, "loss": 3.4494, "step": 68875 }, { "epoch": 4.679983693436609, "grad_norm": 1.2642022371292114, "learning_rate": 0.0004152313493681207, "loss": 3.2722, "step": 68880 }, { "epoch": 4.68032341350727, "grad_norm": 1.2878936529159546, "learning_rate": 0.000415188884359288, "loss": 3.3655, 
"step": 68885 }, { "epoch": 4.680663133577932, "grad_norm": 1.438925862312317, "learning_rate": 0.0004151464193504552, "loss": 3.4596, "step": 68890 }, { "epoch": 4.681002853648594, "grad_norm": 1.2556771039962769, "learning_rate": 0.0004151039543416225, "loss": 3.3143, "step": 68895 }, { "epoch": 4.681342573719255, "grad_norm": 1.496192216873169, "learning_rate": 0.0004150614893327898, "loss": 3.2825, "step": 68900 }, { "epoch": 4.681682293789917, "grad_norm": 1.382422924041748, "learning_rate": 0.00041501902432395705, "loss": 3.4705, "step": 68905 }, { "epoch": 4.682022013860579, "grad_norm": 1.2371009588241577, "learning_rate": 0.00041497655931512433, "loss": 3.2737, "step": 68910 }, { "epoch": 4.68236173393124, "grad_norm": 1.3979978561401367, "learning_rate": 0.00041493409430629167, "loss": 3.4311, "step": 68915 }, { "epoch": 4.6827014540019025, "grad_norm": 1.385832667350769, "learning_rate": 0.0004148916292974589, "loss": 3.6677, "step": 68920 }, { "epoch": 4.683041174072565, "grad_norm": 1.4054527282714844, "learning_rate": 0.00041484916428862617, "loss": 3.4775, "step": 68925 }, { "epoch": 4.683380894143226, "grad_norm": 1.4140969514846802, "learning_rate": 0.00041480669927979345, "loss": 3.3314, "step": 68930 }, { "epoch": 4.683720614213888, "grad_norm": 1.282548427581787, "learning_rate": 0.00041476423427096073, "loss": 3.0584, "step": 68935 }, { "epoch": 4.684060334284549, "grad_norm": 1.1356303691864014, "learning_rate": 0.000414721769262128, "loss": 3.6136, "step": 68940 }, { "epoch": 4.684400054355211, "grad_norm": 1.203561782836914, "learning_rate": 0.0004146793042532953, "loss": 3.2782, "step": 68945 }, { "epoch": 4.684739774425873, "grad_norm": 1.2387793064117432, "learning_rate": 0.00041463683924446257, "loss": 3.3785, "step": 68950 }, { "epoch": 4.685079494496534, "grad_norm": 1.3912034034729004, "learning_rate": 0.00041459437423562985, "loss": 3.4297, "step": 68955 }, { "epoch": 4.6854192145671965, "grad_norm": 1.3093303442001343, 
"learning_rate": 0.00041455190922679713, "loss": 3.346, "step": 68960 }, { "epoch": 4.6857589346378585, "grad_norm": 1.2788764238357544, "learning_rate": 0.00041450944421796436, "loss": 3.3734, "step": 68965 }, { "epoch": 4.68609865470852, "grad_norm": 1.3550893068313599, "learning_rate": 0.0004144669792091317, "loss": 3.2321, "step": 68970 }, { "epoch": 4.686438374779182, "grad_norm": 1.3166372776031494, "learning_rate": 0.000414424514200299, "loss": 3.457, "step": 68975 }, { "epoch": 4.686778094849844, "grad_norm": 0.978601336479187, "learning_rate": 0.0004143820491914662, "loss": 3.4467, "step": 68980 }, { "epoch": 4.687117814920505, "grad_norm": 1.1552428007125854, "learning_rate": 0.00041433958418263353, "loss": 3.1274, "step": 68985 }, { "epoch": 4.687457534991167, "grad_norm": 1.1591180562973022, "learning_rate": 0.0004142971191738008, "loss": 3.4477, "step": 68990 }, { "epoch": 4.687797255061829, "grad_norm": 1.3236079216003418, "learning_rate": 0.00041425465416496804, "loss": 3.5217, "step": 68995 }, { "epoch": 4.68813697513249, "grad_norm": 1.649710774421692, "learning_rate": 0.0004142121891561353, "loss": 3.2457, "step": 69000 }, { "epoch": 4.6884766952031525, "grad_norm": 1.3168728351593018, "learning_rate": 0.00041416972414730265, "loss": 3.6062, "step": 69005 }, { "epoch": 4.6888164152738145, "grad_norm": 1.2065260410308838, "learning_rate": 0.00041412725913846993, "loss": 3.5427, "step": 69010 }, { "epoch": 4.689156135344476, "grad_norm": 1.2287594079971313, "learning_rate": 0.00041408479412963716, "loss": 3.6587, "step": 69015 }, { "epoch": 4.689495855415138, "grad_norm": 1.562501311302185, "learning_rate": 0.0004140423291208045, "loss": 3.4497, "step": 69020 }, { "epoch": 4.6898355754858, "grad_norm": 1.5760078430175781, "learning_rate": 0.0004139998641119718, "loss": 3.4696, "step": 69025 }, { "epoch": 4.690175295556461, "grad_norm": 1.1191751956939697, "learning_rate": 0.000413957399103139, "loss": 3.6971, "step": 69030 }, { "epoch": 
4.690515015627123, "grad_norm": 1.0187935829162598, "learning_rate": 0.0004139149340943063, "loss": 3.2408, "step": 69035 }, { "epoch": 4.690854735697785, "grad_norm": 1.2618359327316284, "learning_rate": 0.0004138724690854736, "loss": 3.2999, "step": 69040 }, { "epoch": 4.691194455768446, "grad_norm": 1.2738525867462158, "learning_rate": 0.00041383000407664084, "loss": 3.414, "step": 69045 }, { "epoch": 4.6915341758391085, "grad_norm": 1.7969294786453247, "learning_rate": 0.0004137875390678081, "loss": 3.42, "step": 69050 }, { "epoch": 4.6918738959097706, "grad_norm": 1.3917737007141113, "learning_rate": 0.00041374507405897545, "loss": 3.5311, "step": 69055 }, { "epoch": 4.692213615980432, "grad_norm": 1.760886549949646, "learning_rate": 0.0004137026090501427, "loss": 3.4775, "step": 69060 }, { "epoch": 4.692553336051094, "grad_norm": 1.0690603256225586, "learning_rate": 0.00041366014404130996, "loss": 3.4094, "step": 69065 }, { "epoch": 4.692893056121756, "grad_norm": 1.3693084716796875, "learning_rate": 0.0004136176790324773, "loss": 3.6024, "step": 69070 }, { "epoch": 4.693232776192417, "grad_norm": 1.5069128274917603, "learning_rate": 0.0004135752140236445, "loss": 3.759, "step": 69075 }, { "epoch": 4.693572496263079, "grad_norm": 1.1383121013641357, "learning_rate": 0.0004135327490148118, "loss": 3.4772, "step": 69080 }, { "epoch": 4.693912216333741, "grad_norm": 1.2036974430084229, "learning_rate": 0.0004134902840059791, "loss": 3.6434, "step": 69085 }, { "epoch": 4.694251936404402, "grad_norm": 1.2551848888397217, "learning_rate": 0.00041344781899714636, "loss": 3.0662, "step": 69090 }, { "epoch": 4.6945916564750645, "grad_norm": 1.2238705158233643, "learning_rate": 0.00041340535398831364, "loss": 3.5024, "step": 69095 }, { "epoch": 4.694931376545727, "grad_norm": 1.4144399166107178, "learning_rate": 0.0004133628889794809, "loss": 3.4474, "step": 69100 }, { "epoch": 4.695271096616388, "grad_norm": 1.1224483251571655, "learning_rate": 0.0004133204239706482, 
"loss": 3.5114, "step": 69105 }, { "epoch": 4.69561081668705, "grad_norm": 1.3113210201263428, "learning_rate": 0.0004132779589618155, "loss": 3.3991, "step": 69110 }, { "epoch": 4.695950536757712, "grad_norm": 1.4844914674758911, "learning_rate": 0.00041323549395298276, "loss": 3.5012, "step": 69115 }, { "epoch": 4.696290256828373, "grad_norm": 1.1343826055526733, "learning_rate": 0.00041319302894415, "loss": 3.568, "step": 69120 }, { "epoch": 4.696629976899035, "grad_norm": 1.232474684715271, "learning_rate": 0.0004131505639353173, "loss": 3.3806, "step": 69125 }, { "epoch": 4.696969696969697, "grad_norm": 1.6152710914611816, "learning_rate": 0.0004131080989264846, "loss": 3.0933, "step": 69130 }, { "epoch": 4.697309417040358, "grad_norm": 1.0359454154968262, "learning_rate": 0.00041306563391765183, "loss": 3.5326, "step": 69135 }, { "epoch": 4.6976491371110205, "grad_norm": 1.1230543851852417, "learning_rate": 0.00041302316890881916, "loss": 3.3524, "step": 69140 }, { "epoch": 4.697988857181683, "grad_norm": 1.459019660949707, "learning_rate": 0.00041298070389998644, "loss": 3.3397, "step": 69145 }, { "epoch": 4.698328577252344, "grad_norm": 1.0956381559371948, "learning_rate": 0.00041293823889115367, "loss": 3.4776, "step": 69150 }, { "epoch": 4.698668297323006, "grad_norm": 1.4759806394577026, "learning_rate": 0.00041289577388232095, "loss": 3.5115, "step": 69155 }, { "epoch": 4.699008017393668, "grad_norm": 1.059857726097107, "learning_rate": 0.0004128533088734883, "loss": 3.5157, "step": 69160 }, { "epoch": 4.699347737464329, "grad_norm": 1.310120940208435, "learning_rate": 0.0004128108438646555, "loss": 3.2303, "step": 69165 }, { "epoch": 4.699687457534991, "grad_norm": 1.2744146585464478, "learning_rate": 0.0004127683788558228, "loss": 3.3816, "step": 69170 }, { "epoch": 4.700027177605653, "grad_norm": 1.3950777053833008, "learning_rate": 0.0004127259138469901, "loss": 3.335, "step": 69175 }, { "epoch": 4.700366897676314, "grad_norm": 1.193838119506836, 
"learning_rate": 0.0004126834488381574, "loss": 3.2695, "step": 69180 }, { "epoch": 4.7007066177469765, "grad_norm": 1.3227527141571045, "learning_rate": 0.00041264098382932463, "loss": 3.3089, "step": 69185 }, { "epoch": 4.701046337817639, "grad_norm": 1.6079623699188232, "learning_rate": 0.0004125985188204919, "loss": 2.9944, "step": 69190 }, { "epoch": 4.7013860578883, "grad_norm": 1.014177918434143, "learning_rate": 0.00041255605381165924, "loss": 3.59, "step": 69195 }, { "epoch": 4.701725777958962, "grad_norm": 1.3055015802383423, "learning_rate": 0.00041251358880282647, "loss": 3.4128, "step": 69200 }, { "epoch": 4.702065498029624, "grad_norm": 1.3737475872039795, "learning_rate": 0.00041247112379399375, "loss": 3.454, "step": 69205 }, { "epoch": 4.702405218100285, "grad_norm": 1.3274719715118408, "learning_rate": 0.0004124286587851611, "loss": 3.403, "step": 69210 }, { "epoch": 4.702744938170947, "grad_norm": 1.9927805662155151, "learning_rate": 0.0004123861937763283, "loss": 3.4383, "step": 69215 }, { "epoch": 4.703084658241609, "grad_norm": 1.6534029245376587, "learning_rate": 0.0004123437287674956, "loss": 3.483, "step": 69220 }, { "epoch": 4.7034243783122704, "grad_norm": 1.3180789947509766, "learning_rate": 0.00041230126375866287, "loss": 3.4751, "step": 69225 }, { "epoch": 4.7037640983829325, "grad_norm": 1.059045672416687, "learning_rate": 0.00041225879874983015, "loss": 3.5943, "step": 69230 }, { "epoch": 4.704103818453595, "grad_norm": 1.4822421073913574, "learning_rate": 0.00041221633374099743, "loss": 3.3883, "step": 69235 }, { "epoch": 4.704443538524256, "grad_norm": 1.2121766805648804, "learning_rate": 0.0004121738687321647, "loss": 3.3957, "step": 69240 }, { "epoch": 4.704783258594918, "grad_norm": 1.2749236822128296, "learning_rate": 0.000412131403723332, "loss": 3.4823, "step": 69245 }, { "epoch": 4.70512297866558, "grad_norm": 1.3176515102386475, "learning_rate": 0.00041208893871449927, "loss": 3.4789, "step": 69250 }, { "epoch": 
4.705462698736241, "grad_norm": 1.3339409828186035, "learning_rate": 0.00041204647370566655, "loss": 3.4279, "step": 69255 }, { "epoch": 4.705802418806903, "grad_norm": 1.430136799812317, "learning_rate": 0.0004120040086968338, "loss": 3.4736, "step": 69260 }, { "epoch": 4.706142138877565, "grad_norm": 1.2807998657226562, "learning_rate": 0.0004119615436880011, "loss": 3.2497, "step": 69265 }, { "epoch": 4.7064818589482265, "grad_norm": 1.100379467010498, "learning_rate": 0.0004119190786791684, "loss": 3.4053, "step": 69270 }, { "epoch": 4.7068215790188885, "grad_norm": 1.3211944103240967, "learning_rate": 0.0004118766136703356, "loss": 3.2963, "step": 69275 }, { "epoch": 4.707161299089551, "grad_norm": 1.134990930557251, "learning_rate": 0.00041183414866150295, "loss": 3.2887, "step": 69280 }, { "epoch": 4.707501019160212, "grad_norm": 1.3433488607406616, "learning_rate": 0.00041179168365267023, "loss": 3.353, "step": 69285 }, { "epoch": 4.707840739230874, "grad_norm": 1.2896734476089478, "learning_rate": 0.00041174921864383746, "loss": 3.2687, "step": 69290 }, { "epoch": 4.708180459301536, "grad_norm": 1.5508110523223877, "learning_rate": 0.00041170675363500474, "loss": 3.3802, "step": 69295 }, { "epoch": 4.708520179372197, "grad_norm": 1.353940486907959, "learning_rate": 0.00041166428862617207, "loss": 3.6482, "step": 69300 }, { "epoch": 4.708859899442859, "grad_norm": 1.242300271987915, "learning_rate": 0.0004116218236173393, "loss": 3.6034, "step": 69305 }, { "epoch": 4.709199619513521, "grad_norm": 1.3446197509765625, "learning_rate": 0.0004115793586085066, "loss": 3.3121, "step": 69310 }, { "epoch": 4.7095393395841825, "grad_norm": 1.0210540294647217, "learning_rate": 0.0004115368935996739, "loss": 3.4396, "step": 69315 }, { "epoch": 4.7098790596548445, "grad_norm": 1.4400174617767334, "learning_rate": 0.00041149442859084114, "loss": 3.4771, "step": 69320 }, { "epoch": 4.710218779725507, "grad_norm": 1.0721204280853271, "learning_rate": 
0.0004114519635820084, "loss": 3.341, "step": 69325 }, { "epoch": 4.710558499796168, "grad_norm": 1.4052412509918213, "learning_rate": 0.0004114094985731757, "loss": 3.3773, "step": 69330 }, { "epoch": 4.71089821986683, "grad_norm": 1.3047510385513306, "learning_rate": 0.000411367033564343, "loss": 3.2677, "step": 69335 }, { "epoch": 4.711237939937492, "grad_norm": 1.1273988485336304, "learning_rate": 0.00041132456855551026, "loss": 3.7822, "step": 69340 }, { "epoch": 4.711577660008153, "grad_norm": 1.426350712776184, "learning_rate": 0.00041128210354667754, "loss": 3.5363, "step": 69345 }, { "epoch": 4.711917380078815, "grad_norm": 1.1823155879974365, "learning_rate": 0.00041123963853784487, "loss": 3.194, "step": 69350 }, { "epoch": 4.712257100149477, "grad_norm": 1.371159315109253, "learning_rate": 0.0004111971735290121, "loss": 3.193, "step": 69355 }, { "epoch": 4.7125968202201385, "grad_norm": 1.0972164869308472, "learning_rate": 0.0004111547085201794, "loss": 3.3915, "step": 69360 }, { "epoch": 4.712936540290801, "grad_norm": 0.9484697580337524, "learning_rate": 0.0004111122435113467, "loss": 3.5022, "step": 69365 }, { "epoch": 4.713276260361463, "grad_norm": 1.5448527336120605, "learning_rate": 0.00041106977850251394, "loss": 3.3976, "step": 69370 }, { "epoch": 4.713615980432124, "grad_norm": 1.636212706565857, "learning_rate": 0.0004110273134936812, "loss": 3.3469, "step": 69375 }, { "epoch": 4.713955700502786, "grad_norm": 0.9879758358001709, "learning_rate": 0.0004109848484848485, "loss": 3.3195, "step": 69380 }, { "epoch": 4.714295420573448, "grad_norm": 1.2263604402542114, "learning_rate": 0.0004109423834760158, "loss": 3.2413, "step": 69385 }, { "epoch": 4.714635140644109, "grad_norm": 1.2041471004486084, "learning_rate": 0.00041089991846718306, "loss": 3.2231, "step": 69390 }, { "epoch": 4.714974860714771, "grad_norm": 1.290428638458252, "learning_rate": 0.00041085745345835034, "loss": 3.558, "step": 69395 }, { "epoch": 4.715314580785432, "grad_norm": 
1.2326617240905762, "learning_rate": 0.0004108149884495176, "loss": 3.4336, "step": 69400 }, { "epoch": 4.7156543008560945, "grad_norm": 1.639262080192566, "learning_rate": 0.0004107725234406849, "loss": 3.575, "step": 69405 }, { "epoch": 4.715994020926757, "grad_norm": 1.4540578126907349, "learning_rate": 0.0004107300584318522, "loss": 3.468, "step": 69410 }, { "epoch": 4.716333740997418, "grad_norm": 1.2590914964675903, "learning_rate": 0.0004106875934230194, "loss": 3.1848, "step": 69415 }, { "epoch": 4.71667346106808, "grad_norm": 1.6882392168045044, "learning_rate": 0.00041064512841418674, "loss": 3.2327, "step": 69420 }, { "epoch": 4.717013181138742, "grad_norm": 1.175000548362732, "learning_rate": 0.000410602663405354, "loss": 3.4715, "step": 69425 }, { "epoch": 4.717352901209403, "grad_norm": 1.0802372694015503, "learning_rate": 0.00041056019839652124, "loss": 3.4027, "step": 69430 }, { "epoch": 4.717692621280065, "grad_norm": 1.1144925355911255, "learning_rate": 0.0004105177333876886, "loss": 3.3911, "step": 69435 }, { "epoch": 4.718032341350727, "grad_norm": 1.1569604873657227, "learning_rate": 0.00041047526837885586, "loss": 3.1428, "step": 69440 }, { "epoch": 4.718372061421388, "grad_norm": 1.466306209564209, "learning_rate": 0.0004104328033700231, "loss": 3.3948, "step": 69445 }, { "epoch": 4.7187117814920505, "grad_norm": 1.1557059288024902, "learning_rate": 0.00041039033836119036, "loss": 3.5049, "step": 69450 }, { "epoch": 4.719051501562713, "grad_norm": 1.3524800539016724, "learning_rate": 0.0004103478733523577, "loss": 2.9639, "step": 69455 }, { "epoch": 4.719391221633374, "grad_norm": 1.4813652038574219, "learning_rate": 0.0004103054083435249, "loss": 3.3255, "step": 69460 }, { "epoch": 4.719730941704036, "grad_norm": 1.3862144947052002, "learning_rate": 0.0004102629433346922, "loss": 3.4895, "step": 69465 }, { "epoch": 4.720070661774698, "grad_norm": 1.2420426607131958, "learning_rate": 0.00041022047832585954, "loss": 3.4299, "step": 69470 }, { 
"epoch": 4.720410381845359, "grad_norm": 1.382878065109253, "learning_rate": 0.00041017801331702676, "loss": 3.275, "step": 69475 }, { "epoch": 4.720750101916021, "grad_norm": 1.098192572593689, "learning_rate": 0.00041013554830819404, "loss": 3.3665, "step": 69480 }, { "epoch": 4.721089821986683, "grad_norm": 1.518219232559204, "learning_rate": 0.0004100930832993613, "loss": 3.1415, "step": 69485 }, { "epoch": 4.721429542057344, "grad_norm": 1.0491557121276855, "learning_rate": 0.0004100506182905286, "loss": 3.3416, "step": 69490 }, { "epoch": 4.7217692621280065, "grad_norm": 1.273090124130249, "learning_rate": 0.0004100081532816959, "loss": 3.5651, "step": 69495 }, { "epoch": 4.722108982198669, "grad_norm": 1.4137578010559082, "learning_rate": 0.00040996568827286316, "loss": 3.5192, "step": 69500 }, { "epoch": 4.72244870226933, "grad_norm": 1.0989787578582764, "learning_rate": 0.00040992322326403044, "loss": 3.4662, "step": 69505 }, { "epoch": 4.722788422339992, "grad_norm": 1.413799524307251, "learning_rate": 0.0004098807582551977, "loss": 3.5396, "step": 69510 }, { "epoch": 4.723128142410654, "grad_norm": 1.3328105211257935, "learning_rate": 0.000409838293246365, "loss": 3.3392, "step": 69515 }, { "epoch": 4.723467862481315, "grad_norm": 1.3317972421646118, "learning_rate": 0.0004097958282375323, "loss": 3.3392, "step": 69520 }, { "epoch": 4.723807582551977, "grad_norm": 1.1249340772628784, "learning_rate": 0.00040975336322869956, "loss": 3.3204, "step": 69525 }, { "epoch": 4.724147302622639, "grad_norm": 1.3916422128677368, "learning_rate": 0.00040971089821986684, "loss": 3.4994, "step": 69530 }, { "epoch": 4.7244870226933005, "grad_norm": 1.3739205598831177, "learning_rate": 0.0004096684332110341, "loss": 3.1272, "step": 69535 }, { "epoch": 4.7248267427639625, "grad_norm": 1.074324131011963, "learning_rate": 0.0004096259682022014, "loss": 3.2418, "step": 69540 }, { "epoch": 4.725166462834625, "grad_norm": 1.3889780044555664, "learning_rate": 
0.0004095835031933687, "loss": 3.5539, "step": 69545 }, { "epoch": 4.725506182905286, "grad_norm": 1.0159146785736084, "learning_rate": 0.00040954103818453597, "loss": 3.7714, "step": 69550 }, { "epoch": 4.725845902975948, "grad_norm": 1.0627261400222778, "learning_rate": 0.0004094985731757032, "loss": 3.2213, "step": 69555 }, { "epoch": 4.72618562304661, "grad_norm": 1.3232765197753906, "learning_rate": 0.0004094561081668705, "loss": 3.1187, "step": 69560 }, { "epoch": 4.726525343117271, "grad_norm": 1.6528757810592651, "learning_rate": 0.0004094136431580378, "loss": 3.4207, "step": 69565 }, { "epoch": 4.726865063187933, "grad_norm": 1.2992326021194458, "learning_rate": 0.00040937117814920503, "loss": 3.3872, "step": 69570 }, { "epoch": 4.727204783258595, "grad_norm": 1.3657017946243286, "learning_rate": 0.00040932871314037237, "loss": 3.2673, "step": 69575 }, { "epoch": 4.7275445033292565, "grad_norm": 1.3283122777938843, "learning_rate": 0.00040928624813153965, "loss": 3.3712, "step": 69580 }, { "epoch": 4.7278842233999185, "grad_norm": 1.4037188291549683, "learning_rate": 0.00040924378312270687, "loss": 3.2671, "step": 69585 }, { "epoch": 4.728223943470581, "grad_norm": 1.2795530557632446, "learning_rate": 0.00040920131811387415, "loss": 3.4613, "step": 69590 }, { "epoch": 4.728563663541242, "grad_norm": 1.3087859153747559, "learning_rate": 0.0004091588531050415, "loss": 3.38, "step": 69595 }, { "epoch": 4.728903383611904, "grad_norm": 1.2775392532348633, "learning_rate": 0.0004091163880962087, "loss": 3.3866, "step": 69600 }, { "epoch": 4.729243103682566, "grad_norm": 1.2583109140396118, "learning_rate": 0.000409073923087376, "loss": 3.3367, "step": 69605 }, { "epoch": 4.729582823753227, "grad_norm": 1.1269912719726562, "learning_rate": 0.0004090314580785433, "loss": 3.6239, "step": 69610 }, { "epoch": 4.729922543823889, "grad_norm": 1.3219447135925293, "learning_rate": 0.00040898899306971055, "loss": 3.177, "step": 69615 }, { "epoch": 4.73026226389455, 
"grad_norm": 1.130709171295166, "learning_rate": 0.00040894652806087783, "loss": 3.3665, "step": 69620 }, { "epoch": 4.7306019839652125, "grad_norm": 1.2775055170059204, "learning_rate": 0.0004089040630520451, "loss": 3.4666, "step": 69625 }, { "epoch": 4.7309417040358746, "grad_norm": 1.2617381811141968, "learning_rate": 0.0004088615980432124, "loss": 3.5131, "step": 69630 }, { "epoch": 4.731281424106536, "grad_norm": 1.357999324798584, "learning_rate": 0.00040881913303437967, "loss": 3.2172, "step": 69635 }, { "epoch": 4.731621144177198, "grad_norm": 1.2466011047363281, "learning_rate": 0.00040877666802554695, "loss": 3.5474, "step": 69640 }, { "epoch": 4.73196086424786, "grad_norm": 1.1604058742523193, "learning_rate": 0.00040873420301671423, "loss": 3.3198, "step": 69645 }, { "epoch": 4.732300584318521, "grad_norm": 1.204733967781067, "learning_rate": 0.0004086917380078815, "loss": 3.2556, "step": 69650 }, { "epoch": 4.732640304389183, "grad_norm": 1.1974053382873535, "learning_rate": 0.0004086492729990488, "loss": 3.4331, "step": 69655 }, { "epoch": 4.732980024459845, "grad_norm": 1.3863755464553833, "learning_rate": 0.000408606807990216, "loss": 3.4008, "step": 69660 }, { "epoch": 4.733319744530506, "grad_norm": 1.2101149559020996, "learning_rate": 0.00040856434298138335, "loss": 3.0123, "step": 69665 }, { "epoch": 4.7336594646011685, "grad_norm": 1.0303338766098022, "learning_rate": 0.00040852187797255063, "loss": 3.5213, "step": 69670 }, { "epoch": 4.733999184671831, "grad_norm": 1.3194091320037842, "learning_rate": 0.00040847941296371786, "loss": 3.5115, "step": 69675 }, { "epoch": 4.734338904742492, "grad_norm": 1.2131043672561646, "learning_rate": 0.0004084369479548852, "loss": 3.4752, "step": 69680 }, { "epoch": 4.734678624813154, "grad_norm": 1.1182880401611328, "learning_rate": 0.0004083944829460525, "loss": 3.3534, "step": 69685 }, { "epoch": 4.735018344883816, "grad_norm": 1.3041484355926514, "learning_rate": 0.00040835201793721975, "loss": 3.6515, 
"step": 69690 }, { "epoch": 4.735358064954477, "grad_norm": 1.302955150604248, "learning_rate": 0.00040830955292838703, "loss": 3.3504, "step": 69695 }, { "epoch": 4.735697785025139, "grad_norm": 1.3278086185455322, "learning_rate": 0.0004082670879195543, "loss": 3.1782, "step": 69700 }, { "epoch": 4.736037505095801, "grad_norm": 1.3560817241668701, "learning_rate": 0.0004082246229107216, "loss": 3.2788, "step": 69705 }, { "epoch": 4.736377225166462, "grad_norm": 1.6396595239639282, "learning_rate": 0.0004081821579018888, "loss": 3.4193, "step": 69710 }, { "epoch": 4.7367169452371245, "grad_norm": 1.3732481002807617, "learning_rate": 0.00040813969289305615, "loss": 3.3849, "step": 69715 }, { "epoch": 4.737056665307787, "grad_norm": 1.4161372184753418, "learning_rate": 0.00040809722788422343, "loss": 2.8817, "step": 69720 }, { "epoch": 4.737396385378448, "grad_norm": 1.0839283466339111, "learning_rate": 0.00040805476287539066, "loss": 3.7653, "step": 69725 }, { "epoch": 4.73773610544911, "grad_norm": 1.4780056476593018, "learning_rate": 0.000408012297866558, "loss": 3.3045, "step": 69730 }, { "epoch": 4.738075825519772, "grad_norm": 1.1944098472595215, "learning_rate": 0.0004079698328577253, "loss": 3.212, "step": 69735 }, { "epoch": 4.738415545590433, "grad_norm": 1.1781920194625854, "learning_rate": 0.0004079273678488925, "loss": 3.4608, "step": 69740 }, { "epoch": 4.738755265661095, "grad_norm": 1.4312254190444946, "learning_rate": 0.0004078849028400598, "loss": 3.4616, "step": 69745 }, { "epoch": 4.739094985731757, "grad_norm": 1.2407965660095215, "learning_rate": 0.0004078424378312271, "loss": 3.4728, "step": 69750 }, { "epoch": 4.739434705802418, "grad_norm": 1.3809945583343506, "learning_rate": 0.00040779997282239434, "loss": 3.5816, "step": 69755 }, { "epoch": 4.7397744258730805, "grad_norm": 1.9286164045333862, "learning_rate": 0.0004077575078135616, "loss": 3.2641, "step": 69760 }, { "epoch": 4.740114145943743, "grad_norm": 1.1491655111312866, 
"learning_rate": 0.00040771504280472895, "loss": 3.5217, "step": 69765 }, { "epoch": 4.740453866014404, "grad_norm": 1.2210509777069092, "learning_rate": 0.0004076725777958962, "loss": 3.3906, "step": 69770 }, { "epoch": 4.740793586085066, "grad_norm": 1.3648651838302612, "learning_rate": 0.00040763011278706346, "loss": 3.2815, "step": 69775 }, { "epoch": 4.741133306155728, "grad_norm": 1.7471810579299927, "learning_rate": 0.00040758764777823074, "loss": 3.3667, "step": 69780 }, { "epoch": 4.741473026226389, "grad_norm": 1.3253597021102905, "learning_rate": 0.000407545182769398, "loss": 3.4372, "step": 69785 }, { "epoch": 4.741812746297051, "grad_norm": 1.5357425212860107, "learning_rate": 0.0004075027177605653, "loss": 3.4147, "step": 69790 }, { "epoch": 4.742152466367713, "grad_norm": 1.2074811458587646, "learning_rate": 0.0004074602527517326, "loss": 3.6816, "step": 69795 }, { "epoch": 4.7424921864383744, "grad_norm": 1.396445631980896, "learning_rate": 0.00040741778774289986, "loss": 3.3128, "step": 69800 }, { "epoch": 4.7428319065090365, "grad_norm": 1.1525555849075317, "learning_rate": 0.00040737532273406714, "loss": 3.2857, "step": 69805 }, { "epoch": 4.743171626579699, "grad_norm": 1.273434042930603, "learning_rate": 0.0004073328577252344, "loss": 3.3911, "step": 69810 }, { "epoch": 4.74351134665036, "grad_norm": 1.4414503574371338, "learning_rate": 0.00040729039271640165, "loss": 2.8734, "step": 69815 }, { "epoch": 4.743851066721022, "grad_norm": 1.2668424844741821, "learning_rate": 0.000407247927707569, "loss": 3.3662, "step": 69820 }, { "epoch": 4.744190786791684, "grad_norm": 1.256745457649231, "learning_rate": 0.00040720546269873626, "loss": 3.421, "step": 69825 }, { "epoch": 4.744530506862345, "grad_norm": 1.281632423400879, "learning_rate": 0.0004071629976899035, "loss": 3.4139, "step": 69830 }, { "epoch": 4.744870226933007, "grad_norm": 1.4017897844314575, "learning_rate": 0.0004071205326810708, "loss": 3.5023, "step": 69835 }, { "epoch": 
4.745209947003669, "grad_norm": 1.3677994012832642, "learning_rate": 0.0004070780676722381, "loss": 3.2102, "step": 69840 }, { "epoch": 4.7455496670743305, "grad_norm": 1.3561267852783203, "learning_rate": 0.0004070356026634053, "loss": 3.4356, "step": 69845 }, { "epoch": 4.7458893871449925, "grad_norm": 1.3820888996124268, "learning_rate": 0.0004069931376545726, "loss": 3.5196, "step": 69850 }, { "epoch": 4.746229107215655, "grad_norm": 1.3813883066177368, "learning_rate": 0.00040695067264573994, "loss": 3.5794, "step": 69855 }, { "epoch": 4.746568827286316, "grad_norm": 1.3767729997634888, "learning_rate": 0.0004069082076369072, "loss": 3.0922, "step": 69860 }, { "epoch": 4.746908547356978, "grad_norm": 1.2090036869049072, "learning_rate": 0.00040686574262807445, "loss": 3.6051, "step": 69865 }, { "epoch": 4.74724826742764, "grad_norm": 1.5546456575393677, "learning_rate": 0.0004068232776192418, "loss": 3.1627, "step": 69870 }, { "epoch": 4.747587987498301, "grad_norm": 1.3725495338439941, "learning_rate": 0.00040678081261040906, "loss": 3.3509, "step": 69875 }, { "epoch": 4.747927707568963, "grad_norm": 1.4185781478881836, "learning_rate": 0.0004067383476015763, "loss": 3.5148, "step": 69880 }, { "epoch": 4.748267427639625, "grad_norm": 1.1635358333587646, "learning_rate": 0.00040669588259274357, "loss": 3.3985, "step": 69885 }, { "epoch": 4.7486071477102865, "grad_norm": 1.1848708391189575, "learning_rate": 0.0004066534175839109, "loss": 3.704, "step": 69890 }, { "epoch": 4.7489468677809485, "grad_norm": 1.4088311195373535, "learning_rate": 0.00040661095257507813, "loss": 3.4544, "step": 69895 }, { "epoch": 4.749286587851611, "grad_norm": 1.414631962776184, "learning_rate": 0.0004065684875662454, "loss": 3.4529, "step": 69900 }, { "epoch": 4.749626307922272, "grad_norm": 1.3383638858795166, "learning_rate": 0.00040652602255741274, "loss": 3.2375, "step": 69905 }, { "epoch": 4.749966027992934, "grad_norm": 1.0852605104446411, "learning_rate": 
0.00040648355754857997, "loss": 3.5066, "step": 69910 }, { "epoch": 4.750305748063596, "grad_norm": 1.1295149326324463, "learning_rate": 0.00040644109253974725, "loss": 3.5674, "step": 69915 }, { "epoch": 4.750645468134257, "grad_norm": 1.342616319656372, "learning_rate": 0.0004063986275309146, "loss": 3.3655, "step": 69920 }, { "epoch": 4.750985188204919, "grad_norm": 1.376362681388855, "learning_rate": 0.0004063561625220818, "loss": 3.295, "step": 69925 }, { "epoch": 4.751324908275581, "grad_norm": 1.1258492469787598, "learning_rate": 0.0004063136975132491, "loss": 3.2952, "step": 69930 }, { "epoch": 4.7516646283462425, "grad_norm": 1.4618619680404663, "learning_rate": 0.00040627123250441637, "loss": 3.0635, "step": 69935 }, { "epoch": 4.7520043484169046, "grad_norm": 1.0967847108840942, "learning_rate": 0.00040622876749558365, "loss": 3.4241, "step": 69940 }, { "epoch": 4.752344068487567, "grad_norm": 1.297544002532959, "learning_rate": 0.00040618630248675093, "loss": 3.7116, "step": 69945 }, { "epoch": 4.752683788558228, "grad_norm": 1.1570910215377808, "learning_rate": 0.0004061438374779182, "loss": 3.3246, "step": 69950 }, { "epoch": 4.75302350862889, "grad_norm": 1.1103785037994385, "learning_rate": 0.00040610137246908543, "loss": 3.2965, "step": 69955 }, { "epoch": 4.753363228699552, "grad_norm": 1.4573906660079956, "learning_rate": 0.00040605890746025277, "loss": 3.4939, "step": 69960 }, { "epoch": 4.753702948770213, "grad_norm": 1.1950085163116455, "learning_rate": 0.00040601644245142005, "loss": 3.6014, "step": 69965 }, { "epoch": 4.754042668840875, "grad_norm": 1.1954940557479858, "learning_rate": 0.0004059739774425873, "loss": 3.3606, "step": 69970 }, { "epoch": 4.754382388911537, "grad_norm": 1.5367918014526367, "learning_rate": 0.0004059315124337546, "loss": 3.1065, "step": 69975 }, { "epoch": 4.7547221089821985, "grad_norm": 1.5301625728607178, "learning_rate": 0.0004058890474249219, "loss": 3.401, "step": 69980 }, { "epoch": 4.755061829052861, 
"grad_norm": 1.6286890506744385, "learning_rate": 0.0004058465824160891, "loss": 3.352, "step": 69985 }, { "epoch": 4.755401549123523, "grad_norm": 1.2277811765670776, "learning_rate": 0.00040580411740725645, "loss": 3.28, "step": 69990 }, { "epoch": 4.755741269194184, "grad_norm": 1.498694658279419, "learning_rate": 0.00040576165239842373, "loss": 3.2126, "step": 69995 }, { "epoch": 4.756080989264846, "grad_norm": 1.7362507581710815, "learning_rate": 0.00040571918738959096, "loss": 3.402, "step": 70000 }, { "epoch": 4.756420709335508, "grad_norm": 1.492014765739441, "learning_rate": 0.00040567672238075824, "loss": 3.7417, "step": 70005 }, { "epoch": 4.756760429406169, "grad_norm": 1.264259934425354, "learning_rate": 0.00040563425737192557, "loss": 3.4618, "step": 70010 }, { "epoch": 4.757100149476831, "grad_norm": 1.244139552116394, "learning_rate": 0.0004055917923630928, "loss": 3.3796, "step": 70015 }, { "epoch": 4.757439869547493, "grad_norm": 1.095877766609192, "learning_rate": 0.0004055493273542601, "loss": 3.3598, "step": 70020 }, { "epoch": 4.7577795896181545, "grad_norm": 1.6229299306869507, "learning_rate": 0.0004055068623454274, "loss": 3.3507, "step": 70025 }, { "epoch": 4.758119309688817, "grad_norm": 1.3983101844787598, "learning_rate": 0.0004054643973365947, "loss": 3.394, "step": 70030 }, { "epoch": 4.758459029759479, "grad_norm": 1.220045804977417, "learning_rate": 0.0004054219323277619, "loss": 3.5987, "step": 70035 }, { "epoch": 4.75879874983014, "grad_norm": 1.2969802618026733, "learning_rate": 0.0004053794673189292, "loss": 3.4534, "step": 70040 }, { "epoch": 4.759138469900802, "grad_norm": 1.1382709741592407, "learning_rate": 0.00040533700231009653, "loss": 3.6036, "step": 70045 }, { "epoch": 4.759478189971464, "grad_norm": 0.9612616300582886, "learning_rate": 0.00040529453730126376, "loss": 2.9886, "step": 70050 }, { "epoch": 4.759817910042125, "grad_norm": 1.0424646139144897, "learning_rate": 0.00040525207229243104, "loss": 3.5424, "step": 
70055 }, { "epoch": 4.760157630112787, "grad_norm": 1.0484826564788818, "learning_rate": 0.00040520960728359837, "loss": 3.1672, "step": 70060 }, { "epoch": 4.760497350183449, "grad_norm": 1.4721615314483643, "learning_rate": 0.0004051671422747656, "loss": 3.3, "step": 70065 }, { "epoch": 4.7608370702541105, "grad_norm": 1.5687096118927002, "learning_rate": 0.0004051246772659329, "loss": 3.623, "step": 70070 }, { "epoch": 4.761176790324773, "grad_norm": 1.5561968088150024, "learning_rate": 0.00040508221225710016, "loss": 3.2996, "step": 70075 }, { "epoch": 4.761516510395434, "grad_norm": 1.3639782667160034, "learning_rate": 0.00040503974724826744, "loss": 3.506, "step": 70080 }, { "epoch": 4.761856230466096, "grad_norm": 1.2299745082855225, "learning_rate": 0.0004049972822394347, "loss": 3.3562, "step": 70085 }, { "epoch": 4.762195950536758, "grad_norm": 1.1298103332519531, "learning_rate": 0.000404954817230602, "loss": 3.0254, "step": 70090 }, { "epoch": 4.762535670607419, "grad_norm": 1.4452999830245972, "learning_rate": 0.0004049123522217693, "loss": 3.2003, "step": 70095 }, { "epoch": 4.762875390678081, "grad_norm": 1.138493537902832, "learning_rate": 0.00040486988721293656, "loss": 3.475, "step": 70100 }, { "epoch": 4.763215110748743, "grad_norm": 1.0735447406768799, "learning_rate": 0.00040482742220410384, "loss": 3.348, "step": 70105 }, { "epoch": 4.7635548308194045, "grad_norm": 1.187209963798523, "learning_rate": 0.00040478495719527106, "loss": 2.9175, "step": 70110 }, { "epoch": 4.7638945508900665, "grad_norm": 1.5763624906539917, "learning_rate": 0.0004047424921864384, "loss": 3.2673, "step": 70115 }, { "epoch": 4.764234270960729, "grad_norm": 1.5332366228103638, "learning_rate": 0.0004047000271776057, "loss": 3.3283, "step": 70120 }, { "epoch": 4.76457399103139, "grad_norm": 1.1120506525039673, "learning_rate": 0.0004046575621687729, "loss": 3.4428, "step": 70125 }, { "epoch": 4.764913711102052, "grad_norm": 1.3994007110595703, "learning_rate": 
0.00040461509715994024, "loss": 3.2299, "step": 70130 }, { "epoch": 4.765253431172714, "grad_norm": 1.2494046688079834, "learning_rate": 0.0004045726321511075, "loss": 3.5883, "step": 70135 }, { "epoch": 4.765593151243375, "grad_norm": 1.3795511722564697, "learning_rate": 0.00040453016714227474, "loss": 3.4397, "step": 70140 }, { "epoch": 4.765932871314037, "grad_norm": 1.4118188619613647, "learning_rate": 0.000404487702133442, "loss": 3.344, "step": 70145 }, { "epoch": 4.766272591384699, "grad_norm": 1.3351176977157593, "learning_rate": 0.00040444523712460936, "loss": 3.2352, "step": 70150 }, { "epoch": 4.7666123114553605, "grad_norm": 1.1969804763793945, "learning_rate": 0.0004044027721157766, "loss": 3.2242, "step": 70155 }, { "epoch": 4.7669520315260225, "grad_norm": 1.4402459859848022, "learning_rate": 0.00040436030710694386, "loss": 3.5529, "step": 70160 }, { "epoch": 4.767291751596685, "grad_norm": 1.2509578466415405, "learning_rate": 0.0004043178420981112, "loss": 3.5349, "step": 70165 }, { "epoch": 4.767631471667346, "grad_norm": 1.8132487535476685, "learning_rate": 0.0004042753770892784, "loss": 3.5054, "step": 70170 }, { "epoch": 4.767971191738008, "grad_norm": 1.0474286079406738, "learning_rate": 0.0004042329120804457, "loss": 3.2919, "step": 70175 }, { "epoch": 4.76831091180867, "grad_norm": 1.1477123498916626, "learning_rate": 0.000404190447071613, "loss": 3.6377, "step": 70180 }, { "epoch": 4.768650631879331, "grad_norm": 1.1021218299865723, "learning_rate": 0.00040414798206278026, "loss": 3.5093, "step": 70185 }, { "epoch": 4.768990351949993, "grad_norm": 1.2111953496932983, "learning_rate": 0.00040410551705394754, "loss": 3.5191, "step": 70190 }, { "epoch": 4.769330072020655, "grad_norm": 1.1860325336456299, "learning_rate": 0.0004040630520451148, "loss": 3.3138, "step": 70195 }, { "epoch": 4.7696697920913165, "grad_norm": 1.0910214185714722, "learning_rate": 0.00040402058703628216, "loss": 3.311, "step": 70200 }, { "epoch": 4.7700095121619785, 
"grad_norm": 1.0408936738967896, "learning_rate": 0.0004039781220274494, "loss": 3.3981, "step": 70205 }, { "epoch": 4.770349232232641, "grad_norm": 0.9932443499565125, "learning_rate": 0.00040393565701861666, "loss": 3.3311, "step": 70210 }, { "epoch": 4.770688952303302, "grad_norm": 1.335711121559143, "learning_rate": 0.000403893192009784, "loss": 3.3527, "step": 70215 }, { "epoch": 4.771028672373964, "grad_norm": 1.3837076425552368, "learning_rate": 0.0004038507270009512, "loss": 3.3708, "step": 70220 }, { "epoch": 4.771368392444626, "grad_norm": 1.234404444694519, "learning_rate": 0.0004038082619921185, "loss": 3.3592, "step": 70225 }, { "epoch": 4.771708112515287, "grad_norm": 7.964658260345459, "learning_rate": 0.0004037657969832858, "loss": 3.5366, "step": 70230 }, { "epoch": 4.772047832585949, "grad_norm": 1.376362681388855, "learning_rate": 0.00040372333197445306, "loss": 3.2802, "step": 70235 }, { "epoch": 4.772387552656611, "grad_norm": 1.1497365236282349, "learning_rate": 0.00040368086696562034, "loss": 3.1456, "step": 70240 }, { "epoch": 4.7727272727272725, "grad_norm": 1.035879373550415, "learning_rate": 0.0004036384019567876, "loss": 3.356, "step": 70245 }, { "epoch": 4.773066992797935, "grad_norm": 1.1751854419708252, "learning_rate": 0.0004035959369479549, "loss": 3.4197, "step": 70250 }, { "epoch": 4.773406712868597, "grad_norm": 1.0272119045257568, "learning_rate": 0.0004035534719391222, "loss": 3.4183, "step": 70255 }, { "epoch": 4.773746432939258, "grad_norm": 1.3517273664474487, "learning_rate": 0.00040351100693028947, "loss": 3.4883, "step": 70260 }, { "epoch": 4.77408615300992, "grad_norm": 1.280631422996521, "learning_rate": 0.0004034685419214567, "loss": 3.3266, "step": 70265 }, { "epoch": 4.774425873080582, "grad_norm": 1.584851622581482, "learning_rate": 0.000403426076912624, "loss": 3.3755, "step": 70270 }, { "epoch": 4.774765593151243, "grad_norm": 1.2355842590332031, "learning_rate": 0.0004033836119037913, "loss": 3.6538, "step": 
70275 }, { "epoch": 4.775105313221905, "grad_norm": 1.177871584892273, "learning_rate": 0.00040334114689495853, "loss": 3.2716, "step": 70280 }, { "epoch": 4.775445033292567, "grad_norm": 1.108591914176941, "learning_rate": 0.00040329868188612587, "loss": 3.5958, "step": 70285 }, { "epoch": 4.7757847533632285, "grad_norm": 1.2052059173583984, "learning_rate": 0.00040325621687729315, "loss": 3.4518, "step": 70290 }, { "epoch": 4.776124473433891, "grad_norm": 1.3255202770233154, "learning_rate": 0.00040321375186846037, "loss": 3.5474, "step": 70295 }, { "epoch": 4.776464193504552, "grad_norm": 1.426576018333435, "learning_rate": 0.00040317128685962765, "loss": 3.5657, "step": 70300 }, { "epoch": 4.776803913575214, "grad_norm": 1.484837532043457, "learning_rate": 0.000403128821850795, "loss": 3.6977, "step": 70305 }, { "epoch": 4.777143633645876, "grad_norm": 1.7287381887435913, "learning_rate": 0.0004030863568419622, "loss": 3.2825, "step": 70310 }, { "epoch": 4.777483353716537, "grad_norm": 1.2190829515457153, "learning_rate": 0.0004030438918331295, "loss": 3.323, "step": 70315 }, { "epoch": 4.777823073787199, "grad_norm": 1.1230568885803223, "learning_rate": 0.0004030014268242968, "loss": 3.5246, "step": 70320 }, { "epoch": 4.778162793857861, "grad_norm": 1.2611994743347168, "learning_rate": 0.00040295896181546405, "loss": 3.3826, "step": 70325 }, { "epoch": 4.778502513928522, "grad_norm": 1.3241692781448364, "learning_rate": 0.00040291649680663133, "loss": 3.3127, "step": 70330 }, { "epoch": 4.7788422339991845, "grad_norm": 1.2968093156814575, "learning_rate": 0.0004028740317977986, "loss": 3.3982, "step": 70335 }, { "epoch": 4.779181954069847, "grad_norm": 1.4445639848709106, "learning_rate": 0.0004028315667889659, "loss": 3.2139, "step": 70340 }, { "epoch": 4.779521674140508, "grad_norm": 1.2556824684143066, "learning_rate": 0.00040278910178013317, "loss": 3.3794, "step": 70345 }, { "epoch": 4.77986139421117, "grad_norm": 1.1659927368164062, "learning_rate": 
0.00040274663677130045, "loss": 3.2285, "step": 70350 }, { "epoch": 4.780201114281832, "grad_norm": 1.2997190952301025, "learning_rate": 0.00040270417176246773, "loss": 3.5888, "step": 70355 }, { "epoch": 4.780540834352493, "grad_norm": 0.9422664046287537, "learning_rate": 0.000402661706753635, "loss": 3.4764, "step": 70360 }, { "epoch": 4.780880554423155, "grad_norm": 1.79936945438385, "learning_rate": 0.0004026192417448023, "loss": 3.2376, "step": 70365 }, { "epoch": 4.781220274493817, "grad_norm": 1.070554494857788, "learning_rate": 0.00040257677673596957, "loss": 3.5356, "step": 70370 }, { "epoch": 4.7815599945644784, "grad_norm": 1.238409161567688, "learning_rate": 0.00040253431172713685, "loss": 3.4461, "step": 70375 }, { "epoch": 4.7818997146351405, "grad_norm": 1.35757315158844, "learning_rate": 0.00040249184671830413, "loss": 3.3057, "step": 70380 }, { "epoch": 4.782239434705803, "grad_norm": 1.2749102115631104, "learning_rate": 0.0004024493817094714, "loss": 3.5799, "step": 70385 }, { "epoch": 4.782579154776464, "grad_norm": 1.2646121978759766, "learning_rate": 0.0004024069167006387, "loss": 3.3506, "step": 70390 }, { "epoch": 4.782918874847126, "grad_norm": 1.4246611595153809, "learning_rate": 0.00040236445169180597, "loss": 3.3166, "step": 70395 }, { "epoch": 4.783258594917788, "grad_norm": 1.2966480255126953, "learning_rate": 0.00040232198668297325, "loss": 3.1633, "step": 70400 }, { "epoch": 4.783598314988449, "grad_norm": 1.5401270389556885, "learning_rate": 0.0004022795216741405, "loss": 3.5077, "step": 70405 }, { "epoch": 4.783938035059111, "grad_norm": 1.1060528755187988, "learning_rate": 0.0004022370566653078, "loss": 3.5623, "step": 70410 }, { "epoch": 4.784277755129773, "grad_norm": 1.7996503114700317, "learning_rate": 0.0004021945916564751, "loss": 3.6102, "step": 70415 }, { "epoch": 4.7846174752004345, "grad_norm": 1.461756944656372, "learning_rate": 0.0004021521266476423, "loss": 3.3149, "step": 70420 }, { "epoch": 4.7849571952710965, 
"grad_norm": 1.2978856563568115, "learning_rate": 0.00040210966163880965, "loss": 3.1813, "step": 70425 }, { "epoch": 4.785296915341759, "grad_norm": 1.1044471263885498, "learning_rate": 0.00040206719662997693, "loss": 3.5632, "step": 70430 }, { "epoch": 4.78563663541242, "grad_norm": 1.2046401500701904, "learning_rate": 0.00040202473162114416, "loss": 3.3563, "step": 70435 }, { "epoch": 4.785976355483082, "grad_norm": 1.5483325719833374, "learning_rate": 0.00040198226661231144, "loss": 3.2827, "step": 70440 }, { "epoch": 4.786316075553744, "grad_norm": 1.3662070035934448, "learning_rate": 0.0004019398016034788, "loss": 3.3164, "step": 70445 }, { "epoch": 4.786655795624405, "grad_norm": 1.2245241403579712, "learning_rate": 0.000401897336594646, "loss": 3.4644, "step": 70450 }, { "epoch": 4.786995515695067, "grad_norm": 1.146751880645752, "learning_rate": 0.0004018548715858133, "loss": 3.1381, "step": 70455 }, { "epoch": 4.787335235765729, "grad_norm": 1.389241099357605, "learning_rate": 0.0004018124065769806, "loss": 3.3812, "step": 70460 }, { "epoch": 4.7876749558363905, "grad_norm": 1.4084291458129883, "learning_rate": 0.00040176994156814784, "loss": 3.3586, "step": 70465 }, { "epoch": 4.7880146759070525, "grad_norm": 1.556718111038208, "learning_rate": 0.0004017274765593151, "loss": 3.626, "step": 70470 }, { "epoch": 4.788354395977715, "grad_norm": 1.3647022247314453, "learning_rate": 0.0004016850115504824, "loss": 3.4307, "step": 70475 }, { "epoch": 4.788694116048376, "grad_norm": 1.224571943283081, "learning_rate": 0.0004016425465416497, "loss": 3.2318, "step": 70480 }, { "epoch": 4.789033836119038, "grad_norm": 1.2169125080108643, "learning_rate": 0.00040160008153281696, "loss": 3.2901, "step": 70485 }, { "epoch": 4.7893735561897, "grad_norm": 1.1391466856002808, "learning_rate": 0.00040155761652398424, "loss": 3.3047, "step": 70490 }, { "epoch": 4.789713276260361, "grad_norm": 1.3732504844665527, "learning_rate": 0.0004015151515151515, "loss": 3.2671, 
"step": 70495 }, { "epoch": 4.790052996331023, "grad_norm": 1.418520212173462, "learning_rate": 0.0004014726865063188, "loss": 3.6607, "step": 70500 }, { "epoch": 4.790392716401685, "grad_norm": 1.1874284744262695, "learning_rate": 0.0004014302214974861, "loss": 3.21, "step": 70505 }, { "epoch": 4.7907324364723465, "grad_norm": 1.2030320167541504, "learning_rate": 0.0004013877564886533, "loss": 3.4345, "step": 70510 }, { "epoch": 4.7910721565430086, "grad_norm": 1.292283296585083, "learning_rate": 0.00040134529147982064, "loss": 3.2938, "step": 70515 }, { "epoch": 4.791411876613671, "grad_norm": 1.0396794080734253, "learning_rate": 0.0004013028264709879, "loss": 3.6917, "step": 70520 }, { "epoch": 4.791751596684332, "grad_norm": 1.2533780336380005, "learning_rate": 0.00040126036146215515, "loss": 3.4517, "step": 70525 }, { "epoch": 4.792091316754994, "grad_norm": 1.64959716796875, "learning_rate": 0.0004012178964533225, "loss": 3.2412, "step": 70530 }, { "epoch": 4.792431036825656, "grad_norm": 1.3860377073287964, "learning_rate": 0.00040117543144448976, "loss": 3.5018, "step": 70535 }, { "epoch": 4.792770756896317, "grad_norm": 1.1758357286453247, "learning_rate": 0.00040113296643565704, "loss": 3.1949, "step": 70540 }, { "epoch": 4.793110476966979, "grad_norm": 1.4669535160064697, "learning_rate": 0.0004010905014268243, "loss": 3.284, "step": 70545 }, { "epoch": 4.793450197037641, "grad_norm": 1.7285622358322144, "learning_rate": 0.0004010480364179916, "loss": 3.4667, "step": 70550 }, { "epoch": 4.7937899171083025, "grad_norm": 1.199770212173462, "learning_rate": 0.0004010055714091589, "loss": 3.6491, "step": 70555 }, { "epoch": 4.794129637178965, "grad_norm": 1.2137677669525146, "learning_rate": 0.0004009631064003261, "loss": 3.6329, "step": 70560 }, { "epoch": 4.794469357249627, "grad_norm": 1.4697697162628174, "learning_rate": 0.00040092064139149344, "loss": 3.4661, "step": 70565 }, { "epoch": 4.794809077320288, "grad_norm": 1.6410801410675049, 
"learning_rate": 0.0004008781763826607, "loss": 3.634, "step": 70570 }, { "epoch": 4.79514879739095, "grad_norm": 1.4947906732559204, "learning_rate": 0.00040083571137382795, "loss": 3.1683, "step": 70575 }, { "epoch": 4.795488517461612, "grad_norm": 1.2813974618911743, "learning_rate": 0.0004007932463649953, "loss": 3.3275, "step": 70580 }, { "epoch": 4.795828237532273, "grad_norm": 1.1394731998443604, "learning_rate": 0.00040075078135616256, "loss": 3.3609, "step": 70585 }, { "epoch": 4.796167957602935, "grad_norm": 1.2087595462799072, "learning_rate": 0.0004007083163473298, "loss": 3.4924, "step": 70590 }, { "epoch": 4.796507677673597, "grad_norm": 1.3318562507629395, "learning_rate": 0.00040066585133849707, "loss": 3.2905, "step": 70595 }, { "epoch": 4.7968473977442585, "grad_norm": 1.2421410083770752, "learning_rate": 0.0004006233863296644, "loss": 3.4152, "step": 70600 }, { "epoch": 4.797187117814921, "grad_norm": 1.0961284637451172, "learning_rate": 0.00040058092132083163, "loss": 3.4161, "step": 70605 }, { "epoch": 4.797526837885583, "grad_norm": 1.3130464553833008, "learning_rate": 0.0004005384563119989, "loss": 3.4469, "step": 70610 }, { "epoch": 4.797866557956244, "grad_norm": 1.318557858467102, "learning_rate": 0.00040049599130316624, "loss": 3.3111, "step": 70615 }, { "epoch": 4.798206278026906, "grad_norm": 1.0827953815460205, "learning_rate": 0.00040045352629433347, "loss": 3.5506, "step": 70620 }, { "epoch": 4.798545998097568, "grad_norm": 1.4976756572723389, "learning_rate": 0.00040041106128550075, "loss": 3.4143, "step": 70625 }, { "epoch": 4.798885718168229, "grad_norm": 1.25872802734375, "learning_rate": 0.00040036859627666803, "loss": 3.2054, "step": 70630 }, { "epoch": 4.799225438238891, "grad_norm": 1.2268116474151611, "learning_rate": 0.0004003261312678353, "loss": 3.3359, "step": 70635 }, { "epoch": 4.799565158309553, "grad_norm": 1.5883313417434692, "learning_rate": 0.0004002836662590026, "loss": 3.4514, "step": 70640 }, { "epoch": 
4.7999048783802145, "grad_norm": 1.3014962673187256, "learning_rate": 0.00040024120125016987, "loss": 3.102, "step": 70645 }, { "epoch": 4.800244598450877, "grad_norm": 1.181219220161438, "learning_rate": 0.00040019873624133715, "loss": 3.361, "step": 70650 }, { "epoch": 4.800584318521539, "grad_norm": 1.3830218315124512, "learning_rate": 0.00040015627123250443, "loss": 3.4416, "step": 70655 }, { "epoch": 4.8009240385922, "grad_norm": 1.287019968032837, "learning_rate": 0.0004001138062236717, "loss": 3.4671, "step": 70660 }, { "epoch": 4.801263758662862, "grad_norm": 1.0376514196395874, "learning_rate": 0.00040007134121483893, "loss": 3.5617, "step": 70665 }, { "epoch": 4.801603478733524, "grad_norm": 1.3665155172348022, "learning_rate": 0.00040002887620600627, "loss": 3.1712, "step": 70670 }, { "epoch": 4.801943198804185, "grad_norm": 1.2191896438598633, "learning_rate": 0.00039998641119717355, "loss": 3.7404, "step": 70675 }, { "epoch": 4.802282918874847, "grad_norm": 1.458516240119934, "learning_rate": 0.0003999439461883408, "loss": 3.5169, "step": 70680 }, { "epoch": 4.802622638945509, "grad_norm": 1.0749304294586182, "learning_rate": 0.0003999014811795081, "loss": 3.3842, "step": 70685 }, { "epoch": 4.8029623590161705, "grad_norm": 1.7269116640090942, "learning_rate": 0.0003998590161706754, "loss": 3.4397, "step": 70690 }, { "epoch": 4.803302079086833, "grad_norm": 1.7839466333389282, "learning_rate": 0.0003998165511618426, "loss": 3.5917, "step": 70695 }, { "epoch": 4.803641799157495, "grad_norm": 1.461240530014038, "learning_rate": 0.0003997740861530099, "loss": 3.347, "step": 70700 }, { "epoch": 4.803981519228156, "grad_norm": 1.39717698097229, "learning_rate": 0.00039973162114417723, "loss": 3.3823, "step": 70705 }, { "epoch": 4.804321239298818, "grad_norm": 1.2865601778030396, "learning_rate": 0.0003996891561353445, "loss": 3.4588, "step": 70710 }, { "epoch": 4.80466095936948, "grad_norm": 1.3435925245285034, "learning_rate": 0.00039964669112651174, 
"loss": 3.3768, "step": 70715 }, { "epoch": 4.805000679440141, "grad_norm": 0.9410041570663452, "learning_rate": 0.00039960422611767907, "loss": 3.2573, "step": 70720 }, { "epoch": 4.805340399510803, "grad_norm": 1.0152696371078491, "learning_rate": 0.00039956176110884635, "loss": 3.2273, "step": 70725 }, { "epoch": 4.805680119581465, "grad_norm": 1.3450837135314941, "learning_rate": 0.0003995192961000136, "loss": 3.4742, "step": 70730 }, { "epoch": 4.8060198396521265, "grad_norm": 1.1111773252487183, "learning_rate": 0.00039947683109118086, "loss": 3.4434, "step": 70735 }, { "epoch": 4.806359559722789, "grad_norm": 1.2363264560699463, "learning_rate": 0.0003994343660823482, "loss": 3.3925, "step": 70740 }, { "epoch": 4.806699279793451, "grad_norm": 1.3932381868362427, "learning_rate": 0.0003993919010735154, "loss": 3.4857, "step": 70745 }, { "epoch": 4.807038999864112, "grad_norm": 1.3651409149169922, "learning_rate": 0.0003993494360646827, "loss": 3.5471, "step": 70750 }, { "epoch": 4.807378719934774, "grad_norm": 1.0779260396957397, "learning_rate": 0.00039930697105585003, "loss": 3.4109, "step": 70755 }, { "epoch": 4.807718440005435, "grad_norm": 1.615248441696167, "learning_rate": 0.00039926450604701726, "loss": 3.2724, "step": 70760 }, { "epoch": 4.808058160076097, "grad_norm": 1.058091402053833, "learning_rate": 0.00039922204103818454, "loss": 3.5526, "step": 70765 }, { "epoch": 4.808397880146759, "grad_norm": 1.6231400966644287, "learning_rate": 0.0003991795760293518, "loss": 3.3867, "step": 70770 }, { "epoch": 4.8087376002174205, "grad_norm": 1.1591941118240356, "learning_rate": 0.0003991371110205191, "loss": 3.4259, "step": 70775 }, { "epoch": 4.8090773202880825, "grad_norm": 1.0775717496871948, "learning_rate": 0.0003990946460116864, "loss": 3.1614, "step": 70780 }, { "epoch": 4.809417040358745, "grad_norm": 0.9031919836997986, "learning_rate": 0.00039905218100285366, "loss": 3.3866, "step": 70785 }, { "epoch": 4.809756760429406, "grad_norm": 
1.2759591341018677, "learning_rate": 0.00039900971599402094, "loss": 3.2445, "step": 70790 }, { "epoch": 4.810096480500068, "grad_norm": 1.3045611381530762, "learning_rate": 0.0003989672509851882, "loss": 3.2488, "step": 70795 }, { "epoch": 4.81043620057073, "grad_norm": 1.1967239379882812, "learning_rate": 0.0003989247859763555, "loss": 3.3128, "step": 70800 }, { "epoch": 4.810775920641391, "grad_norm": 1.1898303031921387, "learning_rate": 0.0003988823209675227, "loss": 3.4024, "step": 70805 }, { "epoch": 4.811115640712053, "grad_norm": 1.1124032735824585, "learning_rate": 0.00039883985595869006, "loss": 3.4494, "step": 70810 }, { "epoch": 4.811455360782715, "grad_norm": 1.3947399854660034, "learning_rate": 0.00039879739094985734, "loss": 3.5894, "step": 70815 }, { "epoch": 4.8117950808533765, "grad_norm": 1.316833734512329, "learning_rate": 0.00039875492594102456, "loss": 3.3902, "step": 70820 }, { "epoch": 4.812134800924039, "grad_norm": 1.204565405845642, "learning_rate": 0.0003987124609321919, "loss": 3.4057, "step": 70825 }, { "epoch": 4.812474520994701, "grad_norm": 1.2187755107879639, "learning_rate": 0.0003986699959233592, "loss": 3.5122, "step": 70830 }, { "epoch": 4.812814241065362, "grad_norm": 1.6243900060653687, "learning_rate": 0.0003986275309145264, "loss": 3.5209, "step": 70835 }, { "epoch": 4.813153961136024, "grad_norm": 1.7668317556381226, "learning_rate": 0.00039858506590569374, "loss": 3.4683, "step": 70840 }, { "epoch": 4.813493681206686, "grad_norm": 1.3803716897964478, "learning_rate": 0.000398542600896861, "loss": 3.9221, "step": 70845 }, { "epoch": 4.813833401277347, "grad_norm": 1.0279200077056885, "learning_rate": 0.00039850013588802824, "loss": 3.3872, "step": 70850 }, { "epoch": 4.814173121348009, "grad_norm": 1.214718222618103, "learning_rate": 0.0003984576708791955, "loss": 3.6771, "step": 70855 }, { "epoch": 4.814512841418671, "grad_norm": 0.9803794026374817, "learning_rate": 0.00039841520587036286, "loss": 3.2989, "step": 70860 }, 
{ "epoch": 4.8148525614893325, "grad_norm": 1.2746012210845947, "learning_rate": 0.0003983727408615301, "loss": 3.4021, "step": 70865 }, { "epoch": 4.815192281559995, "grad_norm": 1.0992603302001953, "learning_rate": 0.00039833027585269736, "loss": 3.2799, "step": 70870 }, { "epoch": 4.815532001630657, "grad_norm": 1.1990646123886108, "learning_rate": 0.0003982878108438647, "loss": 3.2563, "step": 70875 }, { "epoch": 4.815871721701318, "grad_norm": 1.3520545959472656, "learning_rate": 0.000398245345835032, "loss": 3.6074, "step": 70880 }, { "epoch": 4.81621144177198, "grad_norm": 1.0721224546432495, "learning_rate": 0.0003982028808261992, "loss": 3.4358, "step": 70885 }, { "epoch": 4.816551161842642, "grad_norm": 1.4257352352142334, "learning_rate": 0.0003981604158173665, "loss": 3.5462, "step": 70890 }, { "epoch": 4.816890881913303, "grad_norm": 1.0057220458984375, "learning_rate": 0.0003981179508085338, "loss": 3.5688, "step": 70895 }, { "epoch": 4.817230601983965, "grad_norm": 1.4117540121078491, "learning_rate": 0.00039807548579970104, "loss": 3.4916, "step": 70900 }, { "epoch": 4.817570322054627, "grad_norm": 1.4343794584274292, "learning_rate": 0.0003980330207908683, "loss": 3.6108, "step": 70905 }, { "epoch": 4.8179100421252885, "grad_norm": 1.3758368492126465, "learning_rate": 0.00039799055578203566, "loss": 3.717, "step": 70910 }, { "epoch": 4.818249762195951, "grad_norm": 1.3157581090927124, "learning_rate": 0.0003979480907732029, "loss": 3.3431, "step": 70915 }, { "epoch": 4.818589482266613, "grad_norm": 1.3198864459991455, "learning_rate": 0.00039790562576437016, "loss": 3.2398, "step": 70920 }, { "epoch": 4.818929202337274, "grad_norm": 1.493873953819275, "learning_rate": 0.00039786316075553744, "loss": 3.3086, "step": 70925 }, { "epoch": 4.819268922407936, "grad_norm": 1.439186692237854, "learning_rate": 0.0003978206957467047, "loss": 3.404, "step": 70930 }, { "epoch": 4.819608642478598, "grad_norm": 1.3876898288726807, "learning_rate": 
0.000397778230737872, "loss": 3.3779, "step": 70935 }, { "epoch": 4.819948362549259, "grad_norm": 1.1343975067138672, "learning_rate": 0.0003977357657290393, "loss": 3.5554, "step": 70940 }, { "epoch": 4.820288082619921, "grad_norm": 1.1747088432312012, "learning_rate": 0.00039769330072020656, "loss": 3.6329, "step": 70945 }, { "epoch": 4.820627802690583, "grad_norm": 1.2298489809036255, "learning_rate": 0.00039765083571137384, "loss": 3.3777, "step": 70950 }, { "epoch": 4.8209675227612445, "grad_norm": 1.0491122007369995, "learning_rate": 0.0003976083707025411, "loss": 3.3114, "step": 70955 }, { "epoch": 4.821307242831907, "grad_norm": 1.1306498050689697, "learning_rate": 0.00039756590569370835, "loss": 3.438, "step": 70960 }, { "epoch": 4.821646962902569, "grad_norm": 1.2446684837341309, "learning_rate": 0.0003975234406848757, "loss": 3.5862, "step": 70965 }, { "epoch": 4.82198668297323, "grad_norm": 1.1461412906646729, "learning_rate": 0.00039748097567604296, "loss": 3.5197, "step": 70970 }, { "epoch": 4.822326403043892, "grad_norm": 1.8402113914489746, "learning_rate": 0.0003974385106672102, "loss": 3.3382, "step": 70975 }, { "epoch": 4.822666123114553, "grad_norm": 1.2205090522766113, "learning_rate": 0.0003973960456583775, "loss": 3.1499, "step": 70980 }, { "epoch": 4.823005843185215, "grad_norm": 1.585052728652954, "learning_rate": 0.0003973535806495448, "loss": 3.4037, "step": 70985 }, { "epoch": 4.823345563255877, "grad_norm": 1.2899349927902222, "learning_rate": 0.00039731111564071203, "loss": 3.3288, "step": 70990 }, { "epoch": 4.8236852833265385, "grad_norm": 3.2983102798461914, "learning_rate": 0.0003972686506318793, "loss": 3.3801, "step": 70995 }, { "epoch": 4.8240250033972005, "grad_norm": 1.5183219909667969, "learning_rate": 0.00039722618562304665, "loss": 3.448, "step": 71000 }, { "epoch": 4.824364723467863, "grad_norm": 1.165573239326477, "learning_rate": 0.00039718372061421387, "loss": 3.3586, "step": 71005 }, { "epoch": 4.824704443538524, 
"grad_norm": 1.9975435733795166, "learning_rate": 0.00039714125560538115, "loss": 3.4762, "step": 71010 }, { "epoch": 4.825044163609186, "grad_norm": 1.2843631505966187, "learning_rate": 0.0003970987905965485, "loss": 2.9025, "step": 71015 }, { "epoch": 4.825383883679848, "grad_norm": 1.0834941864013672, "learning_rate": 0.0003970563255877157, "loss": 3.2701, "step": 71020 }, { "epoch": 4.825723603750509, "grad_norm": 1.3900073766708374, "learning_rate": 0.000397013860578883, "loss": 3.5116, "step": 71025 }, { "epoch": 4.826063323821171, "grad_norm": 1.3948160409927368, "learning_rate": 0.00039697139557005027, "loss": 3.5577, "step": 71030 }, { "epoch": 4.826403043891833, "grad_norm": 1.2718769311904907, "learning_rate": 0.00039692893056121755, "loss": 3.4857, "step": 71035 }, { "epoch": 4.8267427639624945, "grad_norm": 1.5232940912246704, "learning_rate": 0.00039688646555238483, "loss": 3.3295, "step": 71040 }, { "epoch": 4.8270824840331565, "grad_norm": 1.3432352542877197, "learning_rate": 0.0003968440005435521, "loss": 3.1799, "step": 71045 }, { "epoch": 4.827422204103819, "grad_norm": 1.1827232837677002, "learning_rate": 0.00039680153553471945, "loss": 3.4724, "step": 71050 }, { "epoch": 4.82776192417448, "grad_norm": 1.2883589267730713, "learning_rate": 0.00039675907052588667, "loss": 3.2878, "step": 71055 }, { "epoch": 4.828101644245142, "grad_norm": 1.2846238613128662, "learning_rate": 0.00039671660551705395, "loss": 3.4301, "step": 71060 }, { "epoch": 4.828441364315804, "grad_norm": 1.4947926998138428, "learning_rate": 0.0003966741405082213, "loss": 3.5534, "step": 71065 }, { "epoch": 4.828781084386465, "grad_norm": 1.3932428359985352, "learning_rate": 0.0003966316754993885, "loss": 3.3571, "step": 71070 }, { "epoch": 4.829120804457127, "grad_norm": 1.3496568202972412, "learning_rate": 0.0003965892104905558, "loss": 3.3654, "step": 71075 }, { "epoch": 4.829460524527789, "grad_norm": 1.2791773080825806, "learning_rate": 0.00039654674548172307, "loss": 3.335, 
"step": 71080 }, { "epoch": 4.8298002445984505, "grad_norm": 1.1741435527801514, "learning_rate": 0.00039650428047289035, "loss": 3.5163, "step": 71085 }, { "epoch": 4.8301399646691126, "grad_norm": 1.087752342224121, "learning_rate": 0.00039646181546405763, "loss": 3.5806, "step": 71090 }, { "epoch": 4.830479684739775, "grad_norm": 1.4276396036148071, "learning_rate": 0.0003964193504552249, "loss": 3.3946, "step": 71095 }, { "epoch": 4.830819404810436, "grad_norm": 1.1793731451034546, "learning_rate": 0.0003963768854463922, "loss": 3.4316, "step": 71100 }, { "epoch": 4.831159124881098, "grad_norm": 1.4959608316421509, "learning_rate": 0.00039633442043755947, "loss": 3.3287, "step": 71105 }, { "epoch": 4.83149884495176, "grad_norm": 1.3187726736068726, "learning_rate": 0.00039629195542872675, "loss": 3.5753, "step": 71110 }, { "epoch": 4.831838565022421, "grad_norm": 1.282853364944458, "learning_rate": 0.000396249490419894, "loss": 3.5358, "step": 71115 }, { "epoch": 4.832178285093083, "grad_norm": 1.0444756746292114, "learning_rate": 0.0003962070254110613, "loss": 3.2256, "step": 71120 }, { "epoch": 4.832518005163745, "grad_norm": 1.2184927463531494, "learning_rate": 0.0003961645604022286, "loss": 3.4545, "step": 71125 }, { "epoch": 4.8328577252344065, "grad_norm": 1.4138740301132202, "learning_rate": 0.0003961220953933958, "loss": 3.1685, "step": 71130 }, { "epoch": 4.833197445305069, "grad_norm": 1.0838332176208496, "learning_rate": 0.00039607963038456315, "loss": 3.6022, "step": 71135 }, { "epoch": 4.833537165375731, "grad_norm": 1.3162351846694946, "learning_rate": 0.00039603716537573043, "loss": 3.1536, "step": 71140 }, { "epoch": 4.833876885446392, "grad_norm": 1.2233527898788452, "learning_rate": 0.00039599470036689766, "loss": 3.3511, "step": 71145 }, { "epoch": 4.834216605517054, "grad_norm": 1.0464791059494019, "learning_rate": 0.00039595223535806494, "loss": 3.5097, "step": 71150 }, { "epoch": 4.834556325587716, "grad_norm": 1.307684302330017, 
"learning_rate": 0.0003959097703492323, "loss": 3.3514, "step": 71155 }, { "epoch": 4.834896045658377, "grad_norm": 1.3618642091751099, "learning_rate": 0.0003958673053403995, "loss": 3.3167, "step": 71160 }, { "epoch": 4.835235765729039, "grad_norm": 1.3610444068908691, "learning_rate": 0.0003958248403315668, "loss": 3.5359, "step": 71165 }, { "epoch": 4.835575485799701, "grad_norm": 1.0943392515182495, "learning_rate": 0.0003957823753227341, "loss": 3.4249, "step": 71170 }, { "epoch": 4.8359152058703625, "grad_norm": 1.6884875297546387, "learning_rate": 0.00039573991031390134, "loss": 3.3507, "step": 71175 }, { "epoch": 4.836254925941025, "grad_norm": 0.8716896176338196, "learning_rate": 0.0003956974453050686, "loss": 3.3575, "step": 71180 }, { "epoch": 4.836594646011687, "grad_norm": 1.354392170906067, "learning_rate": 0.0003956549802962359, "loss": 3.5045, "step": 71185 }, { "epoch": 4.836934366082348, "grad_norm": 1.3730740547180176, "learning_rate": 0.0003956125152874032, "loss": 3.2665, "step": 71190 }, { "epoch": 4.83727408615301, "grad_norm": 1.3649671077728271, "learning_rate": 0.00039557005027857046, "loss": 3.3321, "step": 71195 }, { "epoch": 4.837613806223672, "grad_norm": 1.2733017206192017, "learning_rate": 0.00039552758526973774, "loss": 3.3842, "step": 71200 }, { "epoch": 4.837953526294333, "grad_norm": 1.557178258895874, "learning_rate": 0.000395485120260905, "loss": 3.3778, "step": 71205 }, { "epoch": 4.838293246364995, "grad_norm": 1.5044169425964355, "learning_rate": 0.0003954426552520723, "loss": 3.4556, "step": 71210 }, { "epoch": 4.838632966435657, "grad_norm": 1.3193262815475464, "learning_rate": 0.0003954001902432396, "loss": 3.4134, "step": 71215 }, { "epoch": 4.8389726865063185, "grad_norm": 1.2342259883880615, "learning_rate": 0.00039535772523440686, "loss": 3.3395, "step": 71220 }, { "epoch": 4.839312406576981, "grad_norm": 1.0487529039382935, "learning_rate": 0.00039531526022557414, "loss": 3.2387, "step": 71225 }, { "epoch": 
4.839652126647643, "grad_norm": 1.1788733005523682, "learning_rate": 0.0003952727952167414, "loss": 3.0771, "step": 71230 }, { "epoch": 4.839991846718304, "grad_norm": 1.3200479745864868, "learning_rate": 0.0003952303302079087, "loss": 3.3652, "step": 71235 }, { "epoch": 4.840331566788966, "grad_norm": 1.4579254388809204, "learning_rate": 0.000395187865199076, "loss": 3.5384, "step": 71240 }, { "epoch": 4.840671286859628, "grad_norm": 1.5533660650253296, "learning_rate": 0.00039514540019024326, "loss": 3.3327, "step": 71245 }, { "epoch": 4.841011006930289, "grad_norm": 1.1029534339904785, "learning_rate": 0.00039510293518141054, "loss": 3.4401, "step": 71250 }, { "epoch": 4.841350727000951, "grad_norm": 1.3280212879180908, "learning_rate": 0.00039506047017257777, "loss": 3.4643, "step": 71255 }, { "epoch": 4.841690447071613, "grad_norm": 1.1614471673965454, "learning_rate": 0.0003950180051637451, "loss": 3.356, "step": 71260 }, { "epoch": 4.8420301671422745, "grad_norm": 1.2958310842514038, "learning_rate": 0.0003949755401549124, "loss": 3.5261, "step": 71265 }, { "epoch": 4.842369887212937, "grad_norm": 1.3371976613998413, "learning_rate": 0.0003949330751460796, "loss": 3.2195, "step": 71270 }, { "epoch": 4.842709607283599, "grad_norm": 1.1399586200714111, "learning_rate": 0.00039489061013724694, "loss": 3.4638, "step": 71275 }, { "epoch": 4.84304932735426, "grad_norm": 1.1318224668502808, "learning_rate": 0.0003948481451284142, "loss": 3.4135, "step": 71280 }, { "epoch": 4.843389047424922, "grad_norm": 1.4433199167251587, "learning_rate": 0.00039480568011958145, "loss": 3.329, "step": 71285 }, { "epoch": 4.843728767495584, "grad_norm": 1.4246189594268799, "learning_rate": 0.00039476321511074873, "loss": 3.326, "step": 71290 }, { "epoch": 4.844068487566245, "grad_norm": 1.1366605758666992, "learning_rate": 0.00039472075010191606, "loss": 3.3242, "step": 71295 }, { "epoch": 4.844408207636907, "grad_norm": 1.2273499965667725, "learning_rate": 0.0003946782850930833, 
"loss": 3.1527, "step": 71300 }, { "epoch": 4.844747927707569, "grad_norm": 1.0187430381774902, "learning_rate": 0.00039463582008425057, "loss": 3.6099, "step": 71305 }, { "epoch": 4.8450876477782305, "grad_norm": 1.266574501991272, "learning_rate": 0.0003945933550754179, "loss": 3.2552, "step": 71310 }, { "epoch": 4.845427367848893, "grad_norm": 1.4065873622894287, "learning_rate": 0.00039455089006658513, "loss": 3.4709, "step": 71315 }, { "epoch": 4.845767087919555, "grad_norm": 1.265505075454712, "learning_rate": 0.0003945084250577524, "loss": 3.6696, "step": 71320 }, { "epoch": 4.846106807990216, "grad_norm": 1.264696717262268, "learning_rate": 0.0003944659600489197, "loss": 3.5194, "step": 71325 }, { "epoch": 4.846446528060878, "grad_norm": 1.714802622795105, "learning_rate": 0.00039442349504008697, "loss": 3.486, "step": 71330 }, { "epoch": 4.84678624813154, "grad_norm": 1.594722032546997, "learning_rate": 0.00039438103003125425, "loss": 3.4615, "step": 71335 }, { "epoch": 4.847125968202201, "grad_norm": 1.4748072624206543, "learning_rate": 0.00039433856502242153, "loss": 3.2821, "step": 71340 }, { "epoch": 4.847465688272863, "grad_norm": 1.1591883897781372, "learning_rate": 0.0003942961000135888, "loss": 3.5278, "step": 71345 }, { "epoch": 4.847805408343525, "grad_norm": 1.2313083410263062, "learning_rate": 0.0003942536350047561, "loss": 3.4425, "step": 71350 }, { "epoch": 4.8481451284141865, "grad_norm": 1.495124101638794, "learning_rate": 0.00039421116999592337, "loss": 3.7215, "step": 71355 }, { "epoch": 4.848484848484849, "grad_norm": 1.011122465133667, "learning_rate": 0.0003941687049870906, "loss": 3.2538, "step": 71360 }, { "epoch": 4.848824568555511, "grad_norm": 1.0978726148605347, "learning_rate": 0.00039412623997825793, "loss": 3.5381, "step": 71365 }, { "epoch": 4.849164288626172, "grad_norm": 1.1195951700210571, "learning_rate": 0.0003940837749694252, "loss": 3.163, "step": 71370 }, { "epoch": 4.849504008696834, "grad_norm": 1.3451340198516846, 
"learning_rate": 0.00039404130996059243, "loss": 3.3949, "step": 71375 }, { "epoch": 4.849843728767496, "grad_norm": 1.4226480722427368, "learning_rate": 0.00039399884495175977, "loss": 3.4071, "step": 71380 }, { "epoch": 4.850183448838157, "grad_norm": 1.6961376667022705, "learning_rate": 0.00039395637994292705, "loss": 3.264, "step": 71385 }, { "epoch": 4.850523168908819, "grad_norm": 1.231431007385254, "learning_rate": 0.00039391391493409433, "loss": 3.476, "step": 71390 }, { "epoch": 4.850862888979481, "grad_norm": 1.319939136505127, "learning_rate": 0.0003938714499252616, "loss": 3.1871, "step": 71395 }, { "epoch": 4.8512026090501426, "grad_norm": 1.0996164083480835, "learning_rate": 0.0003938289849164289, "loss": 3.3892, "step": 71400 }, { "epoch": 4.851542329120805, "grad_norm": 1.5256028175354004, "learning_rate": 0.00039378651990759617, "loss": 3.3419, "step": 71405 }, { "epoch": 4.851882049191467, "grad_norm": 1.3887892961502075, "learning_rate": 0.0003937440548987634, "loss": 3.2204, "step": 71410 }, { "epoch": 4.852221769262128, "grad_norm": 1.283972144126892, "learning_rate": 0.00039370158988993073, "loss": 3.471, "step": 71415 }, { "epoch": 4.85256148933279, "grad_norm": 1.2405142784118652, "learning_rate": 0.000393659124881098, "loss": 3.2053, "step": 71420 }, { "epoch": 4.852901209403452, "grad_norm": 1.144068956375122, "learning_rate": 0.00039361665987226524, "loss": 3.309, "step": 71425 }, { "epoch": 4.853240929474113, "grad_norm": 1.2265738248825073, "learning_rate": 0.00039357419486343257, "loss": 3.5906, "step": 71430 }, { "epoch": 4.853580649544775, "grad_norm": 1.2829383611679077, "learning_rate": 0.00039353172985459985, "loss": 3.4617, "step": 71435 }, { "epoch": 4.8539203696154365, "grad_norm": 1.1439714431762695, "learning_rate": 0.0003934892648457671, "loss": 3.5689, "step": 71440 }, { "epoch": 4.854260089686099, "grad_norm": 1.4179227352142334, "learning_rate": 0.00039344679983693436, "loss": 3.2082, "step": 71445 }, { "epoch": 
4.854599809756761, "grad_norm": 1.146487832069397, "learning_rate": 0.0003934043348281017, "loss": 3.2978, "step": 71450 }, { "epoch": 4.854939529827422, "grad_norm": 1.199033498764038, "learning_rate": 0.0003933618698192689, "loss": 3.2719, "step": 71455 }, { "epoch": 4.855279249898084, "grad_norm": 1.3328654766082764, "learning_rate": 0.0003933194048104362, "loss": 3.3036, "step": 71460 }, { "epoch": 4.855618969968746, "grad_norm": 1.43343985080719, "learning_rate": 0.00039327693980160353, "loss": 3.2989, "step": 71465 }, { "epoch": 4.855958690039407, "grad_norm": 1.3799139261245728, "learning_rate": 0.00039323447479277076, "loss": 3.3972, "step": 71470 }, { "epoch": 4.856298410110069, "grad_norm": 1.4309157133102417, "learning_rate": 0.00039319200978393804, "loss": 3.5192, "step": 71475 }, { "epoch": 4.856638130180731, "grad_norm": 0.9985924363136292, "learning_rate": 0.0003931495447751053, "loss": 3.1529, "step": 71480 }, { "epoch": 4.8569778502513925, "grad_norm": 1.191610336303711, "learning_rate": 0.0003931070797662726, "loss": 3.6516, "step": 71485 }, { "epoch": 4.857317570322055, "grad_norm": 1.3694729804992676, "learning_rate": 0.0003930646147574399, "loss": 3.5061, "step": 71490 }, { "epoch": 4.857657290392717, "grad_norm": 1.4576324224472046, "learning_rate": 0.00039302214974860716, "loss": 3.1666, "step": 71495 }, { "epoch": 4.857997010463378, "grad_norm": 1.2077662944793701, "learning_rate": 0.00039297968473977444, "loss": 3.5788, "step": 71500 }, { "epoch": 4.85833673053404, "grad_norm": 1.415664792060852, "learning_rate": 0.0003929372197309417, "loss": 3.1326, "step": 71505 }, { "epoch": 4.858676450604702, "grad_norm": 1.211104393005371, "learning_rate": 0.000392894754722109, "loss": 3.144, "step": 71510 }, { "epoch": 4.859016170675363, "grad_norm": 1.280239224433899, "learning_rate": 0.0003928522897132762, "loss": 3.218, "step": 71515 }, { "epoch": 4.859355890746025, "grad_norm": 1.1261177062988281, "learning_rate": 0.00039280982470444356, "loss": 
3.2773, "step": 71520 }, { "epoch": 4.859695610816687, "grad_norm": 1.374635100364685, "learning_rate": 0.00039276735969561084, "loss": 3.4381, "step": 71525 }, { "epoch": 4.8600353308873485, "grad_norm": 1.435752511024475, "learning_rate": 0.00039272489468677806, "loss": 3.2926, "step": 71530 }, { "epoch": 4.860375050958011, "grad_norm": 1.4729050397872925, "learning_rate": 0.0003926824296779454, "loss": 3.3743, "step": 71535 }, { "epoch": 4.860714771028673, "grad_norm": 1.4986300468444824, "learning_rate": 0.0003926399646691127, "loss": 3.4252, "step": 71540 }, { "epoch": 4.861054491099334, "grad_norm": 1.1355667114257812, "learning_rate": 0.0003925974996602799, "loss": 3.7108, "step": 71545 }, { "epoch": 4.861394211169996, "grad_norm": 1.2107913494110107, "learning_rate": 0.0003925550346514472, "loss": 3.3597, "step": 71550 }, { "epoch": 4.861733931240658, "grad_norm": 1.2732014656066895, "learning_rate": 0.0003925125696426145, "loss": 3.5824, "step": 71555 }, { "epoch": 4.862073651311319, "grad_norm": 1.4120434522628784, "learning_rate": 0.0003924701046337818, "loss": 3.212, "step": 71560 }, { "epoch": 4.862413371381981, "grad_norm": 1.648801326751709, "learning_rate": 0.000392427639624949, "loss": 3.3378, "step": 71565 }, { "epoch": 4.862753091452643, "grad_norm": 1.5322264432907104, "learning_rate": 0.00039238517461611636, "loss": 3.3363, "step": 71570 }, { "epoch": 4.8630928115233045, "grad_norm": 1.1897881031036377, "learning_rate": 0.00039234270960728364, "loss": 3.4246, "step": 71575 }, { "epoch": 4.863432531593967, "grad_norm": 1.3469345569610596, "learning_rate": 0.00039230024459845086, "loss": 3.4163, "step": 71580 }, { "epoch": 4.863772251664629, "grad_norm": 1.1844674348831177, "learning_rate": 0.00039225777958961814, "loss": 3.6292, "step": 71585 }, { "epoch": 4.86411197173529, "grad_norm": 1.1678467988967896, "learning_rate": 0.0003922153145807855, "loss": 3.1646, "step": 71590 }, { "epoch": 4.864451691805952, "grad_norm": 1.104594111442566, 
"learning_rate": 0.0003921728495719527, "loss": 3.5223, "step": 71595 }, { "epoch": 4.864791411876614, "grad_norm": 0.9765718579292297, "learning_rate": 0.00039213038456312, "loss": 3.332, "step": 71600 }, { "epoch": 4.865131131947275, "grad_norm": 1.1731981039047241, "learning_rate": 0.0003920879195542873, "loss": 3.5393, "step": 71605 }, { "epoch": 4.865470852017937, "grad_norm": 1.5142074823379517, "learning_rate": 0.00039204545454545454, "loss": 3.2485, "step": 71610 }, { "epoch": 4.865810572088599, "grad_norm": 1.4326642751693726, "learning_rate": 0.0003920029895366218, "loss": 3.2535, "step": 71615 }, { "epoch": 4.8661502921592605, "grad_norm": 1.4494317770004272, "learning_rate": 0.0003919605245277891, "loss": 3.319, "step": 71620 }, { "epoch": 4.866490012229923, "grad_norm": 1.2490403652191162, "learning_rate": 0.0003919180595189564, "loss": 3.59, "step": 71625 }, { "epoch": 4.866829732300585, "grad_norm": 1.2551498413085938, "learning_rate": 0.00039187559451012366, "loss": 3.2739, "step": 71630 }, { "epoch": 4.867169452371246, "grad_norm": 1.2402111291885376, "learning_rate": 0.00039183312950129094, "loss": 3.3594, "step": 71635 }, { "epoch": 4.867509172441908, "grad_norm": 1.0631235837936401, "learning_rate": 0.0003917906644924582, "loss": 3.3581, "step": 71640 }, { "epoch": 4.86784889251257, "grad_norm": 1.346165418624878, "learning_rate": 0.0003917481994836255, "loss": 3.681, "step": 71645 }, { "epoch": 4.868188612583231, "grad_norm": 1.0477299690246582, "learning_rate": 0.0003917057344747928, "loss": 3.329, "step": 71650 }, { "epoch": 4.868528332653893, "grad_norm": 1.1754701137542725, "learning_rate": 0.00039166326946596, "loss": 3.4393, "step": 71655 }, { "epoch": 4.8688680527245545, "grad_norm": 1.7505075931549072, "learning_rate": 0.00039162080445712734, "loss": 3.2933, "step": 71660 }, { "epoch": 4.8692077727952165, "grad_norm": 1.166295051574707, "learning_rate": 0.0003915783394482946, "loss": 3.47, "step": 71665 }, { "epoch": 4.869547492865879, 
"grad_norm": 1.0137534141540527, "learning_rate": 0.00039153587443946185, "loss": 3.7538, "step": 71670 }, { "epoch": 4.86988721293654, "grad_norm": 1.1525343656539917, "learning_rate": 0.0003914934094306292, "loss": 3.4573, "step": 71675 }, { "epoch": 4.870226933007202, "grad_norm": 1.1135120391845703, "learning_rate": 0.00039145094442179646, "loss": 3.4296, "step": 71680 }, { "epoch": 4.870566653077864, "grad_norm": 1.5714993476867676, "learning_rate": 0.0003914084794129637, "loss": 3.1064, "step": 71685 }, { "epoch": 4.870906373148525, "grad_norm": 1.3605599403381348, "learning_rate": 0.000391366014404131, "loss": 3.3813, "step": 71690 }, { "epoch": 4.871246093219187, "grad_norm": 1.3155558109283447, "learning_rate": 0.0003913235493952983, "loss": 3.2898, "step": 71695 }, { "epoch": 4.871585813289849, "grad_norm": 1.1934846639633179, "learning_rate": 0.00039128108438646553, "loss": 3.3676, "step": 71700 }, { "epoch": 4.8719255333605105, "grad_norm": 1.035252571105957, "learning_rate": 0.0003912386193776328, "loss": 3.2126, "step": 71705 }, { "epoch": 4.872265253431173, "grad_norm": 1.4477267265319824, "learning_rate": 0.00039119615436880015, "loss": 2.9639, "step": 71710 }, { "epoch": 4.872604973501835, "grad_norm": 1.4355363845825195, "learning_rate": 0.00039115368935996737, "loss": 3.1334, "step": 71715 }, { "epoch": 4.872944693572496, "grad_norm": 1.206273078918457, "learning_rate": 0.00039111122435113465, "loss": 3.6529, "step": 71720 }, { "epoch": 4.873284413643158, "grad_norm": 1.4392871856689453, "learning_rate": 0.000391068759342302, "loss": 3.3901, "step": 71725 }, { "epoch": 4.87362413371382, "grad_norm": 1.1005842685699463, "learning_rate": 0.00039102629433346927, "loss": 3.5425, "step": 71730 }, { "epoch": 4.873963853784481, "grad_norm": 1.3362077474594116, "learning_rate": 0.0003909838293246365, "loss": 3.5921, "step": 71735 }, { "epoch": 4.874303573855143, "grad_norm": 1.0437400341033936, "learning_rate": 0.00039094136431580377, "loss": 3.3686, 
"step": 71740 }, { "epoch": 4.874643293925805, "grad_norm": 1.2023037672042847, "learning_rate": 0.0003908988993069711, "loss": 3.5487, "step": 71745 }, { "epoch": 4.8749830139964665, "grad_norm": 1.1852829456329346, "learning_rate": 0.00039085643429813833, "loss": 3.279, "step": 71750 }, { "epoch": 4.875322734067129, "grad_norm": 1.4411629438400269, "learning_rate": 0.0003908139692893056, "loss": 3.2345, "step": 71755 }, { "epoch": 4.875662454137791, "grad_norm": 1.117739200592041, "learning_rate": 0.00039077150428047295, "loss": 3.3254, "step": 71760 }, { "epoch": 4.876002174208452, "grad_norm": 1.0289639234542847, "learning_rate": 0.00039072903927164017, "loss": 3.6295, "step": 71765 }, { "epoch": 4.876341894279114, "grad_norm": 1.4693385362625122, "learning_rate": 0.00039068657426280745, "loss": 3.321, "step": 71770 }, { "epoch": 4.876681614349776, "grad_norm": 1.2039812803268433, "learning_rate": 0.00039064410925397473, "loss": 3.4759, "step": 71775 }, { "epoch": 4.877021334420437, "grad_norm": 1.0456063747406006, "learning_rate": 0.000390601644245142, "loss": 3.5633, "step": 71780 }, { "epoch": 4.877361054491099, "grad_norm": 0.9889867305755615, "learning_rate": 0.0003905591792363093, "loss": 3.3836, "step": 71785 }, { "epoch": 4.877700774561761, "grad_norm": 1.375673770904541, "learning_rate": 0.00039051671422747657, "loss": 3.4811, "step": 71790 }, { "epoch": 4.8780404946324225, "grad_norm": 1.1456611156463623, "learning_rate": 0.00039047424921864385, "loss": 3.1703, "step": 71795 }, { "epoch": 4.878380214703085, "grad_norm": 1.0926636457443237, "learning_rate": 0.00039043178420981113, "loss": 3.4461, "step": 71800 }, { "epoch": 4.878719934773747, "grad_norm": 1.6256048679351807, "learning_rate": 0.0003903893192009784, "loss": 3.2351, "step": 71805 }, { "epoch": 4.879059654844408, "grad_norm": 1.507020115852356, "learning_rate": 0.00039034685419214564, "loss": 3.5555, "step": 71810 }, { "epoch": 4.87939937491507, "grad_norm": 1.2900398969650269, 
"learning_rate": 0.00039030438918331297, "loss": 3.2609, "step": 71815 }, { "epoch": 4.879739094985732, "grad_norm": 1.5250099897384644, "learning_rate": 0.00039026192417448025, "loss": 3.3651, "step": 71820 }, { "epoch": 4.880078815056393, "grad_norm": 0.9139539003372192, "learning_rate": 0.0003902194591656475, "loss": 3.4094, "step": 71825 }, { "epoch": 4.880418535127055, "grad_norm": 1.3636603355407715, "learning_rate": 0.0003901769941568148, "loss": 3.469, "step": 71830 }, { "epoch": 4.880758255197717, "grad_norm": 1.2644836902618408, "learning_rate": 0.0003901345291479821, "loss": 3.5645, "step": 71835 }, { "epoch": 4.8810979752683785, "grad_norm": 1.1836740970611572, "learning_rate": 0.0003900920641391493, "loss": 3.4092, "step": 71840 }, { "epoch": 4.881437695339041, "grad_norm": 1.3836944103240967, "learning_rate": 0.0003900495991303166, "loss": 3.3795, "step": 71845 }, { "epoch": 4.881777415409703, "grad_norm": 1.5375598669052124, "learning_rate": 0.00039000713412148393, "loss": 3.3506, "step": 71850 }, { "epoch": 4.882117135480364, "grad_norm": 1.1086369752883911, "learning_rate": 0.00038996466911265116, "loss": 3.3671, "step": 71855 }, { "epoch": 4.882456855551026, "grad_norm": 1.1607435941696167, "learning_rate": 0.00038992220410381844, "loss": 3.4894, "step": 71860 }, { "epoch": 4.882796575621688, "grad_norm": 1.3119879961013794, "learning_rate": 0.0003898797390949858, "loss": 3.6139, "step": 71865 }, { "epoch": 4.883136295692349, "grad_norm": 1.1730691194534302, "learning_rate": 0.000389837274086153, "loss": 3.5523, "step": 71870 }, { "epoch": 4.883476015763011, "grad_norm": 1.791546106338501, "learning_rate": 0.0003897948090773203, "loss": 3.3582, "step": 71875 }, { "epoch": 4.883815735833673, "grad_norm": 1.274095058441162, "learning_rate": 0.00038975234406848756, "loss": 3.5919, "step": 71880 }, { "epoch": 4.8841554559043345, "grad_norm": 1.8243966102600098, "learning_rate": 0.00038970987905965484, "loss": 3.355, "step": 71885 }, { "epoch": 
4.884495175974997, "grad_norm": 1.2903647422790527, "learning_rate": 0.0003896674140508221, "loss": 3.3057, "step": 71890 }, { "epoch": 4.884834896045659, "grad_norm": 1.4958473443984985, "learning_rate": 0.0003896249490419894, "loss": 3.3135, "step": 71895 }, { "epoch": 4.88517461611632, "grad_norm": 1.5684820413589478, "learning_rate": 0.00038958248403315673, "loss": 3.2382, "step": 71900 }, { "epoch": 4.885514336186982, "grad_norm": 1.0360931158065796, "learning_rate": 0.00038954001902432396, "loss": 3.4434, "step": 71905 }, { "epoch": 4.885854056257644, "grad_norm": 1.4424562454223633, "learning_rate": 0.00038949755401549124, "loss": 3.272, "step": 71910 }, { "epoch": 4.886193776328305, "grad_norm": 1.057354211807251, "learning_rate": 0.0003894550890066586, "loss": 3.2897, "step": 71915 }, { "epoch": 4.886533496398967, "grad_norm": 1.1753019094467163, "learning_rate": 0.0003894126239978258, "loss": 3.3611, "step": 71920 }, { "epoch": 4.886873216469629, "grad_norm": 1.4226698875427246, "learning_rate": 0.0003893701589889931, "loss": 3.2601, "step": 71925 }, { "epoch": 4.8872129365402905, "grad_norm": 1.2592555284500122, "learning_rate": 0.00038932769398016036, "loss": 3.4699, "step": 71930 }, { "epoch": 4.887552656610953, "grad_norm": 1.4116380214691162, "learning_rate": 0.00038928522897132764, "loss": 3.3574, "step": 71935 }, { "epoch": 4.887892376681615, "grad_norm": 1.2560170888900757, "learning_rate": 0.0003892427639624949, "loss": 3.6817, "step": 71940 }, { "epoch": 4.888232096752276, "grad_norm": 1.2035187482833862, "learning_rate": 0.0003892002989536622, "loss": 3.3884, "step": 71945 }, { "epoch": 4.888571816822938, "grad_norm": 1.23473060131073, "learning_rate": 0.0003891578339448295, "loss": 3.4668, "step": 71950 }, { "epoch": 4.8889115368936, "grad_norm": 1.3536044359207153, "learning_rate": 0.00038911536893599676, "loss": 3.5969, "step": 71955 }, { "epoch": 4.889251256964261, "grad_norm": 1.250707983970642, "learning_rate": 0.00038907290392716404, 
"loss": 3.0737, "step": 71960 }, { "epoch": 4.889590977034923, "grad_norm": 1.2502567768096924, "learning_rate": 0.00038903043891833127, "loss": 3.2762, "step": 71965 }, { "epoch": 4.889930697105585, "grad_norm": 1.1808269023895264, "learning_rate": 0.0003889879739094986, "loss": 3.2651, "step": 71970 }, { "epoch": 4.8902704171762466, "grad_norm": 1.1807386875152588, "learning_rate": 0.0003889455089006659, "loss": 3.4893, "step": 71975 }, { "epoch": 4.890610137246909, "grad_norm": 1.4946489334106445, "learning_rate": 0.0003889030438918331, "loss": 3.3775, "step": 71980 }, { "epoch": 4.890949857317571, "grad_norm": 1.2012004852294922, "learning_rate": 0.00038886057888300044, "loss": 3.6012, "step": 71985 }, { "epoch": 4.891289577388232, "grad_norm": 1.3576180934906006, "learning_rate": 0.0003888181138741677, "loss": 3.5291, "step": 71990 }, { "epoch": 4.891629297458894, "grad_norm": 1.3543529510498047, "learning_rate": 0.00038877564886533495, "loss": 3.6114, "step": 71995 }, { "epoch": 4.891969017529556, "grad_norm": 1.0303341150283813, "learning_rate": 0.00038873318385650223, "loss": 3.4947, "step": 72000 }, { "epoch": 4.892308737600217, "grad_norm": 1.3233299255371094, "learning_rate": 0.00038869071884766956, "loss": 3.4638, "step": 72005 }, { "epoch": 4.892648457670879, "grad_norm": 1.3996858596801758, "learning_rate": 0.0003886482538388368, "loss": 3.2116, "step": 72010 }, { "epoch": 4.892988177741541, "grad_norm": 1.4788696765899658, "learning_rate": 0.00038860578883000407, "loss": 3.4808, "step": 72015 }, { "epoch": 4.893327897812203, "grad_norm": 1.4733788967132568, "learning_rate": 0.0003885633238211714, "loss": 3.5085, "step": 72020 }, { "epoch": 4.893667617882865, "grad_norm": 1.1576755046844482, "learning_rate": 0.00038852085881233863, "loss": 3.2453, "step": 72025 }, { "epoch": 4.894007337953527, "grad_norm": 1.4014499187469482, "learning_rate": 0.0003884783938035059, "loss": 3.6396, "step": 72030 }, { "epoch": 4.894347058024188, "grad_norm": 
0.9898927807807922, "learning_rate": 0.0003884359287946732, "loss": 3.0128, "step": 72035 }, { "epoch": 4.89468677809485, "grad_norm": 1.4223018884658813, "learning_rate": 0.00038839346378584047, "loss": 3.67, "step": 72040 }, { "epoch": 4.895026498165512, "grad_norm": 1.7379204034805298, "learning_rate": 0.00038835099877700775, "loss": 3.4371, "step": 72045 }, { "epoch": 4.895366218236173, "grad_norm": 1.2695854902267456, "learning_rate": 0.00038830853376817503, "loss": 3.4003, "step": 72050 }, { "epoch": 4.895705938306835, "grad_norm": 1.3058708906173706, "learning_rate": 0.0003882660687593423, "loss": 3.4741, "step": 72055 }, { "epoch": 4.896045658377497, "grad_norm": 1.5420485734939575, "learning_rate": 0.0003882236037505096, "loss": 3.4212, "step": 72060 }, { "epoch": 4.896385378448159, "grad_norm": 1.2469590902328491, "learning_rate": 0.00038818113874167687, "loss": 3.5082, "step": 72065 }, { "epoch": 4.896725098518821, "grad_norm": 1.3977705240249634, "learning_rate": 0.00038813867373284415, "loss": 3.5346, "step": 72070 }, { "epoch": 4.897064818589483, "grad_norm": 1.008619785308838, "learning_rate": 0.00038809620872401143, "loss": 3.4202, "step": 72075 }, { "epoch": 4.897404538660144, "grad_norm": 1.5619101524353027, "learning_rate": 0.0003880537437151787, "loss": 3.3857, "step": 72080 }, { "epoch": 4.897744258730806, "grad_norm": 1.1636102199554443, "learning_rate": 0.000388011278706346, "loss": 3.5835, "step": 72085 }, { "epoch": 4.898083978801468, "grad_norm": 1.5658692121505737, "learning_rate": 0.00038796881369751327, "loss": 3.4853, "step": 72090 }, { "epoch": 4.898423698872129, "grad_norm": 1.6337257623672485, "learning_rate": 0.00038792634868868055, "loss": 3.2863, "step": 72095 }, { "epoch": 4.898763418942791, "grad_norm": 1.116905927658081, "learning_rate": 0.00038788388367984783, "loss": 3.3459, "step": 72100 }, { "epoch": 4.899103139013453, "grad_norm": 1.241253137588501, "learning_rate": 0.00038784141867101505, "loss": 3.5534, "step": 72105 }, 
{ "epoch": 4.899442859084115, "grad_norm": 1.4484022855758667, "learning_rate": 0.0003877989536621824, "loss": 3.5067, "step": 72110 }, { "epoch": 4.899782579154777, "grad_norm": 1.454042673110962, "learning_rate": 0.00038775648865334967, "loss": 3.0547, "step": 72115 }, { "epoch": 4.900122299225439, "grad_norm": 1.155236840248108, "learning_rate": 0.0003877140236445169, "loss": 3.4389, "step": 72120 }, { "epoch": 4.9004620192961, "grad_norm": 1.0304023027420044, "learning_rate": 0.00038767155863568423, "loss": 3.2229, "step": 72125 }, { "epoch": 4.900801739366762, "grad_norm": 1.2311536073684692, "learning_rate": 0.0003876290936268515, "loss": 3.5834, "step": 72130 }, { "epoch": 4.901141459437423, "grad_norm": 1.1592587232589722, "learning_rate": 0.00038758662861801874, "loss": 3.4475, "step": 72135 }, { "epoch": 4.901481179508085, "grad_norm": 1.6315361261367798, "learning_rate": 0.000387544163609186, "loss": 3.2296, "step": 72140 }, { "epoch": 4.901820899578747, "grad_norm": 1.21695077419281, "learning_rate": 0.00038750169860035335, "loss": 3.4198, "step": 72145 }, { "epoch": 4.9021606196494085, "grad_norm": 1.1874669790267944, "learning_rate": 0.0003874592335915206, "loss": 3.5445, "step": 72150 }, { "epoch": 4.902500339720071, "grad_norm": 1.3172918558120728, "learning_rate": 0.00038741676858268786, "loss": 3.2751, "step": 72155 }, { "epoch": 4.902840059790733, "grad_norm": 1.1389607191085815, "learning_rate": 0.0003873743035738552, "loss": 3.4572, "step": 72160 }, { "epoch": 4.903179779861394, "grad_norm": 1.4493863582611084, "learning_rate": 0.0003873318385650224, "loss": 3.3184, "step": 72165 }, { "epoch": 4.903519499932056, "grad_norm": 1.4173086881637573, "learning_rate": 0.0003872893735561897, "loss": 3.3751, "step": 72170 }, { "epoch": 4.903859220002718, "grad_norm": 1.117167353630066, "learning_rate": 0.000387246908547357, "loss": 3.4191, "step": 72175 }, { "epoch": 4.904198940073379, "grad_norm": 1.0377695560455322, "learning_rate": 
0.00038720444353852426, "loss": 3.4735, "step": 72180 }, { "epoch": 4.904538660144041, "grad_norm": 1.3435635566711426, "learning_rate": 0.00038716197852969154, "loss": 3.5729, "step": 72185 }, { "epoch": 4.904878380214703, "grad_norm": 1.0444129705429077, "learning_rate": 0.0003871195135208588, "loss": 3.6933, "step": 72190 }, { "epoch": 4.9052181002853645, "grad_norm": 1.2927173376083374, "learning_rate": 0.0003870770485120261, "loss": 3.2952, "step": 72195 }, { "epoch": 4.905557820356027, "grad_norm": 1.3306262493133545, "learning_rate": 0.0003870345835031934, "loss": 3.443, "step": 72200 }, { "epoch": 4.905897540426689, "grad_norm": 1.2648415565490723, "learning_rate": 0.00038699211849436066, "loss": 3.4259, "step": 72205 }, { "epoch": 4.90623726049735, "grad_norm": 1.2369359731674194, "learning_rate": 0.0003869496534855279, "loss": 3.5267, "step": 72210 }, { "epoch": 4.906576980568012, "grad_norm": 0.9884666204452515, "learning_rate": 0.0003869071884766952, "loss": 3.2987, "step": 72215 }, { "epoch": 4.906916700638674, "grad_norm": 1.5159311294555664, "learning_rate": 0.0003868647234678625, "loss": 3.2662, "step": 72220 }, { "epoch": 4.907256420709335, "grad_norm": 1.2573132514953613, "learning_rate": 0.0003868222584590297, "loss": 3.379, "step": 72225 }, { "epoch": 4.907596140779997, "grad_norm": 1.8622018098831177, "learning_rate": 0.00038677979345019706, "loss": 3.1309, "step": 72230 }, { "epoch": 4.907935860850659, "grad_norm": 1.2178478240966797, "learning_rate": 0.00038673732844136434, "loss": 3.1941, "step": 72235 }, { "epoch": 4.9082755809213205, "grad_norm": 1.2484796047210693, "learning_rate": 0.0003866948634325316, "loss": 3.5196, "step": 72240 }, { "epoch": 4.908615300991983, "grad_norm": 1.249084234237671, "learning_rate": 0.0003866523984236989, "loss": 3.5212, "step": 72245 }, { "epoch": 4.908955021062645, "grad_norm": 1.3876031637191772, "learning_rate": 0.0003866099334148662, "loss": 3.2194, "step": 72250 }, { "epoch": 4.909294741133306, 
"grad_norm": 1.554327368736267, "learning_rate": 0.00038656746840603346, "loss": 3.4132, "step": 72255 }, { "epoch": 4.909634461203968, "grad_norm": 1.476328730583191, "learning_rate": 0.0003865250033972007, "loss": 3.5507, "step": 72260 }, { "epoch": 4.90997418127463, "grad_norm": 1.2787079811096191, "learning_rate": 0.000386482538388368, "loss": 3.4596, "step": 72265 }, { "epoch": 4.910313901345291, "grad_norm": 1.4032416343688965, "learning_rate": 0.0003864400733795353, "loss": 3.212, "step": 72270 }, { "epoch": 4.910653621415953, "grad_norm": 1.6555261611938477, "learning_rate": 0.0003863976083707025, "loss": 3.3463, "step": 72275 }, { "epoch": 4.910993341486615, "grad_norm": 1.1404798030853271, "learning_rate": 0.00038635514336186986, "loss": 3.2917, "step": 72280 }, { "epoch": 4.911333061557277, "grad_norm": 1.3890743255615234, "learning_rate": 0.00038631267835303714, "loss": 3.3601, "step": 72285 }, { "epoch": 4.911672781627939, "grad_norm": 1.072434425354004, "learning_rate": 0.00038627021334420436, "loss": 3.0553, "step": 72290 }, { "epoch": 4.912012501698601, "grad_norm": 1.117240309715271, "learning_rate": 0.00038622774833537164, "loss": 3.1192, "step": 72295 }, { "epoch": 4.912352221769262, "grad_norm": 1.0667765140533447, "learning_rate": 0.000386185283326539, "loss": 3.2258, "step": 72300 }, { "epoch": 4.912691941839924, "grad_norm": 1.1390517950057983, "learning_rate": 0.0003861428183177062, "loss": 3.5083, "step": 72305 }, { "epoch": 4.913031661910586, "grad_norm": 1.4193767309188843, "learning_rate": 0.0003861003533088735, "loss": 3.3351, "step": 72310 }, { "epoch": 4.913371381981247, "grad_norm": 1.5727853775024414, "learning_rate": 0.0003860578883000408, "loss": 3.2864, "step": 72315 }, { "epoch": 4.913711102051909, "grad_norm": 1.1480350494384766, "learning_rate": 0.00038601542329120804, "loss": 3.3652, "step": 72320 }, { "epoch": 4.914050822122571, "grad_norm": 1.446590542793274, "learning_rate": 0.0003859729582823753, "loss": 3.4304, "step": 
72325 }, { "epoch": 4.914390542193233, "grad_norm": 1.2913936376571655, "learning_rate": 0.0003859304932735426, "loss": 3.2163, "step": 72330 }, { "epoch": 4.914730262263895, "grad_norm": 1.5045394897460938, "learning_rate": 0.0003858880282647099, "loss": 3.4121, "step": 72335 }, { "epoch": 4.915069982334556, "grad_norm": 1.1071370840072632, "learning_rate": 0.00038584556325587716, "loss": 3.4479, "step": 72340 }, { "epoch": 4.915409702405218, "grad_norm": 1.4621776342391968, "learning_rate": 0.00038580309824704444, "loss": 3.3812, "step": 72345 }, { "epoch": 4.91574942247588, "grad_norm": 1.7099417448043823, "learning_rate": 0.0003857606332382117, "loss": 3.3491, "step": 72350 }, { "epoch": 4.916089142546541, "grad_norm": 0.9856950044631958, "learning_rate": 0.000385718168229379, "loss": 3.4309, "step": 72355 }, { "epoch": 4.916428862617203, "grad_norm": 1.3895632028579712, "learning_rate": 0.0003856757032205463, "loss": 3.3558, "step": 72360 }, { "epoch": 4.916768582687865, "grad_norm": 1.25521981716156, "learning_rate": 0.0003856332382117135, "loss": 3.3516, "step": 72365 }, { "epoch": 4.9171083027585265, "grad_norm": 1.1972713470458984, "learning_rate": 0.00038559077320288084, "loss": 3.331, "step": 72370 }, { "epoch": 4.917448022829189, "grad_norm": 1.1581921577453613, "learning_rate": 0.0003855483081940481, "loss": 3.5647, "step": 72375 }, { "epoch": 4.917787742899851, "grad_norm": 1.430216908454895, "learning_rate": 0.00038550584318521535, "loss": 3.3622, "step": 72380 }, { "epoch": 4.918127462970512, "grad_norm": 2.108276605606079, "learning_rate": 0.0003854633781763827, "loss": 3.3392, "step": 72385 }, { "epoch": 4.918467183041174, "grad_norm": 1.3031235933303833, "learning_rate": 0.00038542091316754996, "loss": 3.4412, "step": 72390 }, { "epoch": 4.918806903111836, "grad_norm": 1.5485137701034546, "learning_rate": 0.0003853784481587172, "loss": 3.4024, "step": 72395 }, { "epoch": 4.919146623182497, "grad_norm": 1.0399994850158691, "learning_rate": 
0.00038533598314988447, "loss": 3.5038, "step": 72400 }, { "epoch": 4.919486343253159, "grad_norm": 1.0443446636199951, "learning_rate": 0.0003852935181410518, "loss": 3.2547, "step": 72405 }, { "epoch": 4.919826063323821, "grad_norm": 1.5487217903137207, "learning_rate": 0.0003852510531322191, "loss": 3.4513, "step": 72410 }, { "epoch": 4.9201657833944825, "grad_norm": 1.596000075340271, "learning_rate": 0.0003852085881233863, "loss": 3.3541, "step": 72415 }, { "epoch": 4.920505503465145, "grad_norm": 1.0403447151184082, "learning_rate": 0.00038516612311455365, "loss": 3.2711, "step": 72420 }, { "epoch": 4.920845223535807, "grad_norm": 1.2532355785369873, "learning_rate": 0.0003851236581057209, "loss": 3.1644, "step": 72425 }, { "epoch": 4.921184943606468, "grad_norm": 1.5246920585632324, "learning_rate": 0.00038508119309688815, "loss": 3.2753, "step": 72430 }, { "epoch": 4.92152466367713, "grad_norm": 1.2560820579528809, "learning_rate": 0.00038503872808805543, "loss": 3.2346, "step": 72435 }, { "epoch": 4.921864383747792, "grad_norm": 1.3164689540863037, "learning_rate": 0.00038499626307922277, "loss": 3.3579, "step": 72440 }, { "epoch": 4.922204103818453, "grad_norm": 1.5501232147216797, "learning_rate": 0.00038495379807039, "loss": 3.4818, "step": 72445 }, { "epoch": 4.922543823889115, "grad_norm": 1.3731955289840698, "learning_rate": 0.00038491133306155727, "loss": 3.1797, "step": 72450 }, { "epoch": 4.922883543959777, "grad_norm": 1.181955099105835, "learning_rate": 0.0003848688680527246, "loss": 3.3498, "step": 72455 }, { "epoch": 4.9232232640304385, "grad_norm": 0.9697031378746033, "learning_rate": 0.00038482640304389183, "loss": 3.4107, "step": 72460 }, { "epoch": 4.923562984101101, "grad_norm": 1.1857160329818726, "learning_rate": 0.0003847839380350591, "loss": 3.481, "step": 72465 }, { "epoch": 4.923902704171763, "grad_norm": 0.9986404776573181, "learning_rate": 0.0003847414730262264, "loss": 3.5964, "step": 72470 }, { "epoch": 4.924242424242424, 
"grad_norm": 1.2402451038360596, "learning_rate": 0.00038469900801739367, "loss": 3.5037, "step": 72475 }, { "epoch": 4.924582144313086, "grad_norm": 0.994248628616333, "learning_rate": 0.00038465654300856095, "loss": 3.2434, "step": 72480 }, { "epoch": 4.924921864383748, "grad_norm": 1.5864661931991577, "learning_rate": 0.00038461407799972823, "loss": 3.3112, "step": 72485 }, { "epoch": 4.925261584454409, "grad_norm": 1.314483880996704, "learning_rate": 0.0003845716129908955, "loss": 3.2617, "step": 72490 }, { "epoch": 4.925601304525071, "grad_norm": 1.7740504741668701, "learning_rate": 0.0003845291479820628, "loss": 3.2511, "step": 72495 }, { "epoch": 4.925941024595733, "grad_norm": 1.264701008796692, "learning_rate": 0.00038448668297323007, "loss": 3.3431, "step": 72500 }, { "epoch": 4.9262807446663945, "grad_norm": 1.4985588788986206, "learning_rate": 0.0003844442179643973, "loss": 3.2869, "step": 72505 }, { "epoch": 4.926620464737057, "grad_norm": 1.4705322980880737, "learning_rate": 0.00038440175295556463, "loss": 3.6045, "step": 72510 }, { "epoch": 4.926960184807719, "grad_norm": 1.3705744743347168, "learning_rate": 0.0003843592879467319, "loss": 3.6803, "step": 72515 }, { "epoch": 4.92729990487838, "grad_norm": 1.2906973361968994, "learning_rate": 0.00038431682293789914, "loss": 3.4403, "step": 72520 }, { "epoch": 4.927639624949042, "grad_norm": 1.4056504964828491, "learning_rate": 0.00038427435792906647, "loss": 3.3504, "step": 72525 }, { "epoch": 4.927979345019704, "grad_norm": 1.156920075416565, "learning_rate": 0.00038423189292023375, "loss": 3.1949, "step": 72530 }, { "epoch": 4.928319065090365, "grad_norm": 1.0271222591400146, "learning_rate": 0.000384189427911401, "loss": 3.3206, "step": 72535 }, { "epoch": 4.928658785161027, "grad_norm": 1.080320119857788, "learning_rate": 0.0003841469629025683, "loss": 3.6487, "step": 72540 }, { "epoch": 4.928998505231689, "grad_norm": 1.1947013139724731, "learning_rate": 0.0003841044978937356, "loss": 3.314, 
"step": 72545 }, { "epoch": 4.9293382253023506, "grad_norm": 1.2730343341827393, "learning_rate": 0.0003840620328849028, "loss": 3.386, "step": 72550 }, { "epoch": 4.929677945373013, "grad_norm": 0.9912260174751282, "learning_rate": 0.0003840195678760701, "loss": 3.4569, "step": 72555 }, { "epoch": 4.930017665443675, "grad_norm": 1.1915446519851685, "learning_rate": 0.00038397710286723743, "loss": 3.2606, "step": 72560 }, { "epoch": 4.930357385514336, "grad_norm": 1.4304605722427368, "learning_rate": 0.00038393463785840466, "loss": 3.4168, "step": 72565 }, { "epoch": 4.930697105584998, "grad_norm": 1.2737505435943604, "learning_rate": 0.00038389217284957194, "loss": 3.3861, "step": 72570 }, { "epoch": 4.93103682565566, "grad_norm": 1.5251481533050537, "learning_rate": 0.0003838497078407393, "loss": 3.6164, "step": 72575 }, { "epoch": 4.931376545726321, "grad_norm": 1.3820581436157227, "learning_rate": 0.00038380724283190655, "loss": 3.4695, "step": 72580 }, { "epoch": 4.931716265796983, "grad_norm": 1.303821086883545, "learning_rate": 0.0003837647778230738, "loss": 3.4389, "step": 72585 }, { "epoch": 4.932055985867645, "grad_norm": 1.3582406044006348, "learning_rate": 0.00038372231281424106, "loss": 3.3505, "step": 72590 }, { "epoch": 4.932395705938307, "grad_norm": 1.0841007232666016, "learning_rate": 0.0003836798478054084, "loss": 3.4778, "step": 72595 }, { "epoch": 4.932735426008969, "grad_norm": 1.3848587274551392, "learning_rate": 0.0003836373827965756, "loss": 3.486, "step": 72600 }, { "epoch": 4.933075146079631, "grad_norm": 1.1060553789138794, "learning_rate": 0.0003835949177877429, "loss": 3.656, "step": 72605 }, { "epoch": 4.933414866150292, "grad_norm": 1.2114289999008179, "learning_rate": 0.00038355245277891023, "loss": 3.5243, "step": 72610 }, { "epoch": 4.933754586220954, "grad_norm": 1.3836556673049927, "learning_rate": 0.00038350998777007746, "loss": 3.3852, "step": 72615 }, { "epoch": 4.934094306291616, "grad_norm": 1.105800747871399, 
"learning_rate": 0.00038346752276124474, "loss": 3.6117, "step": 72620 }, { "epoch": 4.934434026362277, "grad_norm": 1.0024280548095703, "learning_rate": 0.000383425057752412, "loss": 3.0448, "step": 72625 }, { "epoch": 4.934773746432939, "grad_norm": 1.1235713958740234, "learning_rate": 0.0003833825927435793, "loss": 3.4966, "step": 72630 }, { "epoch": 4.935113466503601, "grad_norm": 0.9909025430679321, "learning_rate": 0.0003833401277347466, "loss": 3.0513, "step": 72635 }, { "epoch": 4.935453186574263, "grad_norm": 1.2634679079055786, "learning_rate": 0.00038329766272591386, "loss": 3.3967, "step": 72640 }, { "epoch": 4.935792906644925, "grad_norm": 1.2640275955200195, "learning_rate": 0.00038325519771708114, "loss": 3.4213, "step": 72645 }, { "epoch": 4.936132626715587, "grad_norm": 1.4030464887619019, "learning_rate": 0.0003832127327082484, "loss": 3.3488, "step": 72650 }, { "epoch": 4.936472346786248, "grad_norm": 1.1976373195648193, "learning_rate": 0.0003831702676994157, "loss": 3.2014, "step": 72655 }, { "epoch": 4.93681206685691, "grad_norm": 1.2646697759628296, "learning_rate": 0.0003831278026905829, "loss": 3.5553, "step": 72660 }, { "epoch": 4.937151786927572, "grad_norm": 1.373661994934082, "learning_rate": 0.00038308533768175026, "loss": 3.3748, "step": 72665 }, { "epoch": 4.937491506998233, "grad_norm": 1.0527650117874146, "learning_rate": 0.00038304287267291754, "loss": 3.2612, "step": 72670 }, { "epoch": 4.937831227068895, "grad_norm": 1.168333888053894, "learning_rate": 0.00038300040766408477, "loss": 3.2584, "step": 72675 }, { "epoch": 4.938170947139557, "grad_norm": 1.0181033611297607, "learning_rate": 0.0003829579426552521, "loss": 3.2842, "step": 72680 }, { "epoch": 4.938510667210219, "grad_norm": 1.1133114099502563, "learning_rate": 0.0003829154776464194, "loss": 3.1226, "step": 72685 }, { "epoch": 4.938850387280881, "grad_norm": 1.1897069215774536, "learning_rate": 0.0003828730126375866, "loss": 3.4061, "step": 72690 }, { "epoch": 
4.939190107351543, "grad_norm": 1.5854424238204956, "learning_rate": 0.0003828305476287539, "loss": 3.178, "step": 72695 }, { "epoch": 4.939529827422204, "grad_norm": 1.4002054929733276, "learning_rate": 0.0003827880826199212, "loss": 3.4537, "step": 72700 }, { "epoch": 4.939869547492866, "grad_norm": 1.238722324371338, "learning_rate": 0.00038274561761108845, "loss": 3.3225, "step": 72705 }, { "epoch": 4.940209267563528, "grad_norm": 1.22383713722229, "learning_rate": 0.0003827031526022557, "loss": 3.223, "step": 72710 }, { "epoch": 4.940548987634189, "grad_norm": 1.1001181602478027, "learning_rate": 0.00038266068759342306, "loss": 3.3571, "step": 72715 }, { "epoch": 4.940888707704851, "grad_norm": 1.1605921983718872, "learning_rate": 0.0003826182225845903, "loss": 3.5442, "step": 72720 }, { "epoch": 4.941228427775513, "grad_norm": 1.0457721948623657, "learning_rate": 0.00038257575757575757, "loss": 3.4776, "step": 72725 }, { "epoch": 4.941568147846175, "grad_norm": 1.3435200452804565, "learning_rate": 0.00038253329256692485, "loss": 3.1699, "step": 72730 }, { "epoch": 4.941907867916837, "grad_norm": 1.2796852588653564, "learning_rate": 0.00038249082755809213, "loss": 3.2885, "step": 72735 }, { "epoch": 4.942247587987499, "grad_norm": 1.4233943223953247, "learning_rate": 0.0003824483625492594, "loss": 3.4958, "step": 72740 }, { "epoch": 4.94258730805816, "grad_norm": 1.1840883493423462, "learning_rate": 0.0003824058975404267, "loss": 3.2637, "step": 72745 }, { "epoch": 4.942927028128822, "grad_norm": 0.9942564964294434, "learning_rate": 0.000382363432531594, "loss": 3.2967, "step": 72750 }, { "epoch": 4.943266748199484, "grad_norm": 1.5441436767578125, "learning_rate": 0.00038232096752276125, "loss": 3.4789, "step": 72755 }, { "epoch": 4.943606468270145, "grad_norm": 1.2457506656646729, "learning_rate": 0.00038227850251392853, "loss": 3.3482, "step": 72760 }, { "epoch": 4.943946188340807, "grad_norm": 1.1732836961746216, "learning_rate": 0.0003822360375050958, 
"loss": 3.5048, "step": 72765 }, { "epoch": 4.944285908411469, "grad_norm": 1.4285982847213745, "learning_rate": 0.0003821935724962631, "loss": 3.353, "step": 72770 }, { "epoch": 4.944625628482131, "grad_norm": 1.2886170148849487, "learning_rate": 0.00038215110748743037, "loss": 3.4995, "step": 72775 }, { "epoch": 4.944965348552793, "grad_norm": 1.268692135810852, "learning_rate": 0.00038210864247859765, "loss": 3.5814, "step": 72780 }, { "epoch": 4.945305068623455, "grad_norm": 1.2533706426620483, "learning_rate": 0.00038206617746976493, "loss": 3.2198, "step": 72785 }, { "epoch": 4.945644788694116, "grad_norm": 1.6279661655426025, "learning_rate": 0.0003820237124609322, "loss": 3.5347, "step": 72790 }, { "epoch": 4.945984508764778, "grad_norm": 1.0692623853683472, "learning_rate": 0.0003819812474520995, "loss": 3.4507, "step": 72795 }, { "epoch": 4.94632422883544, "grad_norm": 1.2912095785140991, "learning_rate": 0.0003819387824432667, "loss": 3.5452, "step": 72800 }, { "epoch": 4.946663948906101, "grad_norm": 1.1771538257598877, "learning_rate": 0.00038189631743443405, "loss": 3.2178, "step": 72805 }, { "epoch": 4.947003668976763, "grad_norm": 1.8122361898422241, "learning_rate": 0.00038185385242560133, "loss": 3.2637, "step": 72810 }, { "epoch": 4.9473433890474245, "grad_norm": 1.19475519657135, "learning_rate": 0.00038181138741676855, "loss": 3.5265, "step": 72815 }, { "epoch": 4.947683109118087, "grad_norm": 2.075111150741577, "learning_rate": 0.0003817689224079359, "loss": 3.0741, "step": 72820 }, { "epoch": 4.948022829188749, "grad_norm": 1.2807737588882446, "learning_rate": 0.00038172645739910317, "loss": 3.2586, "step": 72825 }, { "epoch": 4.94836254925941, "grad_norm": 1.5137165784835815, "learning_rate": 0.0003816839923902704, "loss": 3.2967, "step": 72830 }, { "epoch": 4.948702269330072, "grad_norm": 1.4107505083084106, "learning_rate": 0.00038164152738143773, "loss": 3.4886, "step": 72835 }, { "epoch": 4.949041989400734, "grad_norm": 
1.186172604560852, "learning_rate": 0.000381599062372605, "loss": 3.45, "step": 72840 }, { "epoch": 4.949381709471395, "grad_norm": 1.488816738128662, "learning_rate": 0.00038155659736377223, "loss": 3.4231, "step": 72845 }, { "epoch": 4.949721429542057, "grad_norm": 1.3867518901824951, "learning_rate": 0.0003815141323549395, "loss": 3.5111, "step": 72850 }, { "epoch": 4.950061149612719, "grad_norm": 1.1677157878875732, "learning_rate": 0.00038147166734610685, "loss": 2.9145, "step": 72855 }, { "epoch": 4.9504008696833806, "grad_norm": 1.293175220489502, "learning_rate": 0.0003814292023372741, "loss": 3.4782, "step": 72860 }, { "epoch": 4.950740589754043, "grad_norm": 1.4353902339935303, "learning_rate": 0.00038138673732844136, "loss": 3.5849, "step": 72865 }, { "epoch": 4.951080309824705, "grad_norm": 1.2115610837936401, "learning_rate": 0.0003813442723196087, "loss": 3.6018, "step": 72870 }, { "epoch": 4.951420029895366, "grad_norm": 1.2207385301589966, "learning_rate": 0.0003813018073107759, "loss": 3.4344, "step": 72875 }, { "epoch": 4.951759749966028, "grad_norm": 1.0433799028396606, "learning_rate": 0.0003812593423019432, "loss": 3.3796, "step": 72880 }, { "epoch": 4.95209947003669, "grad_norm": 1.1699203252792358, "learning_rate": 0.0003812168772931105, "loss": 3.4114, "step": 72885 }, { "epoch": 4.952439190107351, "grad_norm": 1.1554241180419922, "learning_rate": 0.00038117441228427776, "loss": 3.3211, "step": 72890 }, { "epoch": 4.952778910178013, "grad_norm": 1.1578402519226074, "learning_rate": 0.00038113194727544504, "loss": 3.4945, "step": 72895 }, { "epoch": 4.953118630248675, "grad_norm": 1.4514304399490356, "learning_rate": 0.0003810894822666123, "loss": 3.2023, "step": 72900 }, { "epoch": 4.953458350319337, "grad_norm": 1.3617974519729614, "learning_rate": 0.0003810470172577796, "loss": 3.2365, "step": 72905 }, { "epoch": 4.953798070389999, "grad_norm": 1.0270062685012817, "learning_rate": 0.0003810045522489469, "loss": 3.1204, "step": 72910 }, { 
"epoch": 4.954137790460661, "grad_norm": 1.0581008195877075, "learning_rate": 0.00038096208724011416, "loss": 3.6223, "step": 72915 }, { "epoch": 4.954477510531322, "grad_norm": 1.4689217805862427, "learning_rate": 0.00038091962223128144, "loss": 3.4145, "step": 72920 }, { "epoch": 4.954817230601984, "grad_norm": 2.206610918045044, "learning_rate": 0.0003808771572224487, "loss": 3.5002, "step": 72925 }, { "epoch": 4.955156950672646, "grad_norm": 1.1032696962356567, "learning_rate": 0.000380834692213616, "loss": 3.471, "step": 72930 }, { "epoch": 4.955496670743307, "grad_norm": 2.8414652347564697, "learning_rate": 0.0003807922272047833, "loss": 3.3097, "step": 72935 }, { "epoch": 4.955836390813969, "grad_norm": 1.4476892948150635, "learning_rate": 0.00038074976219595056, "loss": 3.6093, "step": 72940 }, { "epoch": 4.956176110884631, "grad_norm": 1.4050469398498535, "learning_rate": 0.00038070729718711784, "loss": 3.2851, "step": 72945 }, { "epoch": 4.956515830955293, "grad_norm": 1.9658961296081543, "learning_rate": 0.0003806648321782851, "loss": 3.4708, "step": 72950 }, { "epoch": 4.956855551025955, "grad_norm": 1.242713212966919, "learning_rate": 0.00038062236716945234, "loss": 3.2536, "step": 72955 }, { "epoch": 4.957195271096617, "grad_norm": 1.0342761278152466, "learning_rate": 0.0003805799021606197, "loss": 3.5201, "step": 72960 }, { "epoch": 4.957534991167278, "grad_norm": 1.502946376800537, "learning_rate": 0.00038053743715178696, "loss": 3.4713, "step": 72965 }, { "epoch": 4.95787471123794, "grad_norm": 1.3113230466842651, "learning_rate": 0.0003804949721429542, "loss": 3.5089, "step": 72970 }, { "epoch": 4.958214431308602, "grad_norm": 1.0420541763305664, "learning_rate": 0.0003804525071341215, "loss": 3.5464, "step": 72975 }, { "epoch": 4.958554151379263, "grad_norm": 1.168650507926941, "learning_rate": 0.0003804100421252888, "loss": 3.3704, "step": 72980 }, { "epoch": 4.958893871449925, "grad_norm": 1.3369601964950562, "learning_rate": 
0.000380367577116456, "loss": 3.4789, "step": 72985 }, { "epoch": 4.959233591520587, "grad_norm": 1.4072847366333008, "learning_rate": 0.0003803251121076233, "loss": 3.2153, "step": 72990 }, { "epoch": 4.959573311591249, "grad_norm": 1.380543828010559, "learning_rate": 0.00038028264709879064, "loss": 3.3268, "step": 72995 }, { "epoch": 4.959913031661911, "grad_norm": 1.4551594257354736, "learning_rate": 0.00038024018208995786, "loss": 3.3368, "step": 73000 }, { "epoch": 4.960252751732573, "grad_norm": 1.047022819519043, "learning_rate": 0.00038019771708112514, "loss": 3.272, "step": 73005 }, { "epoch": 4.960592471803234, "grad_norm": 1.1524949073791504, "learning_rate": 0.0003801552520722925, "loss": 3.5427, "step": 73010 }, { "epoch": 4.960932191873896, "grad_norm": 1.080170750617981, "learning_rate": 0.0003801127870634597, "loss": 3.2516, "step": 73015 }, { "epoch": 4.961271911944557, "grad_norm": 1.2127189636230469, "learning_rate": 0.000380070322054627, "loss": 3.6514, "step": 73020 }, { "epoch": 4.961611632015219, "grad_norm": 1.3252965211868286, "learning_rate": 0.00038002785704579426, "loss": 3.3102, "step": 73025 }, { "epoch": 4.961951352085881, "grad_norm": 1.1786284446716309, "learning_rate": 0.00037998539203696154, "loss": 3.4647, "step": 73030 }, { "epoch": 4.9622910721565425, "grad_norm": 1.284110188484192, "learning_rate": 0.0003799429270281288, "loss": 3.6199, "step": 73035 }, { "epoch": 4.962630792227205, "grad_norm": 1.1423075199127197, "learning_rate": 0.0003799004620192961, "loss": 3.4629, "step": 73040 }, { "epoch": 4.962970512297867, "grad_norm": 1.200498342514038, "learning_rate": 0.0003798579970104634, "loss": 3.5113, "step": 73045 }, { "epoch": 4.963310232368528, "grad_norm": 1.4671823978424072, "learning_rate": 0.00037981553200163066, "loss": 3.3797, "step": 73050 }, { "epoch": 4.96364995243919, "grad_norm": 1.1420073509216309, "learning_rate": 0.00037977306699279794, "loss": 3.1853, "step": 73055 }, { "epoch": 4.963989672509852, 
"grad_norm": 1.4909248352050781, "learning_rate": 0.00037973060198396517, "loss": 3.3705, "step": 73060 }, { "epoch": 4.964329392580513, "grad_norm": 1.3805780410766602, "learning_rate": 0.0003796881369751325, "loss": 3.2404, "step": 73065 }, { "epoch": 4.964669112651175, "grad_norm": 1.4034254550933838, "learning_rate": 0.0003796456719662998, "loss": 3.5855, "step": 73070 }, { "epoch": 4.965008832721837, "grad_norm": 1.2084801197052002, "learning_rate": 0.000379603206957467, "loss": 3.3452, "step": 73075 }, { "epoch": 4.9653485527924985, "grad_norm": 1.2710338830947876, "learning_rate": 0.00037956074194863434, "loss": 3.4084, "step": 73080 }, { "epoch": 4.965688272863161, "grad_norm": 1.065882921218872, "learning_rate": 0.0003795182769398016, "loss": 3.5784, "step": 73085 }, { "epoch": 4.966027992933823, "grad_norm": 1.102414846420288, "learning_rate": 0.0003794758119309689, "loss": 3.4162, "step": 73090 }, { "epoch": 4.966367713004484, "grad_norm": 1.3204783201217651, "learning_rate": 0.0003794333469221362, "loss": 3.2752, "step": 73095 }, { "epoch": 4.966707433075146, "grad_norm": 1.3645942211151123, "learning_rate": 0.00037939088191330346, "loss": 3.2268, "step": 73100 }, { "epoch": 4.967047153145808, "grad_norm": 1.0239018201828003, "learning_rate": 0.00037934841690447074, "loss": 3.4751, "step": 73105 }, { "epoch": 4.967386873216469, "grad_norm": 1.3162657022476196, "learning_rate": 0.00037930595189563797, "loss": 3.2804, "step": 73110 }, { "epoch": 4.967726593287131, "grad_norm": 1.0559813976287842, "learning_rate": 0.0003792634868868053, "loss": 3.3165, "step": 73115 }, { "epoch": 4.968066313357793, "grad_norm": 1.1107465028762817, "learning_rate": 0.0003792210218779726, "loss": 3.6469, "step": 73120 }, { "epoch": 4.9684060334284545, "grad_norm": 1.4139635562896729, "learning_rate": 0.0003791785568691398, "loss": 3.546, "step": 73125 }, { "epoch": 4.968745753499117, "grad_norm": 1.0973149538040161, "learning_rate": 0.00037913609186030715, "loss": 3.3869, 
"step": 73130 }, { "epoch": 4.969085473569779, "grad_norm": 1.0972399711608887, "learning_rate": 0.0003790936268514744, "loss": 3.1828, "step": 73135 }, { "epoch": 4.96942519364044, "grad_norm": 1.1512112617492676, "learning_rate": 0.00037905116184264165, "loss": 3.4623, "step": 73140 }, { "epoch": 4.969764913711102, "grad_norm": 1.1490795612335205, "learning_rate": 0.00037900869683380893, "loss": 3.2059, "step": 73145 }, { "epoch": 4.970104633781764, "grad_norm": 1.3017939329147339, "learning_rate": 0.00037896623182497627, "loss": 3.6311, "step": 73150 }, { "epoch": 4.970444353852425, "grad_norm": 1.1146563291549683, "learning_rate": 0.0003789237668161435, "loss": 3.3473, "step": 73155 }, { "epoch": 4.970784073923087, "grad_norm": 0.9525801539421082, "learning_rate": 0.00037888130180731077, "loss": 3.5641, "step": 73160 }, { "epoch": 4.971123793993749, "grad_norm": 1.3761742115020752, "learning_rate": 0.0003788388367984781, "loss": 3.5803, "step": 73165 }, { "epoch": 4.971463514064411, "grad_norm": 1.0854755640029907, "learning_rate": 0.00037879637178964533, "loss": 3.4214, "step": 73170 }, { "epoch": 4.971803234135073, "grad_norm": 1.196672797203064, "learning_rate": 0.0003787539067808126, "loss": 3.4616, "step": 73175 }, { "epoch": 4.972142954205735, "grad_norm": 1.1670210361480713, "learning_rate": 0.0003787114417719799, "loss": 3.3012, "step": 73180 }, { "epoch": 4.972482674276396, "grad_norm": 1.046005129814148, "learning_rate": 0.00037866897676314717, "loss": 3.25, "step": 73185 }, { "epoch": 4.972822394347058, "grad_norm": 1.1444069147109985, "learning_rate": 0.00037862651175431445, "loss": 3.5232, "step": 73190 }, { "epoch": 4.97316211441772, "grad_norm": 1.3426315784454346, "learning_rate": 0.00037858404674548173, "loss": 3.5654, "step": 73195 }, { "epoch": 4.973501834488381, "grad_norm": 1.2357597351074219, "learning_rate": 0.000378541581736649, "loss": 3.4753, "step": 73200 }, { "epoch": 4.973841554559043, "grad_norm": 1.1525381803512573, 
"learning_rate": 0.0003784991167278163, "loss": 3.5293, "step": 73205 }, { "epoch": 4.974181274629705, "grad_norm": 1.1437559127807617, "learning_rate": 0.00037845665171898357, "loss": 3.4122, "step": 73210 }, { "epoch": 4.974520994700367, "grad_norm": 1.136235237121582, "learning_rate": 0.0003784141867101508, "loss": 3.3103, "step": 73215 }, { "epoch": 4.974860714771029, "grad_norm": 1.108816385269165, "learning_rate": 0.00037837172170131813, "loss": 3.3284, "step": 73220 }, { "epoch": 4.975200434841691, "grad_norm": 1.2577741146087646, "learning_rate": 0.0003783292566924854, "loss": 3.2863, "step": 73225 }, { "epoch": 4.975540154912352, "grad_norm": 1.2892296314239502, "learning_rate": 0.00037828679168365264, "loss": 3.0063, "step": 73230 }, { "epoch": 4.975879874983014, "grad_norm": 1.1427816152572632, "learning_rate": 0.00037824432667481997, "loss": 3.3407, "step": 73235 }, { "epoch": 4.976219595053676, "grad_norm": 1.478085994720459, "learning_rate": 0.00037820186166598725, "loss": 3.1623, "step": 73240 }, { "epoch": 4.976559315124337, "grad_norm": 1.7052448987960815, "learning_rate": 0.0003781593966571545, "loss": 3.5747, "step": 73245 }, { "epoch": 4.976899035194999, "grad_norm": 1.1986087560653687, "learning_rate": 0.00037811693164832176, "loss": 3.6981, "step": 73250 }, { "epoch": 4.977238755265661, "grad_norm": 0.9372797012329102, "learning_rate": 0.0003780744666394891, "loss": 3.4921, "step": 73255 }, { "epoch": 4.977578475336323, "grad_norm": 1.0654163360595703, "learning_rate": 0.0003780320016306564, "loss": 3.381, "step": 73260 }, { "epoch": 4.977918195406985, "grad_norm": 1.018918752670288, "learning_rate": 0.0003779895366218236, "loss": 3.5552, "step": 73265 }, { "epoch": 4.978257915477647, "grad_norm": 1.3632107973098755, "learning_rate": 0.00037794707161299093, "loss": 3.3315, "step": 73270 }, { "epoch": 4.978597635548308, "grad_norm": 1.3044471740722656, "learning_rate": 0.0003779046066041582, "loss": 3.4422, "step": 73275 }, { "epoch": 
4.97893735561897, "grad_norm": 1.367025375366211, "learning_rate": 0.00037786214159532544, "loss": 3.3003, "step": 73280 }, { "epoch": 4.979277075689632, "grad_norm": 1.2011884450912476, "learning_rate": 0.0003778196765864927, "loss": 3.1912, "step": 73285 }, { "epoch": 4.979616795760293, "grad_norm": 1.1575391292572021, "learning_rate": 0.00037777721157766005, "loss": 3.114, "step": 73290 }, { "epoch": 4.979956515830955, "grad_norm": 2.331650972366333, "learning_rate": 0.0003777347465688273, "loss": 3.3575, "step": 73295 }, { "epoch": 4.980296235901617, "grad_norm": 1.3164970874786377, "learning_rate": 0.00037769228155999456, "loss": 3.1962, "step": 73300 }, { "epoch": 4.980635955972279, "grad_norm": 1.1693394184112549, "learning_rate": 0.0003776498165511619, "loss": 3.5517, "step": 73305 }, { "epoch": 4.980975676042941, "grad_norm": 1.0399988889694214, "learning_rate": 0.0003776073515423291, "loss": 3.3948, "step": 73310 }, { "epoch": 4.981315396113603, "grad_norm": 1.3785094022750854, "learning_rate": 0.0003775648865334964, "loss": 3.38, "step": 73315 }, { "epoch": 4.981655116184264, "grad_norm": 1.4416574239730835, "learning_rate": 0.0003775224215246637, "loss": 3.4525, "step": 73320 }, { "epoch": 4.981994836254926, "grad_norm": 1.1775096654891968, "learning_rate": 0.00037747995651583096, "loss": 3.3516, "step": 73325 }, { "epoch": 4.982334556325588, "grad_norm": 1.1409704685211182, "learning_rate": 0.00037743749150699824, "loss": 3.1205, "step": 73330 }, { "epoch": 4.982674276396249, "grad_norm": 1.4953057765960693, "learning_rate": 0.0003773950264981655, "loss": 3.4759, "step": 73335 }, { "epoch": 4.983013996466911, "grad_norm": 1.1874667406082153, "learning_rate": 0.0003773525614893328, "loss": 3.4959, "step": 73340 }, { "epoch": 4.983353716537573, "grad_norm": 1.5367431640625, "learning_rate": 0.0003773100964805001, "loss": 3.1754, "step": 73345 }, { "epoch": 4.983693436608235, "grad_norm": 1.204848051071167, "learning_rate": 0.00037726763147166736, "loss": 
3.4293, "step": 73350 }, { "epoch": 4.984033156678897, "grad_norm": 1.0830199718475342, "learning_rate": 0.0003772251664628346, "loss": 3.3582, "step": 73355 }, { "epoch": 4.984372876749559, "grad_norm": 1.376857042312622, "learning_rate": 0.0003771827014540019, "loss": 3.2851, "step": 73360 }, { "epoch": 4.98471259682022, "grad_norm": 1.150585651397705, "learning_rate": 0.0003771402364451692, "loss": 3.5668, "step": 73365 }, { "epoch": 4.985052316890882, "grad_norm": 1.0789754390716553, "learning_rate": 0.0003770977714363364, "loss": 3.4945, "step": 73370 }, { "epoch": 4.985392036961544, "grad_norm": 1.1439533233642578, "learning_rate": 0.00037705530642750376, "loss": 3.4345, "step": 73375 }, { "epoch": 4.985731757032205, "grad_norm": 1.3996120691299438, "learning_rate": 0.00037701284141867104, "loss": 3.3599, "step": 73380 }, { "epoch": 4.986071477102867, "grad_norm": 1.4917300939559937, "learning_rate": 0.00037697037640983827, "loss": 3.1948, "step": 73385 }, { "epoch": 4.986411197173529, "grad_norm": 1.3360373973846436, "learning_rate": 0.0003769279114010056, "loss": 3.3737, "step": 73390 }, { "epoch": 4.986750917244191, "grad_norm": 1.1294926404953003, "learning_rate": 0.0003768854463921729, "loss": 3.2184, "step": 73395 }, { "epoch": 4.987090637314853, "grad_norm": 1.293473482131958, "learning_rate": 0.0003768429813833401, "loss": 3.3303, "step": 73400 }, { "epoch": 4.987430357385515, "grad_norm": 1.1763782501220703, "learning_rate": 0.0003768005163745074, "loss": 3.4055, "step": 73405 }, { "epoch": 4.987770077456176, "grad_norm": 1.3823235034942627, "learning_rate": 0.0003767580513656747, "loss": 3.4831, "step": 73410 }, { "epoch": 4.988109797526838, "grad_norm": 1.3088903427124023, "learning_rate": 0.00037671558635684195, "loss": 3.1911, "step": 73415 }, { "epoch": 4.9884495175975, "grad_norm": 1.1909000873565674, "learning_rate": 0.0003766731213480092, "loss": 3.371, "step": 73420 }, { "epoch": 4.988789237668161, "grad_norm": 1.4753191471099854, 
"learning_rate": 0.00037663065633917656, "loss": 3.561, "step": 73425 }, { "epoch": 4.989128957738823, "grad_norm": 1.4439624547958374, "learning_rate": 0.00037658819133034384, "loss": 3.494, "step": 73430 }, { "epoch": 4.989468677809485, "grad_norm": 1.3638055324554443, "learning_rate": 0.00037654572632151107, "loss": 3.551, "step": 73435 }, { "epoch": 4.989808397880147, "grad_norm": 1.0783199071884155, "learning_rate": 0.00037650326131267835, "loss": 3.5242, "step": 73440 }, { "epoch": 4.990148117950809, "grad_norm": 1.3848947286605835, "learning_rate": 0.0003764607963038457, "loss": 3.5927, "step": 73445 }, { "epoch": 4.990487838021471, "grad_norm": 0.9640660881996155, "learning_rate": 0.0003764183312950129, "loss": 3.1866, "step": 73450 }, { "epoch": 4.990827558092132, "grad_norm": 1.3942949771881104, "learning_rate": 0.0003763758662861802, "loss": 3.2914, "step": 73455 }, { "epoch": 4.991167278162794, "grad_norm": 1.0684891939163208, "learning_rate": 0.0003763334012773475, "loss": 3.3894, "step": 73460 }, { "epoch": 4.991506998233456, "grad_norm": 1.4901292324066162, "learning_rate": 0.00037629093626851475, "loss": 3.1533, "step": 73465 }, { "epoch": 4.991846718304117, "grad_norm": 1.2301275730133057, "learning_rate": 0.00037624847125968203, "loss": 3.3782, "step": 73470 }, { "epoch": 4.992186438374779, "grad_norm": 1.3678117990493774, "learning_rate": 0.0003762060062508493, "loss": 3.4644, "step": 73475 }, { "epoch": 4.9925261584454415, "grad_norm": 1.0749223232269287, "learning_rate": 0.0003761635412420166, "loss": 3.4953, "step": 73480 }, { "epoch": 4.992865878516103, "grad_norm": 1.051117181777954, "learning_rate": 0.00037612107623318387, "loss": 3.5138, "step": 73485 }, { "epoch": 4.993205598586765, "grad_norm": 1.5044785737991333, "learning_rate": 0.00037607861122435115, "loss": 3.499, "step": 73490 }, { "epoch": 4.993545318657426, "grad_norm": 1.0242911577224731, "learning_rate": 0.00037603614621551843, "loss": 3.5133, "step": 73495 }, { "epoch": 
4.993885038728088, "grad_norm": 1.4422082901000977, "learning_rate": 0.0003759936812066857, "loss": 3.3041, "step": 73500 }, { "epoch": 4.99422475879875, "grad_norm": 1.2191102504730225, "learning_rate": 0.000375951216197853, "loss": 3.2261, "step": 73505 }, { "epoch": 4.994564478869411, "grad_norm": 1.2159924507141113, "learning_rate": 0.0003759087511890202, "loss": 3.5184, "step": 73510 }, { "epoch": 4.994904198940073, "grad_norm": 1.417171835899353, "learning_rate": 0.00037586628618018755, "loss": 3.4714, "step": 73515 }, { "epoch": 4.995243919010735, "grad_norm": 1.390851616859436, "learning_rate": 0.00037582382117135483, "loss": 3.3447, "step": 73520 }, { "epoch": 4.995583639081397, "grad_norm": 1.3433926105499268, "learning_rate": 0.00037578135616252205, "loss": 3.2127, "step": 73525 }, { "epoch": 4.995923359152059, "grad_norm": 1.4165306091308594, "learning_rate": 0.0003757388911536894, "loss": 3.4233, "step": 73530 }, { "epoch": 4.996263079222721, "grad_norm": 1.2594835758209229, "learning_rate": 0.00037569642614485667, "loss": 3.3639, "step": 73535 }, { "epoch": 4.996602799293382, "grad_norm": 1.1396369934082031, "learning_rate": 0.0003756539611360239, "loss": 3.4017, "step": 73540 }, { "epoch": 4.996942519364044, "grad_norm": 1.2281851768493652, "learning_rate": 0.0003756114961271912, "loss": 3.4787, "step": 73545 }, { "epoch": 4.997282239434706, "grad_norm": 1.4486175775527954, "learning_rate": 0.0003755690311183585, "loss": 3.4554, "step": 73550 }, { "epoch": 4.997621959505367, "grad_norm": 1.2251664400100708, "learning_rate": 0.00037552656610952573, "loss": 3.4881, "step": 73555 }, { "epoch": 4.997961679576029, "grad_norm": 1.1807894706726074, "learning_rate": 0.000375484101100693, "loss": 3.4957, "step": 73560 }, { "epoch": 4.998301399646691, "grad_norm": 1.3389661312103271, "learning_rate": 0.00037544163609186035, "loss": 3.1375, "step": 73565 }, { "epoch": 4.998641119717353, "grad_norm": 1.1071044206619263, "learning_rate": 0.0003753991710830276, 
"loss": 3.4226, "step": 73570 }, { "epoch": 4.998980839788015, "grad_norm": 1.2521737813949585, "learning_rate": 0.00037535670607419486, "loss": 3.2485, "step": 73575 }, { "epoch": 4.999320559858677, "grad_norm": 1.1589133739471436, "learning_rate": 0.00037531424106536214, "loss": 3.3724, "step": 73580 }, { "epoch": 4.999660279929338, "grad_norm": 1.455511450767517, "learning_rate": 0.0003752717760565294, "loss": 3.5418, "step": 73585 }, { "epoch": 5.0, "grad_norm": 2.790072202682495, "learning_rate": 0.0003752293110476967, "loss": 3.4018, "step": 73590 }, { "epoch": 5.0, "eval_bertscore": { "f1": 0.8400825068913454, "precision": 0.8440104546398141, "recall": 0.8369823986562789 }, "eval_bleu_4": 0.014557218152139464, "eval_exact_match": 0.0002907258455276674, "eval_loss": 3.3806118965148926, "eval_meteor": 0.08706889505599011, "eval_rouge": { "rouge1": 0.12386769153109675, "rouge2": 0.01767526577973924, "rougeL": 0.10719326994011733, "rougeLsum": 0.1072339619960586 }, "eval_runtime": 1786.4225, "eval_samples_per_second": 5.776, "eval_steps_per_second": 0.722, "step": 73590 }, { "epoch": 5.000339720070662, "grad_norm": 1.3096387386322021, "learning_rate": 0.000375186846038864, "loss": 3.3243, "step": 73595 }, { "epoch": 5.000679440141323, "grad_norm": 1.1611895561218262, "learning_rate": 0.0003751443810300313, "loss": 3.2403, "step": 73600 }, { "epoch": 5.001019160211985, "grad_norm": 1.2227673530578613, "learning_rate": 0.00037510191602119854, "loss": 3.4868, "step": 73605 }, { "epoch": 5.001358880282647, "grad_norm": 1.79432213306427, "learning_rate": 0.0003750594510123658, "loss": 3.4598, "step": 73610 }, { "epoch": 5.001698600353309, "grad_norm": 1.3164585828781128, "learning_rate": 0.0003750169860035331, "loss": 3.3447, "step": 73615 }, { "epoch": 5.002038320423971, "grad_norm": 1.2648087739944458, "learning_rate": 0.0003749745209947004, "loss": 3.1905, "step": 73620 }, { "epoch": 5.002378040494633, "grad_norm": 1.2519415616989136, "learning_rate": 
0.00037493205598586766, "loss": 3.4477, "step": 73625 }, { "epoch": 5.002717760565294, "grad_norm": 1.2478607892990112, "learning_rate": 0.00037488959097703494, "loss": 3.1407, "step": 73630 }, { "epoch": 5.003057480635956, "grad_norm": 1.090532660484314, "learning_rate": 0.0003748471259682022, "loss": 3.4527, "step": 73635 }, { "epoch": 5.003397200706618, "grad_norm": 1.2839094400405884, "learning_rate": 0.0003748046609593695, "loss": 3.1621, "step": 73640 }, { "epoch": 5.003736920777279, "grad_norm": 1.4212558269500732, "learning_rate": 0.0003747621959505368, "loss": 3.2037, "step": 73645 }, { "epoch": 5.004076640847941, "grad_norm": 1.4028037786483765, "learning_rate": 0.000374719730941704, "loss": 3.3682, "step": 73650 }, { "epoch": 5.004416360918603, "grad_norm": 1.2114408016204834, "learning_rate": 0.00037467726593287134, "loss": 3.3489, "step": 73655 }, { "epoch": 5.004756080989265, "grad_norm": 1.4589424133300781, "learning_rate": 0.0003746348009240386, "loss": 3.4116, "step": 73660 }, { "epoch": 5.005095801059927, "grad_norm": 1.3230953216552734, "learning_rate": 0.00037459233591520584, "loss": 3.3977, "step": 73665 }, { "epoch": 5.005435521130589, "grad_norm": 1.3580244779586792, "learning_rate": 0.0003745498709063732, "loss": 3.1738, "step": 73670 }, { "epoch": 5.00577524120125, "grad_norm": 1.0340006351470947, "learning_rate": 0.00037450740589754046, "loss": 3.4408, "step": 73675 }, { "epoch": 5.006114961271912, "grad_norm": 1.1185052394866943, "learning_rate": 0.0003744649408887077, "loss": 3.2756, "step": 73680 }, { "epoch": 5.006454681342574, "grad_norm": 1.2645926475524902, "learning_rate": 0.000374422475879875, "loss": 3.35, "step": 73685 }, { "epoch": 5.006794401413235, "grad_norm": 1.291805386543274, "learning_rate": 0.0003743800108710423, "loss": 3.2029, "step": 73690 }, { "epoch": 5.007134121483897, "grad_norm": 1.880858063697815, "learning_rate": 0.0003743375458622095, "loss": 3.336, "step": 73695 }, { "epoch": 5.007473841554559, "grad_norm": 
1.1648019552230835, "learning_rate": 0.0003742950808533768, "loss": 3.1861, "step": 73700 }, { "epoch": 5.007813561625221, "grad_norm": 1.3570412397384644, "learning_rate": 0.00037425261584454414, "loss": 3.4186, "step": 73705 }, { "epoch": 5.008153281695883, "grad_norm": 1.3423806428909302, "learning_rate": 0.00037421015083571136, "loss": 3.3449, "step": 73710 }, { "epoch": 5.008493001766545, "grad_norm": 1.5413732528686523, "learning_rate": 0.00037416768582687864, "loss": 3.4303, "step": 73715 }, { "epoch": 5.008832721837206, "grad_norm": 1.101129412651062, "learning_rate": 0.000374125220818046, "loss": 3.3116, "step": 73720 }, { "epoch": 5.009172441907868, "grad_norm": 1.1557447910308838, "learning_rate": 0.0003740827558092132, "loss": 3.208, "step": 73725 }, { "epoch": 5.00951216197853, "grad_norm": 1.273701548576355, "learning_rate": 0.0003740402908003805, "loss": 3.1369, "step": 73730 }, { "epoch": 5.009851882049191, "grad_norm": 1.1108012199401855, "learning_rate": 0.00037399782579154776, "loss": 3.3439, "step": 73735 }, { "epoch": 5.010191602119853, "grad_norm": 1.1308352947235107, "learning_rate": 0.00037395536078271504, "loss": 3.269, "step": 73740 }, { "epoch": 5.0105313221905154, "grad_norm": 1.2657833099365234, "learning_rate": 0.0003739128957738823, "loss": 3.0622, "step": 73745 }, { "epoch": 5.010871042261177, "grad_norm": 1.2678593397140503, "learning_rate": 0.0003738704307650496, "loss": 3.0782, "step": 73750 }, { "epoch": 5.011210762331839, "grad_norm": 1.3646059036254883, "learning_rate": 0.0003738279657562169, "loss": 3.2587, "step": 73755 }, { "epoch": 5.0115504824025, "grad_norm": 1.3213744163513184, "learning_rate": 0.00037378550074738416, "loss": 3.2364, "step": 73760 }, { "epoch": 5.011890202473162, "grad_norm": 1.7644445896148682, "learning_rate": 0.00037374303573855144, "loss": 3.3315, "step": 73765 }, { "epoch": 5.012229922543824, "grad_norm": 1.530515193939209, "learning_rate": 0.00037370057072971867, "loss": 2.9912, "step": 73770 }, { 
"epoch": 5.012569642614485, "grad_norm": 1.2085241079330444, "learning_rate": 0.000373658105720886, "loss": 3.1771, "step": 73775 }, { "epoch": 5.012909362685147, "grad_norm": 1.4598581790924072, "learning_rate": 0.0003736156407120533, "loss": 3.3988, "step": 73780 }, { "epoch": 5.013249082755809, "grad_norm": 1.1374602317810059, "learning_rate": 0.00037357317570322056, "loss": 3.3514, "step": 73785 }, { "epoch": 5.013588802826471, "grad_norm": 1.3571542501449585, "learning_rate": 0.00037353071069438784, "loss": 3.2454, "step": 73790 }, { "epoch": 5.013928522897133, "grad_norm": 1.4737836122512817, "learning_rate": 0.0003734882456855551, "loss": 3.4189, "step": 73795 }, { "epoch": 5.014268242967795, "grad_norm": 1.081951379776001, "learning_rate": 0.0003734457806767224, "loss": 3.3139, "step": 73800 }, { "epoch": 5.014607963038456, "grad_norm": 1.2938190698623657, "learning_rate": 0.00037340331566788963, "loss": 3.0154, "step": 73805 }, { "epoch": 5.014947683109118, "grad_norm": 1.109570026397705, "learning_rate": 0.00037336085065905696, "loss": 3.3723, "step": 73810 }, { "epoch": 5.01528740317978, "grad_norm": 1.5406062602996826, "learning_rate": 0.00037331838565022424, "loss": 3.2086, "step": 73815 }, { "epoch": 5.015627123250441, "grad_norm": 1.9372460842132568, "learning_rate": 0.00037327592064139147, "loss": 3.3361, "step": 73820 }, { "epoch": 5.015966843321103, "grad_norm": 1.271420955657959, "learning_rate": 0.0003732334556325588, "loss": 3.1681, "step": 73825 }, { "epoch": 5.016306563391765, "grad_norm": 1.229584813117981, "learning_rate": 0.0003731909906237261, "loss": 3.3319, "step": 73830 }, { "epoch": 5.016646283462427, "grad_norm": 1.3020422458648682, "learning_rate": 0.0003731485256148933, "loss": 3.3296, "step": 73835 }, { "epoch": 5.016986003533089, "grad_norm": 1.1851718425750732, "learning_rate": 0.0003731060606060606, "loss": 3.1205, "step": 73840 }, { "epoch": 5.017325723603751, "grad_norm": 1.2986464500427246, "learning_rate": 
0.0003730635955972279, "loss": 3.1684, "step": 73845 }, { "epoch": 5.017665443674412, "grad_norm": 1.2599520683288574, "learning_rate": 0.00037302113058839515, "loss": 3.2489, "step": 73850 }, { "epoch": 5.018005163745074, "grad_norm": 1.4896273612976074, "learning_rate": 0.00037297866557956243, "loss": 3.302, "step": 73855 }, { "epoch": 5.018344883815736, "grad_norm": 1.1373552083969116, "learning_rate": 0.00037293620057072977, "loss": 3.1956, "step": 73860 }, { "epoch": 5.018684603886397, "grad_norm": 1.4760342836380005, "learning_rate": 0.000372893735561897, "loss": 3.2366, "step": 73865 }, { "epoch": 5.019024323957059, "grad_norm": 1.2167174816131592, "learning_rate": 0.00037285127055306427, "loss": 3.401, "step": 73870 }, { "epoch": 5.019364044027721, "grad_norm": 1.1604167222976685, "learning_rate": 0.00037280880554423155, "loss": 3.4209, "step": 73875 }, { "epoch": 5.019703764098383, "grad_norm": 1.3042091131210327, "learning_rate": 0.00037276634053539883, "loss": 3.1664, "step": 73880 }, { "epoch": 5.020043484169045, "grad_norm": 1.5025368928909302, "learning_rate": 0.0003727238755265661, "loss": 3.2145, "step": 73885 }, { "epoch": 5.020383204239707, "grad_norm": 1.5966334342956543, "learning_rate": 0.0003726814105177334, "loss": 3.3973, "step": 73890 }, { "epoch": 5.020722924310368, "grad_norm": 1.345812201499939, "learning_rate": 0.00037263894550890067, "loss": 3.1614, "step": 73895 }, { "epoch": 5.02106264438103, "grad_norm": 1.7230329513549805, "learning_rate": 0.00037259648050006795, "loss": 3.3407, "step": 73900 }, { "epoch": 5.021402364451692, "grad_norm": 1.174718976020813, "learning_rate": 0.00037255401549123523, "loss": 3.3878, "step": 73905 }, { "epoch": 5.021742084522353, "grad_norm": 1.574466347694397, "learning_rate": 0.00037251155048240246, "loss": 3.2805, "step": 73910 }, { "epoch": 5.022081804593015, "grad_norm": 1.4303926229476929, "learning_rate": 0.0003724690854735698, "loss": 3.3348, "step": 73915 }, { "epoch": 5.022421524663677, 
"grad_norm": 1.1259472370147705, "learning_rate": 0.00037242662046473707, "loss": 3.2265, "step": 73920 }, { "epoch": 5.022761244734339, "grad_norm": 1.374632477760315, "learning_rate": 0.0003723841554559043, "loss": 3.2308, "step": 73925 }, { "epoch": 5.023100964805001, "grad_norm": 1.0396883487701416, "learning_rate": 0.00037234169044707163, "loss": 3.2775, "step": 73930 }, { "epoch": 5.023440684875663, "grad_norm": 1.5274927616119385, "learning_rate": 0.0003722992254382389, "loss": 3.5913, "step": 73935 }, { "epoch": 5.023780404946324, "grad_norm": 1.237417459487915, "learning_rate": 0.00037225676042940614, "loss": 3.1688, "step": 73940 }, { "epoch": 5.024120125016986, "grad_norm": 1.0087800025939941, "learning_rate": 0.00037221429542057347, "loss": 3.0419, "step": 73945 }, { "epoch": 5.024459845087648, "grad_norm": 1.0967172384262085, "learning_rate": 0.00037217183041174075, "loss": 3.072, "step": 73950 }, { "epoch": 5.024799565158309, "grad_norm": 1.3215075731277466, "learning_rate": 0.00037212936540290803, "loss": 3.1345, "step": 73955 }, { "epoch": 5.025139285228971, "grad_norm": 1.0656404495239258, "learning_rate": 0.00037208690039407526, "loss": 3.3354, "step": 73960 }, { "epoch": 5.025479005299633, "grad_norm": 1.2503769397735596, "learning_rate": 0.0003720444353852426, "loss": 3.5022, "step": 73965 }, { "epoch": 5.025818725370295, "grad_norm": 1.1989860534667969, "learning_rate": 0.00037200197037640987, "loss": 3.4364, "step": 73970 }, { "epoch": 5.026158445440957, "grad_norm": 1.0396504402160645, "learning_rate": 0.0003719595053675771, "loss": 3.2748, "step": 73975 }, { "epoch": 5.026498165511619, "grad_norm": 1.3780397176742554, "learning_rate": 0.00037191704035874443, "loss": 3.2901, "step": 73980 }, { "epoch": 5.02683788558228, "grad_norm": 1.1388542652130127, "learning_rate": 0.0003718745753499117, "loss": 3.1878, "step": 73985 }, { "epoch": 5.027177605652942, "grad_norm": 1.273654580116272, "learning_rate": 0.00037183211034107894, "loss": 3.2243, 
"step": 73990 }, { "epoch": 5.027517325723604, "grad_norm": 1.031184434890747, "learning_rate": 0.0003717896453322462, "loss": 3.2285, "step": 73995 }, { "epoch": 5.027857045794265, "grad_norm": 1.161658525466919, "learning_rate": 0.00037174718032341355, "loss": 3.1358, "step": 74000 }, { "epoch": 5.028196765864927, "grad_norm": 1.4053397178649902, "learning_rate": 0.0003717047153145808, "loss": 3.5013, "step": 74005 }, { "epoch": 5.028536485935589, "grad_norm": 1.57672119140625, "learning_rate": 0.00037166225030574806, "loss": 3.2863, "step": 74010 }, { "epoch": 5.028876206006251, "grad_norm": 1.6417772769927979, "learning_rate": 0.0003716197852969154, "loss": 3.2421, "step": 74015 }, { "epoch": 5.029215926076913, "grad_norm": 1.4307873249053955, "learning_rate": 0.0003715773202880826, "loss": 3.1542, "step": 74020 }, { "epoch": 5.029555646147575, "grad_norm": 1.5073349475860596, "learning_rate": 0.0003715348552792499, "loss": 3.4775, "step": 74025 }, { "epoch": 5.029895366218236, "grad_norm": 1.3037407398223877, "learning_rate": 0.0003714923902704172, "loss": 3.2845, "step": 74030 }, { "epoch": 5.030235086288898, "grad_norm": 1.1249198913574219, "learning_rate": 0.00037144992526158446, "loss": 3.286, "step": 74035 }, { "epoch": 5.03057480635956, "grad_norm": 1.405970573425293, "learning_rate": 0.00037140746025275174, "loss": 3.2681, "step": 74040 }, { "epoch": 5.030914526430221, "grad_norm": 1.509360671043396, "learning_rate": 0.000371364995243919, "loss": 3.0472, "step": 74045 }, { "epoch": 5.031254246500883, "grad_norm": 1.4935965538024902, "learning_rate": 0.0003713225302350863, "loss": 3.5276, "step": 74050 }, { "epoch": 5.0315939665715455, "grad_norm": 1.2223016023635864, "learning_rate": 0.0003712800652262536, "loss": 2.9747, "step": 74055 }, { "epoch": 5.031933686642207, "grad_norm": 1.8164136409759521, "learning_rate": 0.00037123760021742086, "loss": 3.0553, "step": 74060 }, { "epoch": 5.032273406712869, "grad_norm": 1.2018605470657349, "learning_rate": 
0.0003711951352085881, "loss": 3.4563, "step": 74065 }, { "epoch": 5.032613126783531, "grad_norm": 1.4513052701950073, "learning_rate": 0.0003711526701997554, "loss": 3.1066, "step": 74070 }, { "epoch": 5.032952846854192, "grad_norm": 1.6418076753616333, "learning_rate": 0.0003711102051909227, "loss": 3.2228, "step": 74075 }, { "epoch": 5.033292566924854, "grad_norm": 1.5404938459396362, "learning_rate": 0.0003710677401820899, "loss": 2.9791, "step": 74080 }, { "epoch": 5.033632286995516, "grad_norm": 1.4821507930755615, "learning_rate": 0.00037102527517325726, "loss": 3.3505, "step": 74085 }, { "epoch": 5.033972007066177, "grad_norm": 1.6947373151779175, "learning_rate": 0.00037098281016442454, "loss": 3.1643, "step": 74090 }, { "epoch": 5.034311727136839, "grad_norm": 1.2038021087646484, "learning_rate": 0.00037094034515559177, "loss": 3.5904, "step": 74095 }, { "epoch": 5.0346514472075015, "grad_norm": 1.2496389150619507, "learning_rate": 0.00037089788014675905, "loss": 3.4192, "step": 74100 }, { "epoch": 5.034991167278163, "grad_norm": 2.1697802543640137, "learning_rate": 0.0003708554151379264, "loss": 3.3557, "step": 74105 }, { "epoch": 5.035330887348825, "grad_norm": 1.3693020343780518, "learning_rate": 0.0003708129501290936, "loss": 3.3305, "step": 74110 }, { "epoch": 5.035670607419486, "grad_norm": 1.3027647733688354, "learning_rate": 0.0003707704851202609, "loss": 3.6406, "step": 74115 }, { "epoch": 5.036010327490148, "grad_norm": 0.9897179007530212, "learning_rate": 0.0003707280201114282, "loss": 3.0765, "step": 74120 }, { "epoch": 5.03635004756081, "grad_norm": 1.3077951669692993, "learning_rate": 0.0003706855551025955, "loss": 3.348, "step": 74125 }, { "epoch": 5.036689767631471, "grad_norm": 1.3821063041687012, "learning_rate": 0.0003706430900937627, "loss": 3.4314, "step": 74130 }, { "epoch": 5.037029487702133, "grad_norm": 1.638697624206543, "learning_rate": 0.00037060062508493, "loss": 3.1117, "step": 74135 }, { "epoch": 5.037369207772795, 
"grad_norm": 1.34154212474823, "learning_rate": 0.00037055816007609734, "loss": 3.3776, "step": 74140 }, { "epoch": 5.037708927843457, "grad_norm": 1.3321692943572998, "learning_rate": 0.00037051569506726457, "loss": 3.3709, "step": 74145 }, { "epoch": 5.038048647914119, "grad_norm": 1.067611575126648, "learning_rate": 0.00037047323005843185, "loss": 3.3024, "step": 74150 }, { "epoch": 5.038388367984781, "grad_norm": 1.192905068397522, "learning_rate": 0.0003704307650495992, "loss": 3.3526, "step": 74155 }, { "epoch": 5.038728088055442, "grad_norm": 1.7053968906402588, "learning_rate": 0.0003703883000407664, "loss": 3.141, "step": 74160 }, { "epoch": 5.039067808126104, "grad_norm": 1.33283269405365, "learning_rate": 0.0003703458350319337, "loss": 3.682, "step": 74165 }, { "epoch": 5.039407528196766, "grad_norm": 1.2498801946640015, "learning_rate": 0.00037030337002310097, "loss": 3.3385, "step": 74170 }, { "epoch": 5.039747248267427, "grad_norm": 1.9767193794250488, "learning_rate": 0.00037026090501426825, "loss": 3.5896, "step": 74175 }, { "epoch": 5.040086968338089, "grad_norm": 1.2013306617736816, "learning_rate": 0.00037021844000543553, "loss": 3.2864, "step": 74180 }, { "epoch": 5.040426688408751, "grad_norm": 0.9990150928497314, "learning_rate": 0.0003701759749966028, "loss": 3.2193, "step": 74185 }, { "epoch": 5.040766408479413, "grad_norm": 1.2592006921768188, "learning_rate": 0.0003701335099877701, "loss": 3.4127, "step": 74190 }, { "epoch": 5.041106128550075, "grad_norm": 1.3712149858474731, "learning_rate": 0.00037009104497893737, "loss": 3.2771, "step": 74195 }, { "epoch": 5.041445848620737, "grad_norm": 1.3287980556488037, "learning_rate": 0.00037004857997010465, "loss": 3.5373, "step": 74200 }, { "epoch": 5.041785568691398, "grad_norm": 1.1993587017059326, "learning_rate": 0.0003700061149612719, "loss": 3.28, "step": 74205 }, { "epoch": 5.04212528876206, "grad_norm": 1.5448882579803467, "learning_rate": 0.0003699636499524392, "loss": 3.6201, "step": 
74210 }, { "epoch": 5.042465008832722, "grad_norm": 1.2903823852539062, "learning_rate": 0.0003699211849436065, "loss": 3.3394, "step": 74215 }, { "epoch": 5.042804728903383, "grad_norm": 1.166286587715149, "learning_rate": 0.0003698787199347737, "loss": 3.4512, "step": 74220 }, { "epoch": 5.043144448974045, "grad_norm": 1.4320658445358276, "learning_rate": 0.00036983625492594105, "loss": 3.27, "step": 74225 }, { "epoch": 5.043484169044707, "grad_norm": 1.992785930633545, "learning_rate": 0.00036979378991710833, "loss": 3.4365, "step": 74230 }, { "epoch": 5.043823889115369, "grad_norm": 1.09660804271698, "learning_rate": 0.00036975132490827555, "loss": 3.1497, "step": 74235 }, { "epoch": 5.044163609186031, "grad_norm": 1.4450690746307373, "learning_rate": 0.0003697088598994429, "loss": 2.9317, "step": 74240 }, { "epoch": 5.044503329256693, "grad_norm": 1.809114694595337, "learning_rate": 0.00036966639489061017, "loss": 3.301, "step": 74245 }, { "epoch": 5.044843049327354, "grad_norm": 1.4467123746871948, "learning_rate": 0.0003696239298817774, "loss": 3.25, "step": 74250 }, { "epoch": 5.045182769398016, "grad_norm": 1.2907294034957886, "learning_rate": 0.0003695814648729447, "loss": 3.4981, "step": 74255 }, { "epoch": 5.045522489468678, "grad_norm": 1.358654260635376, "learning_rate": 0.000369538999864112, "loss": 3.3432, "step": 74260 }, { "epoch": 5.045862209539339, "grad_norm": 1.3526705503463745, "learning_rate": 0.00036949653485527923, "loss": 3.4384, "step": 74265 }, { "epoch": 5.046201929610001, "grad_norm": 1.2653939723968506, "learning_rate": 0.0003694540698464465, "loss": 3.3717, "step": 74270 }, { "epoch": 5.046541649680663, "grad_norm": 1.250239372253418, "learning_rate": 0.00036941160483761385, "loss": 3.3471, "step": 74275 }, { "epoch": 5.046881369751325, "grad_norm": 1.437057375907898, "learning_rate": 0.0003693691398287811, "loss": 3.0176, "step": 74280 }, { "epoch": 5.047221089821987, "grad_norm": 1.2809194326400757, "learning_rate": 
0.00036932667481994836, "loss": 3.0908, "step": 74285 }, { "epoch": 5.047560809892649, "grad_norm": 1.148973822593689, "learning_rate": 0.00036928420981111564, "loss": 3.3544, "step": 74290 }, { "epoch": 5.04790052996331, "grad_norm": 1.2016514539718628, "learning_rate": 0.00036924174480228297, "loss": 3.2211, "step": 74295 }, { "epoch": 5.048240250033972, "grad_norm": 1.2155365943908691, "learning_rate": 0.0003691992797934502, "loss": 3.5807, "step": 74300 }, { "epoch": 5.048579970104634, "grad_norm": 1.9651442766189575, "learning_rate": 0.0003691568147846175, "loss": 3.4538, "step": 74305 }, { "epoch": 5.048919690175295, "grad_norm": 1.335036277770996, "learning_rate": 0.0003691143497757848, "loss": 3.6963, "step": 74310 }, { "epoch": 5.049259410245957, "grad_norm": 1.4028456211090088, "learning_rate": 0.00036907188476695204, "loss": 3.3951, "step": 74315 }, { "epoch": 5.0495991303166194, "grad_norm": 1.1991201639175415, "learning_rate": 0.0003690294197581193, "loss": 3.5117, "step": 74320 }, { "epoch": 5.049938850387281, "grad_norm": 0.9742298126220703, "learning_rate": 0.0003689869547492866, "loss": 3.3059, "step": 74325 }, { "epoch": 5.050278570457943, "grad_norm": 1.2979520559310913, "learning_rate": 0.0003689444897404539, "loss": 3.0653, "step": 74330 }, { "epoch": 5.050618290528605, "grad_norm": 1.260608434677124, "learning_rate": 0.00036890202473162116, "loss": 3.4276, "step": 74335 }, { "epoch": 5.050958010599266, "grad_norm": 1.3156096935272217, "learning_rate": 0.00036885955972278844, "loss": 3.1885, "step": 74340 }, { "epoch": 5.051297730669928, "grad_norm": 1.4455454349517822, "learning_rate": 0.0003688170947139557, "loss": 3.0378, "step": 74345 }, { "epoch": 5.05163745074059, "grad_norm": 1.212362289428711, "learning_rate": 0.000368774629705123, "loss": 3.2911, "step": 74350 }, { "epoch": 5.051977170811251, "grad_norm": 1.1634610891342163, "learning_rate": 0.0003687321646962903, "loss": 3.435, "step": 74355 }, { "epoch": 5.052316890881913, 
"grad_norm": 1.1145908832550049, "learning_rate": 0.0003686896996874575, "loss": 3.3133, "step": 74360 }, { "epoch": 5.0526566109525755, "grad_norm": 1.2184144258499146, "learning_rate": 0.00036864723467862484, "loss": 3.1892, "step": 74365 }, { "epoch": 5.052996331023237, "grad_norm": 1.3072564601898193, "learning_rate": 0.0003686047696697921, "loss": 3.2827, "step": 74370 }, { "epoch": 5.053336051093899, "grad_norm": 1.4544939994812012, "learning_rate": 0.00036856230466095934, "loss": 3.2875, "step": 74375 }, { "epoch": 5.053675771164561, "grad_norm": 1.380309820175171, "learning_rate": 0.0003685198396521267, "loss": 2.9973, "step": 74380 }, { "epoch": 5.054015491235222, "grad_norm": 1.7774450778961182, "learning_rate": 0.00036847737464329396, "loss": 3.1195, "step": 74385 }, { "epoch": 5.054355211305884, "grad_norm": 1.5175777673721313, "learning_rate": 0.0003684349096344612, "loss": 3.4712, "step": 74390 }, { "epoch": 5.054694931376546, "grad_norm": 1.1972925662994385, "learning_rate": 0.00036839244462562846, "loss": 3.3161, "step": 74395 }, { "epoch": 5.055034651447207, "grad_norm": 1.3620975017547607, "learning_rate": 0.0003683499796167958, "loss": 2.9833, "step": 74400 }, { "epoch": 5.055374371517869, "grad_norm": 1.3960022926330566, "learning_rate": 0.000368307514607963, "loss": 3.2521, "step": 74405 }, { "epoch": 5.0557140915885315, "grad_norm": 1.1711255311965942, "learning_rate": 0.0003682650495991303, "loss": 3.2688, "step": 74410 }, { "epoch": 5.056053811659193, "grad_norm": 1.1848437786102295, "learning_rate": 0.00036822258459029764, "loss": 3.3912, "step": 74415 }, { "epoch": 5.056393531729855, "grad_norm": 1.2535878419876099, "learning_rate": 0.00036818011958146486, "loss": 3.409, "step": 74420 }, { "epoch": 5.056733251800517, "grad_norm": 1.2341421842575073, "learning_rate": 0.00036813765457263214, "loss": 3.6436, "step": 74425 }, { "epoch": 5.057072971871178, "grad_norm": 1.2742996215820312, "learning_rate": 0.0003680951895637994, "loss": 3.4213, 
"step": 74430 }, { "epoch": 5.05741269194184, "grad_norm": 1.3263784646987915, "learning_rate": 0.0003680527245549667, "loss": 3.2571, "step": 74435 }, { "epoch": 5.057752412012501, "grad_norm": 1.2589560747146606, "learning_rate": 0.000368010259546134, "loss": 3.2897, "step": 74440 }, { "epoch": 5.058092132083163, "grad_norm": 1.3367152214050293, "learning_rate": 0.00036796779453730126, "loss": 3.2822, "step": 74445 }, { "epoch": 5.058431852153825, "grad_norm": 1.568966269493103, "learning_rate": 0.00036792532952846854, "loss": 3.0548, "step": 74450 }, { "epoch": 5.058771572224487, "grad_norm": 1.1858922243118286, "learning_rate": 0.0003678828645196358, "loss": 3.2824, "step": 74455 }, { "epoch": 5.059111292295149, "grad_norm": 1.5279293060302734, "learning_rate": 0.0003678403995108031, "loss": 3.3452, "step": 74460 }, { "epoch": 5.059451012365811, "grad_norm": 1.4749168157577515, "learning_rate": 0.0003677979345019704, "loss": 3.4002, "step": 74465 }, { "epoch": 5.059790732436472, "grad_norm": 1.1444640159606934, "learning_rate": 0.00036775546949313766, "loss": 3.1443, "step": 74470 }, { "epoch": 5.060130452507134, "grad_norm": 1.277409553527832, "learning_rate": 0.00036771300448430494, "loss": 3.4072, "step": 74475 }, { "epoch": 5.060470172577796, "grad_norm": 1.6459317207336426, "learning_rate": 0.0003676705394754722, "loss": 3.5063, "step": 74480 }, { "epoch": 5.060809892648457, "grad_norm": 1.2713501453399658, "learning_rate": 0.0003676280744666395, "loss": 3.5529, "step": 74485 }, { "epoch": 5.061149612719119, "grad_norm": 1.635818362236023, "learning_rate": 0.0003675856094578068, "loss": 3.1977, "step": 74490 }, { "epoch": 5.061489332789781, "grad_norm": 1.4140849113464355, "learning_rate": 0.00036754314444897406, "loss": 3.2715, "step": 74495 }, { "epoch": 5.061829052860443, "grad_norm": 1.6633692979812622, "learning_rate": 0.0003675006794401413, "loss": 3.485, "step": 74500 }, { "epoch": 5.062168772931105, "grad_norm": 1.207716464996338, "learning_rate": 
0.0003674582144313086, "loss": 3.0466, "step": 74505 }, { "epoch": 5.062508493001767, "grad_norm": 1.404072880744934, "learning_rate": 0.0003674157494224759, "loss": 3.3243, "step": 74510 }, { "epoch": 5.062848213072428, "grad_norm": 1.275320053100586, "learning_rate": 0.00036737328441364313, "loss": 3.3318, "step": 74515 }, { "epoch": 5.06318793314309, "grad_norm": 1.4946315288543701, "learning_rate": 0.00036733081940481046, "loss": 3.4394, "step": 74520 }, { "epoch": 5.063527653213752, "grad_norm": 1.2627123594284058, "learning_rate": 0.00036728835439597774, "loss": 2.94, "step": 74525 }, { "epoch": 5.063867373284413, "grad_norm": 1.5216134786605835, "learning_rate": 0.00036724588938714497, "loss": 3.5065, "step": 74530 }, { "epoch": 5.064207093355075, "grad_norm": 1.2524248361587524, "learning_rate": 0.0003672034243783123, "loss": 3.3617, "step": 74535 }, { "epoch": 5.064546813425737, "grad_norm": 1.6872329711914062, "learning_rate": 0.0003671609593694796, "loss": 3.1899, "step": 74540 }, { "epoch": 5.064886533496399, "grad_norm": 1.35062575340271, "learning_rate": 0.0003671184943606468, "loss": 3.3101, "step": 74545 }, { "epoch": 5.065226253567061, "grad_norm": 1.1556540727615356, "learning_rate": 0.0003670760293518141, "loss": 3.2566, "step": 74550 }, { "epoch": 5.065565973637723, "grad_norm": 1.4084374904632568, "learning_rate": 0.0003670335643429814, "loss": 3.3675, "step": 74555 }, { "epoch": 5.065905693708384, "grad_norm": 1.2078161239624023, "learning_rate": 0.00036699109933414865, "loss": 3.1263, "step": 74560 }, { "epoch": 5.066245413779046, "grad_norm": 1.3429648876190186, "learning_rate": 0.00036694863432531593, "loss": 3.4568, "step": 74565 }, { "epoch": 5.066585133849708, "grad_norm": 1.380710244178772, "learning_rate": 0.00036690616931648327, "loss": 3.3142, "step": 74570 }, { "epoch": 5.066924853920369, "grad_norm": 1.6410232782363892, "learning_rate": 0.0003668637043076505, "loss": 3.5276, "step": 74575 }, { "epoch": 5.067264573991031, 
"grad_norm": 1.241692066192627, "learning_rate": 0.00036682123929881777, "loss": 2.9926, "step": 74580 }, { "epoch": 5.067604294061693, "grad_norm": 1.4704455137252808, "learning_rate": 0.00036677877428998505, "loss": 3.4741, "step": 74585 }, { "epoch": 5.067944014132355, "grad_norm": 1.4853860139846802, "learning_rate": 0.00036673630928115233, "loss": 3.2757, "step": 74590 }, { "epoch": 5.068283734203017, "grad_norm": 1.5675740242004395, "learning_rate": 0.0003666938442723196, "loss": 3.3453, "step": 74595 }, { "epoch": 5.068623454273679, "grad_norm": 1.442299723625183, "learning_rate": 0.0003666513792634869, "loss": 3.4834, "step": 74600 }, { "epoch": 5.06896317434434, "grad_norm": 1.1698570251464844, "learning_rate": 0.00036660891425465417, "loss": 3.3427, "step": 74605 }, { "epoch": 5.069302894415002, "grad_norm": 1.2884430885314941, "learning_rate": 0.00036656644924582145, "loss": 3.6263, "step": 74610 }, { "epoch": 5.069642614485664, "grad_norm": 1.042495846748352, "learning_rate": 0.00036652398423698873, "loss": 3.3975, "step": 74615 }, { "epoch": 5.069982334556325, "grad_norm": 1.1059495210647583, "learning_rate": 0.00036648151922815596, "loss": 3.4297, "step": 74620 }, { "epoch": 5.070322054626987, "grad_norm": 1.5528019666671753, "learning_rate": 0.0003664390542193233, "loss": 3.0273, "step": 74625 }, { "epoch": 5.0706617746976494, "grad_norm": 1.4036439657211304, "learning_rate": 0.00036639658921049057, "loss": 3.2199, "step": 74630 }, { "epoch": 5.071001494768311, "grad_norm": 1.4199371337890625, "learning_rate": 0.00036635412420165785, "loss": 3.3707, "step": 74635 }, { "epoch": 5.071341214838973, "grad_norm": 1.4059288501739502, "learning_rate": 0.00036631165919282513, "loss": 3.4153, "step": 74640 }, { "epoch": 5.071680934909635, "grad_norm": 1.4820066690444946, "learning_rate": 0.0003662691941839924, "loss": 3.245, "step": 74645 }, { "epoch": 5.072020654980296, "grad_norm": 1.094780683517456, "learning_rate": 0.0003662267291751597, "loss": 3.2535, 
"step": 74650 }, { "epoch": 5.072360375050958, "grad_norm": 1.1323961019515991, "learning_rate": 0.0003661842641663269, "loss": 3.3174, "step": 74655 }, { "epoch": 5.07270009512162, "grad_norm": 1.6660436391830444, "learning_rate": 0.00036614179915749425, "loss": 3.2715, "step": 74660 }, { "epoch": 5.073039815192281, "grad_norm": 1.0617222785949707, "learning_rate": 0.00036609933414866153, "loss": 3.5536, "step": 74665 }, { "epoch": 5.073379535262943, "grad_norm": 1.1782654523849487, "learning_rate": 0.00036605686913982876, "loss": 3.7699, "step": 74670 }, { "epoch": 5.0737192553336055, "grad_norm": 1.6950256824493408, "learning_rate": 0.0003660144041309961, "loss": 3.0782, "step": 74675 }, { "epoch": 5.074058975404267, "grad_norm": 1.4218188524246216, "learning_rate": 0.00036597193912216337, "loss": 3.2671, "step": 74680 }, { "epoch": 5.074398695474929, "grad_norm": 1.2676154375076294, "learning_rate": 0.0003659294741133306, "loss": 3.3808, "step": 74685 }, { "epoch": 5.074738415545591, "grad_norm": 1.1669819355010986, "learning_rate": 0.0003658870091044979, "loss": 3.0211, "step": 74690 }, { "epoch": 5.075078135616252, "grad_norm": 1.3565444946289062, "learning_rate": 0.0003658445440956652, "loss": 3.0351, "step": 74695 }, { "epoch": 5.075417855686914, "grad_norm": 1.1330783367156982, "learning_rate": 0.00036580207908683244, "loss": 3.2045, "step": 74700 }, { "epoch": 5.075757575757576, "grad_norm": 1.3834350109100342, "learning_rate": 0.0003657596140779997, "loss": 3.2286, "step": 74705 }, { "epoch": 5.076097295828237, "grad_norm": 1.3246060609817505, "learning_rate": 0.00036571714906916705, "loss": 3.307, "step": 74710 }, { "epoch": 5.076437015898899, "grad_norm": 1.5455390214920044, "learning_rate": 0.0003656746840603343, "loss": 3.4477, "step": 74715 }, { "epoch": 5.0767767359695615, "grad_norm": 1.3306742906570435, "learning_rate": 0.00036563221905150156, "loss": 3.3362, "step": 74720 }, { "epoch": 5.077116456040223, "grad_norm": 1.2972912788391113, 
"learning_rate": 0.00036558975404266884, "loss": 3.3356, "step": 74725 }, { "epoch": 5.077456176110885, "grad_norm": 1.2857166528701782, "learning_rate": 0.0003655472890338361, "loss": 3.5305, "step": 74730 }, { "epoch": 5.077795896181547, "grad_norm": 1.331938624382019, "learning_rate": 0.0003655048240250034, "loss": 3.1617, "step": 74735 }, { "epoch": 5.078135616252208, "grad_norm": 1.992009162902832, "learning_rate": 0.0003654623590161707, "loss": 3.3318, "step": 74740 }, { "epoch": 5.07847533632287, "grad_norm": 1.4537208080291748, "learning_rate": 0.00036541989400733796, "loss": 3.2079, "step": 74745 }, { "epoch": 5.078815056393532, "grad_norm": 1.3905574083328247, "learning_rate": 0.00036537742899850524, "loss": 3.2146, "step": 74750 }, { "epoch": 5.079154776464193, "grad_norm": 1.2996125221252441, "learning_rate": 0.0003653349639896725, "loss": 3.4937, "step": 74755 }, { "epoch": 5.079494496534855, "grad_norm": 1.3813766241073608, "learning_rate": 0.00036529249898083975, "loss": 3.3652, "step": 74760 }, { "epoch": 5.0798342166055175, "grad_norm": 1.7567323446273804, "learning_rate": 0.0003652500339720071, "loss": 3.3899, "step": 74765 }, { "epoch": 5.080173936676179, "grad_norm": 1.279044270515442, "learning_rate": 0.00036520756896317436, "loss": 3.3825, "step": 74770 }, { "epoch": 5.080513656746841, "grad_norm": 1.2238235473632812, "learning_rate": 0.0003651651039543416, "loss": 3.456, "step": 74775 }, { "epoch": 5.080853376817503, "grad_norm": 1.6501991748809814, "learning_rate": 0.0003651226389455089, "loss": 3.2413, "step": 74780 }, { "epoch": 5.081193096888164, "grad_norm": 2.2379233837127686, "learning_rate": 0.0003650801739366762, "loss": 3.3037, "step": 74785 }, { "epoch": 5.081532816958826, "grad_norm": 1.4390827417373657, "learning_rate": 0.0003650377089278434, "loss": 3.2679, "step": 74790 }, { "epoch": 5.081872537029487, "grad_norm": 1.396539568901062, "learning_rate": 0.00036499524391901076, "loss": 3.3005, "step": 74795 }, { "epoch": 
5.082212257100149, "grad_norm": 1.0811101198196411, "learning_rate": 0.00036495277891017804, "loss": 3.2372, "step": 74800 }, { "epoch": 5.082551977170811, "grad_norm": 1.1992905139923096, "learning_rate": 0.0003649103139013453, "loss": 3.2494, "step": 74805 }, { "epoch": 5.082891697241473, "grad_norm": 1.4482601881027222, "learning_rate": 0.00036486784889251255, "loss": 3.2665, "step": 74810 }, { "epoch": 5.083231417312135, "grad_norm": 1.5031623840332031, "learning_rate": 0.0003648253838836799, "loss": 3.3798, "step": 74815 }, { "epoch": 5.083571137382797, "grad_norm": 1.345275640487671, "learning_rate": 0.00036478291887484716, "loss": 3.2765, "step": 74820 }, { "epoch": 5.083910857453458, "grad_norm": 1.3167189359664917, "learning_rate": 0.0003647404538660144, "loss": 3.5012, "step": 74825 }, { "epoch": 5.08425057752412, "grad_norm": 1.3860236406326294, "learning_rate": 0.0003646979888571817, "loss": 3.4216, "step": 74830 }, { "epoch": 5.084590297594782, "grad_norm": 1.2459272146224976, "learning_rate": 0.000364655523848349, "loss": 3.3394, "step": 74835 }, { "epoch": 5.084930017665443, "grad_norm": 1.410080909729004, "learning_rate": 0.0003646130588395162, "loss": 3.264, "step": 74840 }, { "epoch": 5.085269737736105, "grad_norm": 1.6384727954864502, "learning_rate": 0.0003645705938306835, "loss": 3.6431, "step": 74845 }, { "epoch": 5.085609457806767, "grad_norm": 1.1419286727905273, "learning_rate": 0.00036452812882185084, "loss": 3.4462, "step": 74850 }, { "epoch": 5.085949177877429, "grad_norm": 1.336077094078064, "learning_rate": 0.00036448566381301807, "loss": 3.0973, "step": 74855 }, { "epoch": 5.086288897948091, "grad_norm": 1.2957842350006104, "learning_rate": 0.00036444319880418535, "loss": 3.2842, "step": 74860 }, { "epoch": 5.086628618018753, "grad_norm": 1.2487293481826782, "learning_rate": 0.0003644007337953527, "loss": 3.302, "step": 74865 }, { "epoch": 5.086968338089414, "grad_norm": 1.2850874662399292, "learning_rate": 0.0003643582687865199, 
"loss": 3.1319, "step": 74870 }, { "epoch": 5.087308058160076, "grad_norm": 1.566379189491272, "learning_rate": 0.0003643158037776872, "loss": 3.2956, "step": 74875 }, { "epoch": 5.087647778230738, "grad_norm": 1.2099751234054565, "learning_rate": 0.00036427333876885447, "loss": 3.5619, "step": 74880 }, { "epoch": 5.087987498301399, "grad_norm": 1.3993216753005981, "learning_rate": 0.00036423087376002175, "loss": 3.2722, "step": 74885 }, { "epoch": 5.088327218372061, "grad_norm": 1.3509286642074585, "learning_rate": 0.00036418840875118903, "loss": 3.1685, "step": 74890 }, { "epoch": 5.088666938442723, "grad_norm": 1.2504760026931763, "learning_rate": 0.0003641459437423563, "loss": 3.0224, "step": 74895 }, { "epoch": 5.089006658513385, "grad_norm": 1.47469162940979, "learning_rate": 0.0003641034787335236, "loss": 3.4339, "step": 74900 }, { "epoch": 5.089346378584047, "grad_norm": 1.2862823009490967, "learning_rate": 0.00036406101372469087, "loss": 3.2319, "step": 74905 }, { "epoch": 5.089686098654709, "grad_norm": 1.4192728996276855, "learning_rate": 0.00036401854871585815, "loss": 3.0903, "step": 74910 }, { "epoch": 5.09002581872537, "grad_norm": 1.1420130729675293, "learning_rate": 0.0003639760837070254, "loss": 3.3354, "step": 74915 }, { "epoch": 5.090365538796032, "grad_norm": 1.1469323635101318, "learning_rate": 0.0003639336186981927, "loss": 3.3295, "step": 74920 }, { "epoch": 5.090705258866694, "grad_norm": 1.2825005054473877, "learning_rate": 0.00036389115368936, "loss": 3.1814, "step": 74925 }, { "epoch": 5.091044978937355, "grad_norm": 1.2429022789001465, "learning_rate": 0.0003638486886805272, "loss": 3.2025, "step": 74930 }, { "epoch": 5.091384699008017, "grad_norm": 1.09634530544281, "learning_rate": 0.00036380622367169455, "loss": 3.2934, "step": 74935 }, { "epoch": 5.0917244190786795, "grad_norm": 1.4691054821014404, "learning_rate": 0.00036376375866286183, "loss": 3.5206, "step": 74940 }, { "epoch": 5.092064139149341, "grad_norm": 1.3019311428070068, 
"learning_rate": 0.00036372129365402905, "loss": 3.164, "step": 74945 }, { "epoch": 5.092403859220003, "grad_norm": 1.2933632135391235, "learning_rate": 0.00036367882864519633, "loss": 3.0667, "step": 74950 }, { "epoch": 5.092743579290665, "grad_norm": 1.426411747932434, "learning_rate": 0.00036363636363636367, "loss": 3.4971, "step": 74955 }, { "epoch": 5.093083299361326, "grad_norm": 1.2080094814300537, "learning_rate": 0.0003635938986275309, "loss": 3.36, "step": 74960 }, { "epoch": 5.093423019431988, "grad_norm": 1.13712739944458, "learning_rate": 0.0003635514336186982, "loss": 3.4218, "step": 74965 }, { "epoch": 5.09376273950265, "grad_norm": 1.048737645149231, "learning_rate": 0.0003635089686098655, "loss": 3.4867, "step": 74970 }, { "epoch": 5.094102459573311, "grad_norm": 1.4410735368728638, "learning_rate": 0.0003634665036010328, "loss": 3.4042, "step": 74975 }, { "epoch": 5.094442179643973, "grad_norm": 1.5027052164077759, "learning_rate": 0.0003634240385922, "loss": 3.0934, "step": 74980 }, { "epoch": 5.0947818997146355, "grad_norm": 1.435143232345581, "learning_rate": 0.0003633815735833673, "loss": 3.4393, "step": 74985 }, { "epoch": 5.095121619785297, "grad_norm": 2.611736536026001, "learning_rate": 0.00036333910857453463, "loss": 3.4539, "step": 74990 }, { "epoch": 5.095461339855959, "grad_norm": 1.0771442651748657, "learning_rate": 0.00036329664356570185, "loss": 3.6429, "step": 74995 }, { "epoch": 5.095801059926621, "grad_norm": 1.293079137802124, "learning_rate": 0.00036325417855686914, "loss": 3.4623, "step": 75000 }, { "epoch": 5.096140779997282, "grad_norm": 1.2047775983810425, "learning_rate": 0.00036321171354803647, "loss": 3.1656, "step": 75005 }, { "epoch": 5.096480500067944, "grad_norm": 1.2377318143844604, "learning_rate": 0.0003631692485392037, "loss": 3.4797, "step": 75010 }, { "epoch": 5.096820220138606, "grad_norm": 1.1927684545516968, "learning_rate": 0.000363126783530371, "loss": 3.4886, "step": 75015 }, { "epoch": 5.097159940209267, 
"grad_norm": 1.8615412712097168, "learning_rate": 0.00036308431852153826, "loss": 3.3701, "step": 75020 }, { "epoch": 5.097499660279929, "grad_norm": 1.1272482872009277, "learning_rate": 0.00036304185351270554, "loss": 3.3768, "step": 75025 }, { "epoch": 5.0978393803505915, "grad_norm": 1.4273320436477661, "learning_rate": 0.0003629993885038728, "loss": 3.1624, "step": 75030 }, { "epoch": 5.098179100421253, "grad_norm": 1.4844063520431519, "learning_rate": 0.0003629569234950401, "loss": 3.1433, "step": 75035 }, { "epoch": 5.098518820491915, "grad_norm": 1.158460259437561, "learning_rate": 0.0003629144584862074, "loss": 3.2662, "step": 75040 }, { "epoch": 5.098858540562577, "grad_norm": 1.0080666542053223, "learning_rate": 0.00036287199347737466, "loss": 3.4554, "step": 75045 }, { "epoch": 5.099198260633238, "grad_norm": 1.584231972694397, "learning_rate": 0.00036282952846854194, "loss": 3.0309, "step": 75050 }, { "epoch": 5.0995379807039, "grad_norm": 1.3914207220077515, "learning_rate": 0.00036278706345970916, "loss": 3.1108, "step": 75055 }, { "epoch": 5.099877700774562, "grad_norm": 1.1592756509780884, "learning_rate": 0.0003627445984508765, "loss": 3.381, "step": 75060 }, { "epoch": 5.100217420845223, "grad_norm": 1.0673444271087646, "learning_rate": 0.0003627021334420438, "loss": 3.2143, "step": 75065 }, { "epoch": 5.100557140915885, "grad_norm": 1.3215738534927368, "learning_rate": 0.000362659668433211, "loss": 3.3189, "step": 75070 }, { "epoch": 5.1008968609865475, "grad_norm": 1.5787386894226074, "learning_rate": 0.00036261720342437834, "loss": 3.3893, "step": 75075 }, { "epoch": 5.101236581057209, "grad_norm": 1.367693305015564, "learning_rate": 0.0003625747384155456, "loss": 3.4247, "step": 75080 }, { "epoch": 5.101576301127871, "grad_norm": 1.56894052028656, "learning_rate": 0.00036253227340671284, "loss": 3.2225, "step": 75085 }, { "epoch": 5.101916021198533, "grad_norm": 1.417846441268921, "learning_rate": 0.0003624898083978802, "loss": 3.518, "step": 
75090 }, { "epoch": 5.102255741269194, "grad_norm": 1.5795506238937378, "learning_rate": 0.00036244734338904746, "loss": 3.2769, "step": 75095 }, { "epoch": 5.102595461339856, "grad_norm": 1.9495452642440796, "learning_rate": 0.0003624048783802147, "loss": 3.3671, "step": 75100 }, { "epoch": 5.102935181410518, "grad_norm": 1.341369867324829, "learning_rate": 0.00036236241337138196, "loss": 3.3065, "step": 75105 }, { "epoch": 5.103274901481179, "grad_norm": 1.4203909635543823, "learning_rate": 0.0003623199483625493, "loss": 3.2277, "step": 75110 }, { "epoch": 5.103614621551841, "grad_norm": 1.2296242713928223, "learning_rate": 0.0003622774833537165, "loss": 3.3322, "step": 75115 }, { "epoch": 5.103954341622503, "grad_norm": 1.2378811836242676, "learning_rate": 0.0003622350183448838, "loss": 3.2943, "step": 75120 }, { "epoch": 5.104294061693165, "grad_norm": 1.2720839977264404, "learning_rate": 0.00036219255333605114, "loss": 3.2318, "step": 75125 }, { "epoch": 5.104633781763827, "grad_norm": 1.4516208171844482, "learning_rate": 0.00036215008832721836, "loss": 3.3059, "step": 75130 }, { "epoch": 5.104973501834488, "grad_norm": 1.6800814867019653, "learning_rate": 0.00036210762331838564, "loss": 3.2001, "step": 75135 }, { "epoch": 5.10531322190515, "grad_norm": 1.5693975687026978, "learning_rate": 0.0003620651583095529, "loss": 3.0573, "step": 75140 }, { "epoch": 5.105652941975812, "grad_norm": 1.929099202156067, "learning_rate": 0.00036202269330072026, "loss": 3.0666, "step": 75145 }, { "epoch": 5.105992662046473, "grad_norm": 1.3622095584869385, "learning_rate": 0.0003619802282918875, "loss": 3.3946, "step": 75150 }, { "epoch": 5.106332382117135, "grad_norm": 1.3258200883865356, "learning_rate": 0.00036193776328305476, "loss": 3.3097, "step": 75155 }, { "epoch": 5.106672102187797, "grad_norm": 1.7196356058120728, "learning_rate": 0.0003618952982742221, "loss": 3.1046, "step": 75160 }, { "epoch": 5.107011822258459, "grad_norm": 1.4441264867782593, "learning_rate": 
0.0003618528332653893, "loss": 3.5832, "step": 75165 }, { "epoch": 5.107351542329121, "grad_norm": 1.2164005041122437, "learning_rate": 0.0003618103682565566, "loss": 2.9983, "step": 75170 }, { "epoch": 5.107691262399783, "grad_norm": 1.652768850326538, "learning_rate": 0.0003617679032477239, "loss": 3.3382, "step": 75175 }, { "epoch": 5.108030982470444, "grad_norm": 1.727452039718628, "learning_rate": 0.00036172543823889116, "loss": 3.1748, "step": 75180 }, { "epoch": 5.108370702541106, "grad_norm": 1.4825767278671265, "learning_rate": 0.00036168297323005844, "loss": 3.3, "step": 75185 }, { "epoch": 5.108710422611768, "grad_norm": 1.0827581882476807, "learning_rate": 0.0003616405082212257, "loss": 3.2025, "step": 75190 }, { "epoch": 5.109050142682429, "grad_norm": 1.5980546474456787, "learning_rate": 0.000361598043212393, "loss": 3.343, "step": 75195 }, { "epoch": 5.109389862753091, "grad_norm": 1.45962655544281, "learning_rate": 0.0003615555782035603, "loss": 3.2494, "step": 75200 }, { "epoch": 5.1097295828237534, "grad_norm": 1.1135891675949097, "learning_rate": 0.00036151311319472756, "loss": 3.3449, "step": 75205 }, { "epoch": 5.110069302894415, "grad_norm": 1.1848669052124023, "learning_rate": 0.0003614706481858948, "loss": 3.3649, "step": 75210 }, { "epoch": 5.110409022965077, "grad_norm": 1.3668500185012817, "learning_rate": 0.0003614281831770621, "loss": 3.2166, "step": 75215 }, { "epoch": 5.110748743035739, "grad_norm": 1.3018666505813599, "learning_rate": 0.0003613857181682294, "loss": 3.2788, "step": 75220 }, { "epoch": 5.1110884631064, "grad_norm": 1.1295078992843628, "learning_rate": 0.00036134325315939663, "loss": 3.4742, "step": 75225 }, { "epoch": 5.111428183177062, "grad_norm": 1.3231176137924194, "learning_rate": 0.00036130078815056396, "loss": 3.4591, "step": 75230 }, { "epoch": 5.111767903247724, "grad_norm": 1.2248033285140991, "learning_rate": 0.00036125832314173124, "loss": 3.3681, "step": 75235 }, { "epoch": 5.112107623318385, "grad_norm": 
1.366594910621643, "learning_rate": 0.00036121585813289847, "loss": 3.2288, "step": 75240 }, { "epoch": 5.112447343389047, "grad_norm": 1.6016844511032104, "learning_rate": 0.00036117339312406575, "loss": 3.1534, "step": 75245 }, { "epoch": 5.1127870634597095, "grad_norm": 1.5618237257003784, "learning_rate": 0.0003611309281152331, "loss": 3.5499, "step": 75250 }, { "epoch": 5.113126783530371, "grad_norm": 1.2673310041427612, "learning_rate": 0.0003610884631064003, "loss": 2.994, "step": 75255 }, { "epoch": 5.113466503601033, "grad_norm": 1.2292569875717163, "learning_rate": 0.0003610459980975676, "loss": 3.2449, "step": 75260 }, { "epoch": 5.113806223671695, "grad_norm": 1.2898969650268555, "learning_rate": 0.0003610035330887349, "loss": 3.3835, "step": 75265 }, { "epoch": 5.114145943742356, "grad_norm": 1.2196030616760254, "learning_rate": 0.00036096106807990215, "loss": 3.4161, "step": 75270 }, { "epoch": 5.114485663813018, "grad_norm": 1.261610984802246, "learning_rate": 0.00036091860307106943, "loss": 3.2389, "step": 75275 }, { "epoch": 5.11482538388368, "grad_norm": 1.2317677736282349, "learning_rate": 0.0003608761380622367, "loss": 3.4334, "step": 75280 }, { "epoch": 5.115165103954341, "grad_norm": 1.4647340774536133, "learning_rate": 0.000360833673053404, "loss": 3.3958, "step": 75285 }, { "epoch": 5.115504824025003, "grad_norm": 1.4073371887207031, "learning_rate": 0.00036079120804457127, "loss": 3.251, "step": 75290 }, { "epoch": 5.1158445440956655, "grad_norm": 1.4479310512542725, "learning_rate": 0.00036074874303573855, "loss": 3.4055, "step": 75295 }, { "epoch": 5.116184264166327, "grad_norm": 1.294970989227295, "learning_rate": 0.00036070627802690583, "loss": 3.3633, "step": 75300 }, { "epoch": 5.116523984236989, "grad_norm": 1.5412031412124634, "learning_rate": 0.0003606638130180731, "loss": 3.0657, "step": 75305 }, { "epoch": 5.116863704307651, "grad_norm": 1.2625123262405396, "learning_rate": 0.0003606213480092404, "loss": 3.5324, "step": 75310 }, 
{ "epoch": 5.117203424378312, "grad_norm": 2.437898635864258, "learning_rate": 0.00036057888300040767, "loss": 3.2701, "step": 75315 }, { "epoch": 5.117543144448974, "grad_norm": 1.067104458808899, "learning_rate": 0.00036053641799157495, "loss": 3.3837, "step": 75320 }, { "epoch": 5.117882864519636, "grad_norm": 1.2225251197814941, "learning_rate": 0.00036049395298274223, "loss": 3.3983, "step": 75325 }, { "epoch": 5.118222584590297, "grad_norm": 1.3036079406738281, "learning_rate": 0.0003604514879739095, "loss": 3.2965, "step": 75330 }, { "epoch": 5.118562304660959, "grad_norm": 1.385980486869812, "learning_rate": 0.0003604090229650768, "loss": 3.2434, "step": 75335 }, { "epoch": 5.1189020247316215, "grad_norm": 1.48622465133667, "learning_rate": 0.00036036655795624407, "loss": 3.3258, "step": 75340 }, { "epoch": 5.119241744802283, "grad_norm": 1.5425410270690918, "learning_rate": 0.00036032409294741135, "loss": 3.1969, "step": 75345 }, { "epoch": 5.119581464872945, "grad_norm": 1.0968502759933472, "learning_rate": 0.0003602816279385786, "loss": 3.5717, "step": 75350 }, { "epoch": 5.119921184943607, "grad_norm": 1.318588137626648, "learning_rate": 0.0003602391629297459, "loss": 3.2145, "step": 75355 }, { "epoch": 5.120260905014268, "grad_norm": 1.1381458044052124, "learning_rate": 0.0003601966979209132, "loss": 3.0892, "step": 75360 }, { "epoch": 5.12060062508493, "grad_norm": 1.1607396602630615, "learning_rate": 0.0003601542329120804, "loss": 3.395, "step": 75365 }, { "epoch": 5.120940345155592, "grad_norm": 1.8180241584777832, "learning_rate": 0.00036011176790324775, "loss": 3.5158, "step": 75370 }, { "epoch": 5.121280065226253, "grad_norm": 1.375066876411438, "learning_rate": 0.00036006930289441503, "loss": 3.3891, "step": 75375 }, { "epoch": 5.121619785296915, "grad_norm": 1.4209288358688354, "learning_rate": 0.00036002683788558226, "loss": 3.1944, "step": 75380 }, { "epoch": 5.1219595053675775, "grad_norm": 1.4538700580596924, "learning_rate": 
0.0003599843728767496, "loss": 3.0432, "step": 75385 }, { "epoch": 5.122299225438239, "grad_norm": 1.1178635358810425, "learning_rate": 0.00035994190786791687, "loss": 3.1032, "step": 75390 }, { "epoch": 5.122638945508901, "grad_norm": 1.4537137746810913, "learning_rate": 0.0003598994428590841, "loss": 3.1425, "step": 75395 }, { "epoch": 5.122978665579563, "grad_norm": 1.2510950565338135, "learning_rate": 0.0003598569778502514, "loss": 3.3944, "step": 75400 }, { "epoch": 5.123318385650224, "grad_norm": 1.1889954805374146, "learning_rate": 0.0003598145128414187, "loss": 3.3136, "step": 75405 }, { "epoch": 5.123658105720886, "grad_norm": 1.4642410278320312, "learning_rate": 0.00035977204783258594, "loss": 3.4215, "step": 75410 }, { "epoch": 5.123997825791548, "grad_norm": 1.230671763420105, "learning_rate": 0.0003597295828237532, "loss": 3.1544, "step": 75415 }, { "epoch": 5.124337545862209, "grad_norm": 1.114412784576416, "learning_rate": 0.00035968711781492055, "loss": 3.3061, "step": 75420 }, { "epoch": 5.124677265932871, "grad_norm": 1.127288818359375, "learning_rate": 0.0003596446528060878, "loss": 3.1173, "step": 75425 }, { "epoch": 5.1250169860035335, "grad_norm": 1.3080298900604248, "learning_rate": 0.00035960218779725506, "loss": 3.404, "step": 75430 }, { "epoch": 5.125356706074195, "grad_norm": 1.4728939533233643, "learning_rate": 0.00035955972278842234, "loss": 3.2717, "step": 75435 }, { "epoch": 5.125696426144857, "grad_norm": 1.4183976650238037, "learning_rate": 0.0003595172577795896, "loss": 3.369, "step": 75440 }, { "epoch": 5.126036146215519, "grad_norm": 1.2427418231964111, "learning_rate": 0.0003594747927707569, "loss": 3.5814, "step": 75445 }, { "epoch": 5.12637586628618, "grad_norm": 1.4636266231536865, "learning_rate": 0.0003594323277619242, "loss": 3.3442, "step": 75450 }, { "epoch": 5.126715586356842, "grad_norm": 1.2478917837142944, "learning_rate": 0.00035938986275309146, "loss": 3.4757, "step": 75455 }, { "epoch": 5.127055306427504, 
"grad_norm": 1.499600887298584, "learning_rate": 0.00035934739774425874, "loss": 3.1648, "step": 75460 }, { "epoch": 5.127395026498165, "grad_norm": 1.474312663078308, "learning_rate": 0.000359304932735426, "loss": 3.2227, "step": 75465 }, { "epoch": 5.127734746568827, "grad_norm": 1.4454556703567505, "learning_rate": 0.00035926246772659325, "loss": 3.1778, "step": 75470 }, { "epoch": 5.1280744666394895, "grad_norm": 1.3018511533737183, "learning_rate": 0.0003592200027177606, "loss": 2.8183, "step": 75475 }, { "epoch": 5.128414186710151, "grad_norm": 1.804276466369629, "learning_rate": 0.00035917753770892786, "loss": 3.1802, "step": 75480 }, { "epoch": 5.128753906780813, "grad_norm": 1.7606208324432373, "learning_rate": 0.00035913507270009514, "loss": 3.1807, "step": 75485 }, { "epoch": 5.129093626851474, "grad_norm": 1.1149115562438965, "learning_rate": 0.0003590926076912624, "loss": 3.2963, "step": 75490 }, { "epoch": 5.129433346922136, "grad_norm": 1.384313941001892, "learning_rate": 0.0003590501426824297, "loss": 3.4387, "step": 75495 }, { "epoch": 5.129773066992798, "grad_norm": 1.0571566820144653, "learning_rate": 0.000359007677673597, "loss": 3.1953, "step": 75500 }, { "epoch": 5.130112787063459, "grad_norm": 1.2159416675567627, "learning_rate": 0.0003589652126647642, "loss": 3.2753, "step": 75505 }, { "epoch": 5.130452507134121, "grad_norm": 1.3558011054992676, "learning_rate": 0.00035892274765593154, "loss": 3.3082, "step": 75510 }, { "epoch": 5.1307922272047835, "grad_norm": 1.4385713338851929, "learning_rate": 0.0003588802826470988, "loss": 3.5328, "step": 75515 }, { "epoch": 5.131131947275445, "grad_norm": 1.4674513339996338, "learning_rate": 0.00035883781763826605, "loss": 3.285, "step": 75520 }, { "epoch": 5.131471667346107, "grad_norm": 1.1079834699630737, "learning_rate": 0.0003587953526294334, "loss": 3.1944, "step": 75525 }, { "epoch": 5.131811387416769, "grad_norm": 1.015790581703186, "learning_rate": 0.00035875288762060066, "loss": 3.1514, 
"step": 75530 }, { "epoch": 5.13215110748743, "grad_norm": 1.2304682731628418, "learning_rate": 0.0003587104226117679, "loss": 3.2394, "step": 75535 }, { "epoch": 5.132490827558092, "grad_norm": 1.2593815326690674, "learning_rate": 0.00035866795760293517, "loss": 3.381, "step": 75540 }, { "epoch": 5.132830547628754, "grad_norm": 0.9521081447601318, "learning_rate": 0.0003586254925941025, "loss": 3.7337, "step": 75545 }, { "epoch": 5.133170267699415, "grad_norm": 1.2839418649673462, "learning_rate": 0.0003585830275852697, "loss": 3.0928, "step": 75550 }, { "epoch": 5.133509987770077, "grad_norm": 1.1301807165145874, "learning_rate": 0.000358540562576437, "loss": 3.3544, "step": 75555 }, { "epoch": 5.1338497078407395, "grad_norm": 1.5368881225585938, "learning_rate": 0.00035849809756760434, "loss": 3.4195, "step": 75560 }, { "epoch": 5.134189427911401, "grad_norm": 1.1687612533569336, "learning_rate": 0.00035845563255877157, "loss": 3.3829, "step": 75565 }, { "epoch": 5.134529147982063, "grad_norm": 1.3497593402862549, "learning_rate": 0.00035841316754993885, "loss": 3.2354, "step": 75570 }, { "epoch": 5.134868868052725, "grad_norm": 1.1653386354446411, "learning_rate": 0.0003583707025411061, "loss": 3.2527, "step": 75575 }, { "epoch": 5.135208588123386, "grad_norm": 1.3614872694015503, "learning_rate": 0.0003583282375322734, "loss": 3.3086, "step": 75580 }, { "epoch": 5.135548308194048, "grad_norm": 1.1097534894943237, "learning_rate": 0.0003582857725234407, "loss": 3.5305, "step": 75585 }, { "epoch": 5.13588802826471, "grad_norm": 1.4770110845565796, "learning_rate": 0.00035824330751460797, "loss": 3.0766, "step": 75590 }, { "epoch": 5.136227748335371, "grad_norm": 1.1665362119674683, "learning_rate": 0.00035820084250577525, "loss": 3.3549, "step": 75595 }, { "epoch": 5.136567468406033, "grad_norm": 1.342851161956787, "learning_rate": 0.00035815837749694253, "loss": 3.2972, "step": 75600 }, { "epoch": 5.1369071884766955, "grad_norm": 1.7862439155578613, 
"learning_rate": 0.0003581159124881098, "loss": 3.4912, "step": 75605 }, { "epoch": 5.137246908547357, "grad_norm": 1.2093114852905273, "learning_rate": 0.00035807344747927703, "loss": 3.4611, "step": 75610 }, { "epoch": 5.137586628618019, "grad_norm": 1.266635537147522, "learning_rate": 0.00035803098247044437, "loss": 3.41, "step": 75615 }, { "epoch": 5.137926348688681, "grad_norm": 1.3837028741836548, "learning_rate": 0.00035798851746161165, "loss": 3.7892, "step": 75620 }, { "epoch": 5.138266068759342, "grad_norm": 1.3659188747406006, "learning_rate": 0.0003579460524527789, "loss": 3.5718, "step": 75625 }, { "epoch": 5.138605788830004, "grad_norm": 1.6894183158874512, "learning_rate": 0.0003579035874439462, "loss": 3.3108, "step": 75630 }, { "epoch": 5.138945508900666, "grad_norm": 1.2196805477142334, "learning_rate": 0.0003578611224351135, "loss": 3.4083, "step": 75635 }, { "epoch": 5.139285228971327, "grad_norm": 1.2688153982162476, "learning_rate": 0.0003578186574262807, "loss": 3.1004, "step": 75640 }, { "epoch": 5.139624949041989, "grad_norm": 1.0949288606643677, "learning_rate": 0.000357776192417448, "loss": 3.2262, "step": 75645 }, { "epoch": 5.1399646691126515, "grad_norm": 1.4679473638534546, "learning_rate": 0.00035773372740861533, "loss": 3.4043, "step": 75650 }, { "epoch": 5.140304389183313, "grad_norm": 1.2689809799194336, "learning_rate": 0.0003576912623997826, "loss": 3.422, "step": 75655 }, { "epoch": 5.140644109253975, "grad_norm": 1.2004297971725464, "learning_rate": 0.00035764879739094983, "loss": 3.235, "step": 75660 }, { "epoch": 5.140983829324637, "grad_norm": 1.229810357093811, "learning_rate": 0.00035760633238211717, "loss": 3.3819, "step": 75665 }, { "epoch": 5.141323549395298, "grad_norm": 1.2928354740142822, "learning_rate": 0.00035756386737328445, "loss": 3.3628, "step": 75670 }, { "epoch": 5.14166326946596, "grad_norm": 1.2755063772201538, "learning_rate": 0.0003575214023644517, "loss": 3.6342, "step": 75675 }, { "epoch": 
5.142002989536622, "grad_norm": 1.092498540878296, "learning_rate": 0.000357478937355619, "loss": 3.2576, "step": 75680 }, { "epoch": 5.142342709607283, "grad_norm": 1.1387569904327393, "learning_rate": 0.0003574364723467863, "loss": 3.2742, "step": 75685 }, { "epoch": 5.142682429677945, "grad_norm": 1.1757586002349854, "learning_rate": 0.0003573940073379535, "loss": 3.3178, "step": 75690 }, { "epoch": 5.1430221497486075, "grad_norm": 1.5124423503875732, "learning_rate": 0.0003573515423291208, "loss": 3.289, "step": 75695 }, { "epoch": 5.143361869819269, "grad_norm": 1.220572829246521, "learning_rate": 0.00035730907732028813, "loss": 3.4319, "step": 75700 }, { "epoch": 5.143701589889931, "grad_norm": 1.1708018779754639, "learning_rate": 0.00035726661231145535, "loss": 3.5787, "step": 75705 }, { "epoch": 5.144041309960593, "grad_norm": 1.482296109199524, "learning_rate": 0.00035722414730262263, "loss": 3.3715, "step": 75710 }, { "epoch": 5.144381030031254, "grad_norm": 1.3076090812683105, "learning_rate": 0.00035718168229378997, "loss": 3.3409, "step": 75715 }, { "epoch": 5.144720750101916, "grad_norm": 1.3480123281478882, "learning_rate": 0.0003571392172849572, "loss": 3.4754, "step": 75720 }, { "epoch": 5.145060470172578, "grad_norm": 1.5358591079711914, "learning_rate": 0.0003570967522761245, "loss": 3.5124, "step": 75725 }, { "epoch": 5.145400190243239, "grad_norm": 1.6669872999191284, "learning_rate": 0.00035705428726729176, "loss": 3.3726, "step": 75730 }, { "epoch": 5.145739910313901, "grad_norm": 1.59843909740448, "learning_rate": 0.00035701182225845904, "loss": 3.2208, "step": 75735 }, { "epoch": 5.1460796303845635, "grad_norm": 1.1213632822036743, "learning_rate": 0.0003569693572496263, "loss": 3.1981, "step": 75740 }, { "epoch": 5.146419350455225, "grad_norm": 1.3884869813919067, "learning_rate": 0.0003569268922407936, "loss": 3.232, "step": 75745 }, { "epoch": 5.146759070525887, "grad_norm": 1.3892223834991455, "learning_rate": 0.0003568844272319609, 
"loss": 3.5, "step": 75750 }, { "epoch": 5.147098790596549, "grad_norm": 1.3259679079055786, "learning_rate": 0.00035684196222312816, "loss": 3.1827, "step": 75755 }, { "epoch": 5.14743851066721, "grad_norm": 1.5927945375442505, "learning_rate": 0.00035679949721429544, "loss": 3.2341, "step": 75760 }, { "epoch": 5.147778230737872, "grad_norm": 1.501751184463501, "learning_rate": 0.00035675703220546266, "loss": 3.6814, "step": 75765 }, { "epoch": 5.148117950808534, "grad_norm": 1.3072998523712158, "learning_rate": 0.00035671456719663, "loss": 3.3627, "step": 75770 }, { "epoch": 5.148457670879195, "grad_norm": 1.4837275743484497, "learning_rate": 0.0003566721021877973, "loss": 3.3326, "step": 75775 }, { "epoch": 5.1487973909498574, "grad_norm": 1.2749912738800049, "learning_rate": 0.0003566296371789645, "loss": 3.182, "step": 75780 }, { "epoch": 5.1491371110205195, "grad_norm": 2.017638683319092, "learning_rate": 0.00035658717217013184, "loss": 3.2102, "step": 75785 }, { "epoch": 5.149476831091181, "grad_norm": 1.0878232717514038, "learning_rate": 0.0003565447071612991, "loss": 3.3357, "step": 75790 }, { "epoch": 5.149816551161843, "grad_norm": 1.198079228401184, "learning_rate": 0.00035650224215246634, "loss": 3.2082, "step": 75795 }, { "epoch": 5.150156271232504, "grad_norm": 1.0691592693328857, "learning_rate": 0.0003564597771436336, "loss": 3.3574, "step": 75800 }, { "epoch": 5.150495991303166, "grad_norm": 1.3574895858764648, "learning_rate": 0.00035641731213480096, "loss": 3.0837, "step": 75805 }, { "epoch": 5.150835711373828, "grad_norm": 1.215646505355835, "learning_rate": 0.0003563748471259682, "loss": 3.3964, "step": 75810 }, { "epoch": 5.151175431444489, "grad_norm": 1.3001521825790405, "learning_rate": 0.00035633238211713546, "loss": 3.1292, "step": 75815 }, { "epoch": 5.151515151515151, "grad_norm": 1.4920742511749268, "learning_rate": 0.0003562899171083028, "loss": 3.3906, "step": 75820 }, { "epoch": 5.1518548715858135, "grad_norm": 1.4229345321655273, 
"learning_rate": 0.0003562474520994701, "loss": 3.2852, "step": 75825 }, { "epoch": 5.152194591656475, "grad_norm": 1.1842374801635742, "learning_rate": 0.0003562049870906373, "loss": 3.5285, "step": 75830 }, { "epoch": 5.152534311727137, "grad_norm": 1.2701716423034668, "learning_rate": 0.0003561625220818046, "loss": 3.5359, "step": 75835 }, { "epoch": 5.152874031797799, "grad_norm": 1.3017139434814453, "learning_rate": 0.0003561200570729719, "loss": 3.3007, "step": 75840 }, { "epoch": 5.15321375186846, "grad_norm": 1.1264091730117798, "learning_rate": 0.00035607759206413914, "loss": 3.1427, "step": 75845 }, { "epoch": 5.153553471939122, "grad_norm": 1.2699100971221924, "learning_rate": 0.0003560351270553064, "loss": 3.3102, "step": 75850 }, { "epoch": 5.153893192009784, "grad_norm": 1.444420576095581, "learning_rate": 0.00035599266204647376, "loss": 3.3163, "step": 75855 }, { "epoch": 5.154232912080445, "grad_norm": 1.2661811113357544, "learning_rate": 0.000355950197037641, "loss": 3.4505, "step": 75860 }, { "epoch": 5.154572632151107, "grad_norm": 1.560095191001892, "learning_rate": 0.00035590773202880826, "loss": 3.3533, "step": 75865 }, { "epoch": 5.1549123522217695, "grad_norm": 1.5627753734588623, "learning_rate": 0.00035586526701997554, "loss": 3.1706, "step": 75870 }, { "epoch": 5.155252072292431, "grad_norm": 1.463585615158081, "learning_rate": 0.0003558228020111428, "loss": 3.3392, "step": 75875 }, { "epoch": 5.155591792363093, "grad_norm": 1.4108678102493286, "learning_rate": 0.0003557803370023101, "loss": 3.233, "step": 75880 }, { "epoch": 5.155931512433755, "grad_norm": 2.846595048904419, "learning_rate": 0.0003557378719934774, "loss": 3.3642, "step": 75885 }, { "epoch": 5.156271232504416, "grad_norm": 1.3437459468841553, "learning_rate": 0.00035569540698464466, "loss": 3.3425, "step": 75890 }, { "epoch": 5.156610952575078, "grad_norm": 1.5299153327941895, "learning_rate": 0.00035565294197581194, "loss": 3.14, "step": 75895 }, { "epoch": 
5.15695067264574, "grad_norm": 1.2280523777008057, "learning_rate": 0.0003556104769669792, "loss": 3.2403, "step": 75900 }, { "epoch": 5.157290392716401, "grad_norm": 1.7269768714904785, "learning_rate": 0.00035556801195814645, "loss": 3.2292, "step": 75905 }, { "epoch": 5.157630112787063, "grad_norm": 1.2061421871185303, "learning_rate": 0.0003555255469493138, "loss": 3.3172, "step": 75910 }, { "epoch": 5.1579698328577255, "grad_norm": 1.3574674129486084, "learning_rate": 0.00035548308194048106, "loss": 2.7914, "step": 75915 }, { "epoch": 5.158309552928387, "grad_norm": 1.3224884271621704, "learning_rate": 0.0003554406169316483, "loss": 3.3566, "step": 75920 }, { "epoch": 5.158649272999049, "grad_norm": 1.1596052646636963, "learning_rate": 0.0003553981519228156, "loss": 3.4022, "step": 75925 }, { "epoch": 5.158988993069711, "grad_norm": 1.188884973526001, "learning_rate": 0.0003553556869139829, "loss": 3.1545, "step": 75930 }, { "epoch": 5.159328713140372, "grad_norm": 2.2122130393981934, "learning_rate": 0.00035531322190515013, "loss": 3.2378, "step": 75935 }, { "epoch": 5.159668433211034, "grad_norm": 1.4550584554672241, "learning_rate": 0.00035527075689631746, "loss": 3.3605, "step": 75940 }, { "epoch": 5.160008153281696, "grad_norm": 1.2327661514282227, "learning_rate": 0.00035522829188748474, "loss": 3.2884, "step": 75945 }, { "epoch": 5.160347873352357, "grad_norm": 0.9761620759963989, "learning_rate": 0.00035518582687865197, "loss": 3.3086, "step": 75950 }, { "epoch": 5.160687593423019, "grad_norm": 1.3654358386993408, "learning_rate": 0.00035514336186981925, "loss": 3.2124, "step": 75955 }, { "epoch": 5.1610273134936815, "grad_norm": 1.4991326332092285, "learning_rate": 0.0003551008968609866, "loss": 3.356, "step": 75960 }, { "epoch": 5.161367033564343, "grad_norm": 1.915915846824646, "learning_rate": 0.0003550584318521538, "loss": 3.466, "step": 75965 }, { "epoch": 5.161706753635005, "grad_norm": 1.2777018547058105, "learning_rate": 0.0003550159668433211, 
"loss": 3.2526, "step": 75970 }, { "epoch": 5.162046473705667, "grad_norm": 1.2299447059631348, "learning_rate": 0.0003549735018344884, "loss": 3.1415, "step": 75975 }, { "epoch": 5.162386193776328, "grad_norm": 1.2725944519042969, "learning_rate": 0.00035493103682565565, "loss": 3.5834, "step": 75980 }, { "epoch": 5.16272591384699, "grad_norm": 1.1544936895370483, "learning_rate": 0.00035488857181682293, "loss": 3.1716, "step": 75985 }, { "epoch": 5.163065633917652, "grad_norm": 1.2233810424804688, "learning_rate": 0.0003548461068079902, "loss": 3.3042, "step": 75990 }, { "epoch": 5.163405353988313, "grad_norm": 1.2934544086456299, "learning_rate": 0.00035480364179915755, "loss": 3.4116, "step": 75995 }, { "epoch": 5.163745074058975, "grad_norm": 0.9638126492500305, "learning_rate": 0.00035476117679032477, "loss": 3.4874, "step": 76000 }, { "epoch": 5.1640847941296375, "grad_norm": 1.063568115234375, "learning_rate": 0.00035471871178149205, "loss": 3.4557, "step": 76005 }, { "epoch": 5.164424514200299, "grad_norm": 1.646284818649292, "learning_rate": 0.0003546762467726594, "loss": 3.2637, "step": 76010 }, { "epoch": 5.164764234270961, "grad_norm": 1.1841330528259277, "learning_rate": 0.0003546337817638266, "loss": 3.3908, "step": 76015 }, { "epoch": 5.165103954341623, "grad_norm": 1.4316449165344238, "learning_rate": 0.0003545913167549939, "loss": 3.5002, "step": 76020 }, { "epoch": 5.165443674412284, "grad_norm": 1.6268898248672485, "learning_rate": 0.00035454885174616117, "loss": 3.1485, "step": 76025 }, { "epoch": 5.165783394482946, "grad_norm": 2.1834490299224854, "learning_rate": 0.00035450638673732845, "loss": 3.162, "step": 76030 }, { "epoch": 5.166123114553608, "grad_norm": 1.6592758893966675, "learning_rate": 0.00035446392172849573, "loss": 3.4187, "step": 76035 }, { "epoch": 5.166462834624269, "grad_norm": 1.6247525215148926, "learning_rate": 0.000354421456719663, "loss": 3.2508, "step": 76040 }, { "epoch": 5.166802554694931, "grad_norm": 
1.2547736167907715, "learning_rate": 0.0003543789917108303, "loss": 3.2084, "step": 76045 }, { "epoch": 5.1671422747655935, "grad_norm": 1.3206192255020142, "learning_rate": 0.00035433652670199757, "loss": 3.4108, "step": 76050 }, { "epoch": 5.167481994836255, "grad_norm": 1.6358124017715454, "learning_rate": 0.00035429406169316485, "loss": 3.1993, "step": 76055 }, { "epoch": 5.167821714906917, "grad_norm": 1.3624823093414307, "learning_rate": 0.0003542515966843321, "loss": 3.0789, "step": 76060 }, { "epoch": 5.168161434977579, "grad_norm": 1.1190781593322754, "learning_rate": 0.0003542091316754994, "loss": 3.4122, "step": 76065 }, { "epoch": 5.16850115504824, "grad_norm": 1.21442711353302, "learning_rate": 0.0003541666666666667, "loss": 3.6273, "step": 76070 }, { "epoch": 5.168840875118902, "grad_norm": 1.70646071434021, "learning_rate": 0.0003541242016578339, "loss": 3.2771, "step": 76075 }, { "epoch": 5.169180595189564, "grad_norm": 1.51065993309021, "learning_rate": 0.00035408173664900125, "loss": 3.3664, "step": 76080 }, { "epoch": 5.169520315260225, "grad_norm": 1.2757892608642578, "learning_rate": 0.00035403927164016853, "loss": 3.5042, "step": 76085 }, { "epoch": 5.1698600353308874, "grad_norm": 1.358917474746704, "learning_rate": 0.00035399680663133576, "loss": 3.4777, "step": 76090 }, { "epoch": 5.1701997554015495, "grad_norm": 1.3387236595153809, "learning_rate": 0.00035395434162250304, "loss": 3.3677, "step": 76095 }, { "epoch": 5.170539475472211, "grad_norm": 1.4425313472747803, "learning_rate": 0.00035391187661367037, "loss": 3.3712, "step": 76100 }, { "epoch": 5.170879195542873, "grad_norm": 1.2624294757843018, "learning_rate": 0.0003538694116048376, "loss": 3.2729, "step": 76105 }, { "epoch": 5.171218915613535, "grad_norm": 1.41737699508667, "learning_rate": 0.0003538269465960049, "loss": 3.2108, "step": 76110 }, { "epoch": 5.171558635684196, "grad_norm": 1.2944227457046509, "learning_rate": 0.0003537844815871722, "loss": 3.4927, "step": 76115 }, { 
"epoch": 5.171898355754858, "grad_norm": 1.1621049642562866, "learning_rate": 0.00035374201657833944, "loss": 3.4474, "step": 76120 }, { "epoch": 5.17223807582552, "grad_norm": 1.3123160600662231, "learning_rate": 0.0003536995515695067, "loss": 3.0662, "step": 76125 }, { "epoch": 5.172577795896181, "grad_norm": 1.2242239713668823, "learning_rate": 0.000353657086560674, "loss": 3.3241, "step": 76130 }, { "epoch": 5.1729175159668435, "grad_norm": 1.550430178642273, "learning_rate": 0.0003536146215518413, "loss": 3.4006, "step": 76135 }, { "epoch": 5.1732572360375055, "grad_norm": 1.3190231323242188, "learning_rate": 0.00035357215654300856, "loss": 3.2141, "step": 76140 }, { "epoch": 5.173596956108167, "grad_norm": 1.4485621452331543, "learning_rate": 0.00035352969153417584, "loss": 3.1189, "step": 76145 }, { "epoch": 5.173936676178829, "grad_norm": 1.1768690347671509, "learning_rate": 0.0003534872265253431, "loss": 3.273, "step": 76150 }, { "epoch": 5.174276396249491, "grad_norm": 1.3907742500305176, "learning_rate": 0.0003534447615165104, "loss": 3.2126, "step": 76155 }, { "epoch": 5.174616116320152, "grad_norm": 1.3195213079452515, "learning_rate": 0.0003534022965076777, "loss": 3.4724, "step": 76160 }, { "epoch": 5.174955836390814, "grad_norm": 1.4432742595672607, "learning_rate": 0.00035335983149884496, "loss": 3.3898, "step": 76165 }, { "epoch": 5.175295556461475, "grad_norm": 1.3234198093414307, "learning_rate": 0.00035331736649001224, "loss": 3.1419, "step": 76170 }, { "epoch": 5.175635276532137, "grad_norm": 1.4167823791503906, "learning_rate": 0.0003532749014811795, "loss": 3.4574, "step": 76175 }, { "epoch": 5.1759749966027995, "grad_norm": 1.2590572834014893, "learning_rate": 0.0003532324364723468, "loss": 3.3848, "step": 76180 }, { "epoch": 5.176314716673461, "grad_norm": 1.0445940494537354, "learning_rate": 0.0003531899714635141, "loss": 3.5109, "step": 76185 }, { "epoch": 5.176654436744123, "grad_norm": 1.4935216903686523, "learning_rate": 
0.00035314750645468136, "loss": 3.1117, "step": 76190 }, { "epoch": 5.176994156814785, "grad_norm": 1.224655270576477, "learning_rate": 0.00035310504144584864, "loss": 3.3844, "step": 76195 }, { "epoch": 5.177333876885446, "grad_norm": 1.4446768760681152, "learning_rate": 0.00035307106943878246, "loss": 3.6041, "step": 76200 }, { "epoch": 5.177673596956108, "grad_norm": 1.7843965291976929, "learning_rate": 0.00035302860442994974, "loss": 3.2287, "step": 76205 }, { "epoch": 5.17801331702677, "grad_norm": 1.3948582410812378, "learning_rate": 0.000352986139421117, "loss": 3.4521, "step": 76210 }, { "epoch": 5.178353037097431, "grad_norm": 1.4030587673187256, "learning_rate": 0.0003529436744122843, "loss": 3.3896, "step": 76215 }, { "epoch": 5.178692757168093, "grad_norm": 1.2197388410568237, "learning_rate": 0.00035290120940345153, "loss": 3.7006, "step": 76220 }, { "epoch": 5.1790324772387555, "grad_norm": 1.0312191247940063, "learning_rate": 0.00035285874439461886, "loss": 3.5877, "step": 76225 }, { "epoch": 5.179372197309417, "grad_norm": 1.4290142059326172, "learning_rate": 0.00035281627938578614, "loss": 3.1573, "step": 76230 }, { "epoch": 5.179711917380079, "grad_norm": 1.5205161571502686, "learning_rate": 0.00035277381437695337, "loss": 3.0803, "step": 76235 }, { "epoch": 5.180051637450741, "grad_norm": 1.3737705945968628, "learning_rate": 0.0003527313493681207, "loss": 3.4304, "step": 76240 }, { "epoch": 5.180391357521402, "grad_norm": 1.133754849433899, "learning_rate": 0.000352688884359288, "loss": 3.3053, "step": 76245 }, { "epoch": 5.180731077592064, "grad_norm": 1.2567678689956665, "learning_rate": 0.0003526464193504552, "loss": 3.4102, "step": 76250 }, { "epoch": 5.181070797662726, "grad_norm": 1.3800485134124756, "learning_rate": 0.0003526039543416225, "loss": 3.0988, "step": 76255 }, { "epoch": 5.181410517733387, "grad_norm": 1.484040379524231, "learning_rate": 0.0003525614893327898, "loss": 3.3373, "step": 76260 }, { "epoch": 5.181750237804049, 
"grad_norm": 1.426395297050476, "learning_rate": 0.00035251902432395705, "loss": 3.3456, "step": 76265 }, { "epoch": 5.1820899578747115, "grad_norm": 1.5226850509643555, "learning_rate": 0.00035247655931512433, "loss": 3.263, "step": 76270 }, { "epoch": 5.182429677945373, "grad_norm": 1.317283034324646, "learning_rate": 0.00035243409430629167, "loss": 3.402, "step": 76275 }, { "epoch": 5.182769398016035, "grad_norm": 1.3070857524871826, "learning_rate": 0.0003523916292974589, "loss": 3.2887, "step": 76280 }, { "epoch": 5.183109118086697, "grad_norm": 1.6561120748519897, "learning_rate": 0.00035234916428862617, "loss": 3.2432, "step": 76285 }, { "epoch": 5.183448838157358, "grad_norm": 1.0624058246612549, "learning_rate": 0.00035230669927979345, "loss": 3.5203, "step": 76290 }, { "epoch": 5.18378855822802, "grad_norm": 1.3106821775436401, "learning_rate": 0.00035226423427096073, "loss": 2.994, "step": 76295 }, { "epoch": 5.184128278298682, "grad_norm": 1.5929545164108276, "learning_rate": 0.000352221769262128, "loss": 3.3138, "step": 76300 }, { "epoch": 5.184467998369343, "grad_norm": 1.2366384267807007, "learning_rate": 0.0003521793042532953, "loss": 3.5779, "step": 76305 }, { "epoch": 5.184807718440005, "grad_norm": 1.5444364547729492, "learning_rate": 0.00035213683924446257, "loss": 3.3566, "step": 76310 }, { "epoch": 5.1851474385106675, "grad_norm": 1.2371928691864014, "learning_rate": 0.00035209437423562985, "loss": 3.3064, "step": 76315 }, { "epoch": 5.185487158581329, "grad_norm": 1.2644813060760498, "learning_rate": 0.00035205190922679713, "loss": 3.3058, "step": 76320 }, { "epoch": 5.185826878651991, "grad_norm": 1.5240782499313354, "learning_rate": 0.00035200944421796436, "loss": 3.2884, "step": 76325 }, { "epoch": 5.186166598722653, "grad_norm": 1.6287847757339478, "learning_rate": 0.0003519669792091317, "loss": 3.3199, "step": 76330 }, { "epoch": 5.186506318793314, "grad_norm": 1.2600878477096558, "learning_rate": 0.00035192451420029897, "loss": 3.3142, 
"step": 76335 }, { "epoch": 5.186846038863976, "grad_norm": 1.4199740886688232, "learning_rate": 0.0003518820491914662, "loss": 3.2135, "step": 76340 }, { "epoch": 5.187185758934638, "grad_norm": 1.2077022790908813, "learning_rate": 0.00035183958418263353, "loss": 3.3289, "step": 76345 }, { "epoch": 5.187525479005299, "grad_norm": 1.3191505670547485, "learning_rate": 0.0003517971191738008, "loss": 3.2566, "step": 76350 }, { "epoch": 5.187865199075961, "grad_norm": 1.3041504621505737, "learning_rate": 0.00035175465416496804, "loss": 3.4335, "step": 76355 }, { "epoch": 5.1882049191466235, "grad_norm": 1.2531706094741821, "learning_rate": 0.0003517121891561353, "loss": 3.2863, "step": 76360 }, { "epoch": 5.188544639217285, "grad_norm": 1.2647862434387207, "learning_rate": 0.00035166972414730265, "loss": 3.2328, "step": 76365 }, { "epoch": 5.188884359287947, "grad_norm": 1.0904730558395386, "learning_rate": 0.00035162725913846993, "loss": 3.5616, "step": 76370 }, { "epoch": 5.189224079358609, "grad_norm": 1.2567461729049683, "learning_rate": 0.00035158479412963716, "loss": 3.2101, "step": 76375 }, { "epoch": 5.18956379942927, "grad_norm": 1.2498446702957153, "learning_rate": 0.0003515423291208045, "loss": 3.106, "step": 76380 }, { "epoch": 5.189903519499932, "grad_norm": 1.34320867061615, "learning_rate": 0.00035149986411197177, "loss": 3.4646, "step": 76385 }, { "epoch": 5.190243239570594, "grad_norm": 1.3266406059265137, "learning_rate": 0.000351457399103139, "loss": 3.409, "step": 76390 }, { "epoch": 5.190582959641255, "grad_norm": 1.5488297939300537, "learning_rate": 0.0003514149340943063, "loss": 3.5994, "step": 76395 }, { "epoch": 5.1909226797119175, "grad_norm": 1.2677327394485474, "learning_rate": 0.0003513724690854736, "loss": 3.2415, "step": 76400 }, { "epoch": 5.1912623997825795, "grad_norm": 1.1441259384155273, "learning_rate": 0.00035133000407664084, "loss": 3.2717, "step": 76405 }, { "epoch": 5.191602119853241, "grad_norm": 1.397462010383606, 
"learning_rate": 0.0003512875390678081, "loss": 3.5027, "step": 76410 }, { "epoch": 5.191941839923903, "grad_norm": 1.0014909505844116, "learning_rate": 0.00035124507405897545, "loss": 3.052, "step": 76415 }, { "epoch": 5.192281559994565, "grad_norm": 1.1656172275543213, "learning_rate": 0.0003512026090501427, "loss": 2.9834, "step": 76420 }, { "epoch": 5.192621280065226, "grad_norm": 1.073598027229309, "learning_rate": 0.00035116014404130996, "loss": 3.5707, "step": 76425 }, { "epoch": 5.192961000135888, "grad_norm": 1.391277551651001, "learning_rate": 0.0003511176790324773, "loss": 3.229, "step": 76430 }, { "epoch": 5.19330072020655, "grad_norm": 1.0551583766937256, "learning_rate": 0.0003510752140236445, "loss": 3.3094, "step": 76435 }, { "epoch": 5.193640440277211, "grad_norm": 1.350744366645813, "learning_rate": 0.0003510327490148118, "loss": 3.3073, "step": 76440 }, { "epoch": 5.1939801603478735, "grad_norm": 1.3571510314941406, "learning_rate": 0.0003509902840059791, "loss": 3.6219, "step": 76445 }, { "epoch": 5.1943198804185355, "grad_norm": 1.029222846031189, "learning_rate": 0.00035094781899714636, "loss": 3.4763, "step": 76450 }, { "epoch": 5.194659600489197, "grad_norm": 1.4977748394012451, "learning_rate": 0.00035090535398831364, "loss": 3.3588, "step": 76455 }, { "epoch": 5.194999320559859, "grad_norm": 1.0521904230117798, "learning_rate": 0.0003508628889794809, "loss": 3.4015, "step": 76460 }, { "epoch": 5.195339040630521, "grad_norm": 1.3281675577163696, "learning_rate": 0.0003508204239706482, "loss": 3.3568, "step": 76465 }, { "epoch": 5.195678760701182, "grad_norm": 1.288015604019165, "learning_rate": 0.0003507779589618155, "loss": 3.2719, "step": 76470 }, { "epoch": 5.196018480771844, "grad_norm": 1.377938985824585, "learning_rate": 0.00035073549395298276, "loss": 3.3293, "step": 76475 }, { "epoch": 5.196358200842505, "grad_norm": 1.034874439239502, "learning_rate": 0.00035069302894415, "loss": 3.4436, "step": 76480 }, { "epoch": 
5.196697920913167, "grad_norm": 1.4954016208648682, "learning_rate": 0.0003506505639353173, "loss": 3.3863, "step": 76485 }, { "epoch": 5.1970376409838295, "grad_norm": 1.292203426361084, "learning_rate": 0.0003506080989264846, "loss": 3.4324, "step": 76490 }, { "epoch": 5.197377361054491, "grad_norm": 1.52464759349823, "learning_rate": 0.0003505656339176518, "loss": 3.2308, "step": 76495 }, { "epoch": 5.197717081125153, "grad_norm": 1.5834121704101562, "learning_rate": 0.00035052316890881916, "loss": 3.2884, "step": 76500 }, { "epoch": 5.198056801195815, "grad_norm": 1.5730093717575073, "learning_rate": 0.00035048070389998644, "loss": 3.2782, "step": 76505 }, { "epoch": 5.198396521266476, "grad_norm": 1.492300271987915, "learning_rate": 0.00035043823889115367, "loss": 3.3466, "step": 76510 }, { "epoch": 5.198736241337138, "grad_norm": 1.1744312047958374, "learning_rate": 0.00035039577388232095, "loss": 3.5675, "step": 76515 }, { "epoch": 5.1990759614078, "grad_norm": 0.9177154898643494, "learning_rate": 0.0003503533088734883, "loss": 3.1489, "step": 76520 }, { "epoch": 5.199415681478461, "grad_norm": 1.6327288150787354, "learning_rate": 0.0003503108438646555, "loss": 3.3262, "step": 76525 }, { "epoch": 5.199755401549123, "grad_norm": 1.017502784729004, "learning_rate": 0.0003502683788558228, "loss": 3.3478, "step": 76530 }, { "epoch": 5.2000951216197855, "grad_norm": 1.3980399370193481, "learning_rate": 0.0003502259138469901, "loss": 3.5819, "step": 76535 }, { "epoch": 5.200434841690447, "grad_norm": 1.5359877347946167, "learning_rate": 0.0003501834488381574, "loss": 3.472, "step": 76540 }, { "epoch": 5.200774561761109, "grad_norm": 1.366410255432129, "learning_rate": 0.0003501409838293246, "loss": 3.3315, "step": 76545 }, { "epoch": 5.201114281831771, "grad_norm": 1.7605764865875244, "learning_rate": 0.0003500985188204919, "loss": 3.3795, "step": 76550 }, { "epoch": 5.201454001902432, "grad_norm": 1.1557985544204712, "learning_rate": 0.00035005605381165924, 
"loss": 3.1895, "step": 76555 }, { "epoch": 5.201793721973094, "grad_norm": 1.3623740673065186, "learning_rate": 0.00035001358880282647, "loss": 3.2863, "step": 76560 }, { "epoch": 5.202133442043756, "grad_norm": 1.467329740524292, "learning_rate": 0.00034997112379399375, "loss": 3.3646, "step": 76565 }, { "epoch": 5.202473162114417, "grad_norm": 1.4521528482437134, "learning_rate": 0.0003499286587851611, "loss": 3.1948, "step": 76570 }, { "epoch": 5.202812882185079, "grad_norm": 1.1485151052474976, "learning_rate": 0.0003498861937763283, "loss": 3.4437, "step": 76575 }, { "epoch": 5.2031526022557415, "grad_norm": 1.3426456451416016, "learning_rate": 0.0003498437287674956, "loss": 3.4708, "step": 76580 }, { "epoch": 5.203492322326403, "grad_norm": 1.1143542528152466, "learning_rate": 0.00034980126375866287, "loss": 3.1394, "step": 76585 }, { "epoch": 5.203832042397065, "grad_norm": 1.2759510278701782, "learning_rate": 0.00034975879874983015, "loss": 3.3431, "step": 76590 }, { "epoch": 5.204171762467727, "grad_norm": 1.1001977920532227, "learning_rate": 0.00034971633374099743, "loss": 3.2833, "step": 76595 }, { "epoch": 5.204511482538388, "grad_norm": 1.2694770097732544, "learning_rate": 0.0003496738687321647, "loss": 3.3929, "step": 76600 }, { "epoch": 5.20485120260905, "grad_norm": 1.1788674592971802, "learning_rate": 0.000349631403723332, "loss": 3.1443, "step": 76605 }, { "epoch": 5.205190922679712, "grad_norm": 1.3057341575622559, "learning_rate": 0.00034958893871449927, "loss": 3.2244, "step": 76610 }, { "epoch": 5.205530642750373, "grad_norm": 1.2974817752838135, "learning_rate": 0.00034954647370566655, "loss": 3.4309, "step": 76615 }, { "epoch": 5.205870362821035, "grad_norm": 1.0553818941116333, "learning_rate": 0.0003495040086968338, "loss": 3.4439, "step": 76620 }, { "epoch": 5.2062100828916975, "grad_norm": 1.6059681177139282, "learning_rate": 0.0003494615436880011, "loss": 3.5424, "step": 76625 }, { "epoch": 5.206549802962359, "grad_norm": 
1.2798901796340942, "learning_rate": 0.0003494190786791684, "loss": 3.3532, "step": 76630 }, { "epoch": 5.206889523033021, "grad_norm": 1.347472071647644, "learning_rate": 0.0003493766136703356, "loss": 3.4717, "step": 76635 }, { "epoch": 5.207229243103683, "grad_norm": 1.185900330543518, "learning_rate": 0.00034933414866150295, "loss": 3.334, "step": 76640 }, { "epoch": 5.207568963174344, "grad_norm": 1.3395248651504517, "learning_rate": 0.00034929168365267023, "loss": 3.0973, "step": 76645 }, { "epoch": 5.207908683245006, "grad_norm": 1.4903051853179932, "learning_rate": 0.00034924921864383745, "loss": 3.1692, "step": 76650 }, { "epoch": 5.208248403315668, "grad_norm": 1.2485185861587524, "learning_rate": 0.00034920675363500473, "loss": 3.5273, "step": 76655 }, { "epoch": 5.208588123386329, "grad_norm": 1.258947730064392, "learning_rate": 0.00034916428862617207, "loss": 3.0728, "step": 76660 }, { "epoch": 5.2089278434569914, "grad_norm": 1.2088069915771484, "learning_rate": 0.0003491218236173393, "loss": 3.2918, "step": 76665 }, { "epoch": 5.2092675635276535, "grad_norm": 1.2699404954910278, "learning_rate": 0.0003490793586085066, "loss": 3.1118, "step": 76670 }, { "epoch": 5.209607283598315, "grad_norm": 1.2215121984481812, "learning_rate": 0.0003490368935996739, "loss": 3.278, "step": 76675 }, { "epoch": 5.209947003668977, "grad_norm": 1.1681578159332275, "learning_rate": 0.00034899442859084113, "loss": 3.3011, "step": 76680 }, { "epoch": 5.210286723739639, "grad_norm": 1.4514702558517456, "learning_rate": 0.0003489519635820084, "loss": 3.2179, "step": 76685 }, { "epoch": 5.2106264438103, "grad_norm": 1.3305739164352417, "learning_rate": 0.00034891799157494224, "loss": 3.1527, "step": 76690 }, { "epoch": 5.210966163880962, "grad_norm": 1.2065824270248413, "learning_rate": 0.00034887552656610957, "loss": 3.3504, "step": 76695 }, { "epoch": 5.211305883951624, "grad_norm": 1.1059447526931763, "learning_rate": 0.0003488330615572768, "loss": 3.4296, "step": 76700 }, 
{ "epoch": 5.211645604022285, "grad_norm": 1.2862528562545776, "learning_rate": 0.0003487905965484441, "loss": 3.2368, "step": 76705 }, { "epoch": 5.2119853240929475, "grad_norm": 1.5771458148956299, "learning_rate": 0.00034874813153961136, "loss": 3.3945, "step": 76710 }, { "epoch": 5.2123250441636095, "grad_norm": 1.6994200944900513, "learning_rate": 0.00034870566653077864, "loss": 3.2731, "step": 76715 }, { "epoch": 5.212664764234271, "grad_norm": 1.2744256258010864, "learning_rate": 0.0003486632015219459, "loss": 3.5742, "step": 76720 }, { "epoch": 5.213004484304933, "grad_norm": 1.1251447200775146, "learning_rate": 0.0003486207365131132, "loss": 3.383, "step": 76725 }, { "epoch": 5.213344204375595, "grad_norm": 1.4125008583068848, "learning_rate": 0.0003485782715042805, "loss": 3.3236, "step": 76730 }, { "epoch": 5.213683924446256, "grad_norm": 1.260568618774414, "learning_rate": 0.00034853580649544776, "loss": 3.4534, "step": 76735 }, { "epoch": 5.214023644516918, "grad_norm": 1.4971275329589844, "learning_rate": 0.00034849334148661504, "loss": 3.3738, "step": 76740 }, { "epoch": 5.21436336458758, "grad_norm": 1.3362822532653809, "learning_rate": 0.0003484508764777823, "loss": 3.3207, "step": 76745 }, { "epoch": 5.214703084658241, "grad_norm": 1.4952117204666138, "learning_rate": 0.0003484084114689496, "loss": 3.1404, "step": 76750 }, { "epoch": 5.2150428047289035, "grad_norm": 1.826330542564392, "learning_rate": 0.0003483659464601169, "loss": 3.0989, "step": 76755 }, { "epoch": 5.2153825247995655, "grad_norm": 1.2407013177871704, "learning_rate": 0.00034832348145128416, "loss": 3.3234, "step": 76760 }, { "epoch": 5.215722244870227, "grad_norm": 1.6452921628952026, "learning_rate": 0.00034828101644245144, "loss": 3.3535, "step": 76765 }, { "epoch": 5.216061964940889, "grad_norm": 1.4470863342285156, "learning_rate": 0.0003482385514336187, "loss": 3.1602, "step": 76770 }, { "epoch": 5.216401685011551, "grad_norm": 1.237700343132019, "learning_rate": 
0.000348196086424786, "loss": 3.2199, "step": 76775 }, { "epoch": 5.216741405082212, "grad_norm": 1.3772375583648682, "learning_rate": 0.0003481536214159532, "loss": 3.2008, "step": 76780 }, { "epoch": 5.217081125152874, "grad_norm": 1.5938490629196167, "learning_rate": 0.00034811115640712056, "loss": 3.3127, "step": 76785 }, { "epoch": 5.217420845223536, "grad_norm": 1.5185749530792236, "learning_rate": 0.00034806869139828784, "loss": 3.6345, "step": 76790 }, { "epoch": 5.217760565294197, "grad_norm": 1.3373347520828247, "learning_rate": 0.00034802622638945507, "loss": 3.1921, "step": 76795 }, { "epoch": 5.2181002853648595, "grad_norm": 1.3528728485107422, "learning_rate": 0.0003479837613806224, "loss": 3.2626, "step": 76800 }, { "epoch": 5.2184400054355216, "grad_norm": 0.9372124671936035, "learning_rate": 0.0003479412963717897, "loss": 3.467, "step": 76805 }, { "epoch": 5.218779725506183, "grad_norm": 1.1712130308151245, "learning_rate": 0.0003478988313629569, "loss": 3.2831, "step": 76810 }, { "epoch": 5.219119445576845, "grad_norm": 0.977882981300354, "learning_rate": 0.0003478563663541242, "loss": 3.3322, "step": 76815 }, { "epoch": 5.219459165647507, "grad_norm": 1.1804475784301758, "learning_rate": 0.0003478139013452915, "loss": 3.1928, "step": 76820 }, { "epoch": 5.219798885718168, "grad_norm": 1.3109290599822998, "learning_rate": 0.00034777143633645875, "loss": 3.3104, "step": 76825 }, { "epoch": 5.22013860578883, "grad_norm": 1.1630609035491943, "learning_rate": 0.000347728971327626, "loss": 3.304, "step": 76830 }, { "epoch": 5.220478325859492, "grad_norm": 1.1811869144439697, "learning_rate": 0.00034768650631879336, "loss": 3.129, "step": 76835 }, { "epoch": 5.220818045930153, "grad_norm": 1.441803216934204, "learning_rate": 0.0003476440413099606, "loss": 3.3003, "step": 76840 }, { "epoch": 5.2211577660008155, "grad_norm": 1.1036990880966187, "learning_rate": 0.00034760157630112787, "loss": 3.2868, "step": 76845 }, { "epoch": 5.221497486071477, 
"grad_norm": 1.3369569778442383, "learning_rate": 0.00034755911129229515, "loss": 3.4283, "step": 76850 }, { "epoch": 5.221837206142139, "grad_norm": 1.1583809852600098, "learning_rate": 0.0003475166462834624, "loss": 3.2359, "step": 76855 }, { "epoch": 5.222176926212801, "grad_norm": 1.4127557277679443, "learning_rate": 0.0003474741812746297, "loss": 3.464, "step": 76860 }, { "epoch": 5.222516646283462, "grad_norm": 1.3833576440811157, "learning_rate": 0.000347431716265797, "loss": 3.4641, "step": 76865 }, { "epoch": 5.222856366354124, "grad_norm": 1.3452203273773193, "learning_rate": 0.00034738925125696427, "loss": 3.4679, "step": 76870 }, { "epoch": 5.223196086424786, "grad_norm": 1.3372430801391602, "learning_rate": 0.00034734678624813155, "loss": 3.2954, "step": 76875 }, { "epoch": 5.223535806495447, "grad_norm": 1.1746461391448975, "learning_rate": 0.00034730432123929883, "loss": 3.0974, "step": 76880 }, { "epoch": 5.223875526566109, "grad_norm": 1.3167140483856201, "learning_rate": 0.00034726185623046605, "loss": 3.2357, "step": 76885 }, { "epoch": 5.2242152466367715, "grad_norm": 1.0951335430145264, "learning_rate": 0.0003472193912216334, "loss": 3.4773, "step": 76890 }, { "epoch": 5.224554966707433, "grad_norm": 1.3427836894989014, "learning_rate": 0.00034717692621280067, "loss": 3.1151, "step": 76895 }, { "epoch": 5.224894686778095, "grad_norm": 1.662401795387268, "learning_rate": 0.0003471344612039679, "loss": 3.358, "step": 76900 }, { "epoch": 5.225234406848757, "grad_norm": 1.278641939163208, "learning_rate": 0.00034709199619513523, "loss": 3.5353, "step": 76905 }, { "epoch": 5.225574126919418, "grad_norm": 1.2765551805496216, "learning_rate": 0.0003470495311863025, "loss": 3.3048, "step": 76910 }, { "epoch": 5.22591384699008, "grad_norm": 1.245737075805664, "learning_rate": 0.0003470070661774698, "loss": 3.2976, "step": 76915 }, { "epoch": 5.226253567060742, "grad_norm": 1.5470951795578003, "learning_rate": 0.000346964601168637, "loss": 3.3748, 
"step": 76920 }, { "epoch": 5.226593287131403, "grad_norm": 1.2056485414505005, "learning_rate": 0.00034692213615980435, "loss": 3.4214, "step": 76925 }, { "epoch": 5.226933007202065, "grad_norm": 0.9917882084846497, "learning_rate": 0.00034687967115097163, "loss": 3.5644, "step": 76930 }, { "epoch": 5.2272727272727275, "grad_norm": 1.3683100938796997, "learning_rate": 0.00034683720614213885, "loss": 3.1316, "step": 76935 }, { "epoch": 5.227612447343389, "grad_norm": 1.1241319179534912, "learning_rate": 0.0003467947411333062, "loss": 3.2586, "step": 76940 }, { "epoch": 5.227952167414051, "grad_norm": 1.2117630243301392, "learning_rate": 0.00034675227612447347, "loss": 3.357, "step": 76945 }, { "epoch": 5.228291887484713, "grad_norm": 1.1558011770248413, "learning_rate": 0.0003467098111156407, "loss": 3.3139, "step": 76950 }, { "epoch": 5.228631607555374, "grad_norm": 1.1078312397003174, "learning_rate": 0.00034666734610680803, "loss": 3.4106, "step": 76955 }, { "epoch": 5.228971327626036, "grad_norm": 1.3514111042022705, "learning_rate": 0.0003466248810979753, "loss": 3.3426, "step": 76960 }, { "epoch": 5.229311047696698, "grad_norm": 1.1933107376098633, "learning_rate": 0.00034658241608914253, "loss": 3.3382, "step": 76965 }, { "epoch": 5.229650767767359, "grad_norm": 1.3030847311019897, "learning_rate": 0.0003465399510803098, "loss": 3.4075, "step": 76970 }, { "epoch": 5.2299904878380215, "grad_norm": 1.365268349647522, "learning_rate": 0.00034649748607147715, "loss": 3.4168, "step": 76975 }, { "epoch": 5.2303302079086835, "grad_norm": 1.3688907623291016, "learning_rate": 0.0003464550210626444, "loss": 3.4513, "step": 76980 }, { "epoch": 5.230669927979345, "grad_norm": 1.4512158632278442, "learning_rate": 0.00034641255605381165, "loss": 3.2418, "step": 76985 }, { "epoch": 5.231009648050007, "grad_norm": 1.4248929023742676, "learning_rate": 0.000346370091044979, "loss": 3.2691, "step": 76990 }, { "epoch": 5.231349368120669, "grad_norm": 1.1799136400222778, 
"learning_rate": 0.0003463276260361462, "loss": 3.4333, "step": 76995 }, { "epoch": 5.23168908819133, "grad_norm": 1.4118555784225464, "learning_rate": 0.0003462851610273135, "loss": 3.3503, "step": 77000 }, { "epoch": 5.232028808261992, "grad_norm": 1.1847206354141235, "learning_rate": 0.0003462426960184808, "loss": 3.2662, "step": 77005 }, { "epoch": 5.232368528332654, "grad_norm": 1.3036231994628906, "learning_rate": 0.00034620023100964806, "loss": 3.1677, "step": 77010 }, { "epoch": 5.232708248403315, "grad_norm": 1.2264469861984253, "learning_rate": 0.00034615776600081534, "loss": 3.2375, "step": 77015 }, { "epoch": 5.2330479684739775, "grad_norm": 1.6376159191131592, "learning_rate": 0.0003461153009919826, "loss": 3.335, "step": 77020 }, { "epoch": 5.2333876885446395, "grad_norm": 1.2703450918197632, "learning_rate": 0.0003460728359831499, "loss": 3.3525, "step": 77025 }, { "epoch": 5.233727408615301, "grad_norm": 1.156001091003418, "learning_rate": 0.0003460303709743172, "loss": 3.4377, "step": 77030 }, { "epoch": 5.234067128685963, "grad_norm": 1.6597068309783936, "learning_rate": 0.00034598790596548446, "loss": 3.3989, "step": 77035 }, { "epoch": 5.234406848756625, "grad_norm": 1.1601232290267944, "learning_rate": 0.0003459454409566517, "loss": 3.3043, "step": 77040 }, { "epoch": 5.234746568827286, "grad_norm": 1.2680658102035522, "learning_rate": 0.000345902975947819, "loss": 3.2721, "step": 77045 }, { "epoch": 5.235086288897948, "grad_norm": 1.1541186571121216, "learning_rate": 0.0003458605109389863, "loss": 3.4219, "step": 77050 }, { "epoch": 5.23542600896861, "grad_norm": 1.1413278579711914, "learning_rate": 0.0003458180459301535, "loss": 3.2393, "step": 77055 }, { "epoch": 5.235765729039271, "grad_norm": 1.2206346988677979, "learning_rate": 0.00034577558092132086, "loss": 3.1782, "step": 77060 }, { "epoch": 5.2361054491099335, "grad_norm": 1.7312560081481934, "learning_rate": 0.00034573311591248814, "loss": 3.3477, "step": 77065 }, { "epoch": 
5.2364451691805955, "grad_norm": 1.3187371492385864, "learning_rate": 0.00034569065090365536, "loss": 3.4588, "step": 77070 }, { "epoch": 5.236784889251257, "grad_norm": 1.151195764541626, "learning_rate": 0.00034564818589482264, "loss": 3.3052, "step": 77075 }, { "epoch": 5.237124609321919, "grad_norm": 1.2700175046920776, "learning_rate": 0.00034560572088599, "loss": 3.3461, "step": 77080 }, { "epoch": 5.237464329392581, "grad_norm": 1.295283555984497, "learning_rate": 0.00034556325587715726, "loss": 3.4552, "step": 77085 }, { "epoch": 5.237804049463242, "grad_norm": 1.2703721523284912, "learning_rate": 0.0003455207908683245, "loss": 3.3127, "step": 77090 }, { "epoch": 5.238143769533904, "grad_norm": 1.470618486404419, "learning_rate": 0.0003454783258594918, "loss": 3.528, "step": 77095 }, { "epoch": 5.238483489604566, "grad_norm": 1.1562542915344238, "learning_rate": 0.0003454358608506591, "loss": 3.4325, "step": 77100 }, { "epoch": 5.238823209675227, "grad_norm": 1.197649598121643, "learning_rate": 0.0003453933958418263, "loss": 3.2092, "step": 77105 }, { "epoch": 5.2391629297458895, "grad_norm": 1.592536211013794, "learning_rate": 0.0003453509308329936, "loss": 3.3469, "step": 77110 }, { "epoch": 5.239502649816552, "grad_norm": 1.4712038040161133, "learning_rate": 0.00034530846582416094, "loss": 3.2541, "step": 77115 }, { "epoch": 5.239842369887213, "grad_norm": 1.3476835489273071, "learning_rate": 0.00034526600081532816, "loss": 3.3941, "step": 77120 }, { "epoch": 5.240182089957875, "grad_norm": 1.3819055557250977, "learning_rate": 0.00034522353580649544, "loss": 3.212, "step": 77125 }, { "epoch": 5.240521810028537, "grad_norm": 1.477605938911438, "learning_rate": 0.0003451810707976628, "loss": 3.3786, "step": 77130 }, { "epoch": 5.240861530099198, "grad_norm": 1.5708467960357666, "learning_rate": 0.00034513860578883, "loss": 3.3226, "step": 77135 }, { "epoch": 5.24120125016986, "grad_norm": 1.4475345611572266, "learning_rate": 0.0003450961407799973, "loss": 
3.524, "step": 77140 }, { "epoch": 5.241540970240522, "grad_norm": 1.194491982460022, "learning_rate": 0.00034505367577116456, "loss": 3.2889, "step": 77145 }, { "epoch": 5.241880690311183, "grad_norm": 1.2116683721542358, "learning_rate": 0.00034501121076233184, "loss": 3.485, "step": 77150 }, { "epoch": 5.2422204103818455, "grad_norm": 1.095668911933899, "learning_rate": 0.0003449687457534991, "loss": 3.2448, "step": 77155 }, { "epoch": 5.242560130452507, "grad_norm": 1.0905046463012695, "learning_rate": 0.0003449262807446664, "loss": 3.3166, "step": 77160 }, { "epoch": 5.242899850523169, "grad_norm": 1.0694310665130615, "learning_rate": 0.0003448838157358337, "loss": 3.4543, "step": 77165 }, { "epoch": 5.243239570593831, "grad_norm": 1.2502495050430298, "learning_rate": 0.00034484135072700096, "loss": 3.4522, "step": 77170 }, { "epoch": 5.243579290664492, "grad_norm": 1.550174593925476, "learning_rate": 0.00034479888571816824, "loss": 3.142, "step": 77175 }, { "epoch": 5.243919010735154, "grad_norm": 1.4206067323684692, "learning_rate": 0.00034475642070933547, "loss": 3.3582, "step": 77180 }, { "epoch": 5.244258730805816, "grad_norm": 1.5490957498550415, "learning_rate": 0.0003447139557005028, "loss": 3.3013, "step": 77185 }, { "epoch": 5.244598450876477, "grad_norm": 1.676584243774414, "learning_rate": 0.0003446714906916701, "loss": 3.0385, "step": 77190 }, { "epoch": 5.244938170947139, "grad_norm": 1.2027363777160645, "learning_rate": 0.0003446290256828373, "loss": 3.5263, "step": 77195 }, { "epoch": 5.2452778910178015, "grad_norm": 1.0761295557022095, "learning_rate": 0.00034458656067400464, "loss": 3.3229, "step": 77200 }, { "epoch": 5.245617611088463, "grad_norm": 1.2372084856033325, "learning_rate": 0.0003445440956651719, "loss": 3.4539, "step": 77205 }, { "epoch": 5.245957331159125, "grad_norm": 1.2958675622940063, "learning_rate": 0.00034450163065633915, "loss": 3.1775, "step": 77210 }, { "epoch": 5.246297051229787, "grad_norm": 1.2417718172073364, 
"learning_rate": 0.00034445916564750643, "loss": 3.1891, "step": 77215 }, { "epoch": 5.246636771300448, "grad_norm": 1.2931008338928223, "learning_rate": 0.00034441670063867376, "loss": 3.4513, "step": 77220 }, { "epoch": 5.24697649137111, "grad_norm": 1.281794786453247, "learning_rate": 0.000344374235629841, "loss": 3.161, "step": 77225 }, { "epoch": 5.247316211441772, "grad_norm": 1.8195439577102661, "learning_rate": 0.00034433177062100827, "loss": 3.3831, "step": 77230 }, { "epoch": 5.247655931512433, "grad_norm": 1.0742005109786987, "learning_rate": 0.0003442893056121756, "loss": 3.1475, "step": 77235 }, { "epoch": 5.2479956515830954, "grad_norm": 1.5477834939956665, "learning_rate": 0.00034424684060334283, "loss": 3.3918, "step": 77240 }, { "epoch": 5.2483353716537575, "grad_norm": 1.3471792936325073, "learning_rate": 0.0003442043755945101, "loss": 3.4475, "step": 77245 }, { "epoch": 5.248675091724419, "grad_norm": 1.4299572706222534, "learning_rate": 0.00034416191058567744, "loss": 3.3723, "step": 77250 }, { "epoch": 5.249014811795081, "grad_norm": 1.3145619630813599, "learning_rate": 0.0003441194455768447, "loss": 3.2062, "step": 77255 }, { "epoch": 5.249354531865743, "grad_norm": 1.0915676355361938, "learning_rate": 0.00034407698056801195, "loss": 3.3441, "step": 77260 }, { "epoch": 5.249694251936404, "grad_norm": 1.7151906490325928, "learning_rate": 0.00034403451555917923, "loss": 3.4019, "step": 77265 }, { "epoch": 5.250033972007066, "grad_norm": 1.2570526599884033, "learning_rate": 0.00034399205055034656, "loss": 3.2407, "step": 77270 }, { "epoch": 5.250373692077728, "grad_norm": 1.3135942220687866, "learning_rate": 0.0003439495855415138, "loss": 3.3154, "step": 77275 }, { "epoch": 5.250713412148389, "grad_norm": 2.4467663764953613, "learning_rate": 0.00034390712053268107, "loss": 3.511, "step": 77280 }, { "epoch": 5.2510531322190515, "grad_norm": 1.5253772735595703, "learning_rate": 0.0003438646555238484, "loss": 3.3773, "step": 77285 }, { "epoch": 
5.2513928522897135, "grad_norm": 1.0128499269485474, "learning_rate": 0.00034382219051501563, "loss": 3.3131, "step": 77290 }, { "epoch": 5.251732572360375, "grad_norm": 1.1331232786178589, "learning_rate": 0.0003437797255061829, "loss": 3.2568, "step": 77295 }, { "epoch": 5.252072292431037, "grad_norm": 1.5420048236846924, "learning_rate": 0.0003437372604973502, "loss": 3.0216, "step": 77300 }, { "epoch": 5.252412012501699, "grad_norm": 1.181372046470642, "learning_rate": 0.00034369479548851747, "loss": 3.2809, "step": 77305 }, { "epoch": 5.25275173257236, "grad_norm": 1.45158052444458, "learning_rate": 0.00034365233047968475, "loss": 3.4332, "step": 77310 }, { "epoch": 5.253091452643022, "grad_norm": 1.144189476966858, "learning_rate": 0.00034360986547085203, "loss": 3.3736, "step": 77315 }, { "epoch": 5.253431172713684, "grad_norm": 1.1777236461639404, "learning_rate": 0.0003435674004620193, "loss": 3.223, "step": 77320 }, { "epoch": 5.253770892784345, "grad_norm": 1.197402000427246, "learning_rate": 0.0003435249354531866, "loss": 3.5215, "step": 77325 }, { "epoch": 5.2541106128550075, "grad_norm": 1.442566990852356, "learning_rate": 0.00034348247044435387, "loss": 3.3442, "step": 77330 }, { "epoch": 5.2544503329256695, "grad_norm": 1.3193517923355103, "learning_rate": 0.0003434400054355211, "loss": 3.3687, "step": 77335 }, { "epoch": 5.254790052996331, "grad_norm": 0.9743009209632874, "learning_rate": 0.00034339754042668843, "loss": 3.4777, "step": 77340 }, { "epoch": 5.255129773066993, "grad_norm": 1.1839343309402466, "learning_rate": 0.0003433550754178557, "loss": 3.5247, "step": 77345 }, { "epoch": 5.255469493137655, "grad_norm": 1.2192647457122803, "learning_rate": 0.00034331261040902294, "loss": 3.3919, "step": 77350 }, { "epoch": 5.255809213208316, "grad_norm": 1.2386685609817505, "learning_rate": 0.00034327014540019027, "loss": 3.3505, "step": 77355 }, { "epoch": 5.256148933278978, "grad_norm": 1.3507753610610962, "learning_rate": 0.00034322768039135755, 
"loss": 3.2587, "step": 77360 }, { "epoch": 5.25648865334964, "grad_norm": 1.1619559526443481, "learning_rate": 0.0003431852153825248, "loss": 3.5005, "step": 77365 }, { "epoch": 5.256828373420301, "grad_norm": 1.0584490299224854, "learning_rate": 0.00034314275037369206, "loss": 3.5011, "step": 77370 }, { "epoch": 5.2571680934909635, "grad_norm": 1.1690561771392822, "learning_rate": 0.0003431002853648594, "loss": 3.3064, "step": 77375 }, { "epoch": 5.2575078135616256, "grad_norm": 1.4674532413482666, "learning_rate": 0.0003430578203560266, "loss": 3.3884, "step": 77380 }, { "epoch": 5.257847533632287, "grad_norm": 1.4661427736282349, "learning_rate": 0.0003430153553471939, "loss": 3.2274, "step": 77385 }, { "epoch": 5.258187253702949, "grad_norm": 1.3891942501068115, "learning_rate": 0.00034297289033836123, "loss": 3.3918, "step": 77390 }, { "epoch": 5.258526973773611, "grad_norm": 1.433388113975525, "learning_rate": 0.00034293042532952846, "loss": 3.3979, "step": 77395 }, { "epoch": 5.258866693844272, "grad_norm": 1.1424039602279663, "learning_rate": 0.00034288796032069574, "loss": 3.2, "step": 77400 }, { "epoch": 5.259206413914934, "grad_norm": 1.387815237045288, "learning_rate": 0.000342845495311863, "loss": 3.4544, "step": 77405 }, { "epoch": 5.259546133985596, "grad_norm": 1.068311095237732, "learning_rate": 0.0003428030303030303, "loss": 3.4035, "step": 77410 }, { "epoch": 5.259885854056257, "grad_norm": 1.3616116046905518, "learning_rate": 0.0003427605652941976, "loss": 3.3261, "step": 77415 }, { "epoch": 5.2602255741269195, "grad_norm": 1.3685451745986938, "learning_rate": 0.00034271810028536486, "loss": 3.5799, "step": 77420 }, { "epoch": 5.260565294197582, "grad_norm": 1.332987904548645, "learning_rate": 0.0003426756352765322, "loss": 3.145, "step": 77425 }, { "epoch": 5.260905014268243, "grad_norm": 1.5235302448272705, "learning_rate": 0.0003426331702676994, "loss": 3.4771, "step": 77430 }, { "epoch": 5.261244734338905, "grad_norm": 1.6119025945663452, 
"learning_rate": 0.0003425907052588667, "loss": 3.3983, "step": 77435 }, { "epoch": 5.261584454409567, "grad_norm": 1.1194210052490234, "learning_rate": 0.000342548240250034, "loss": 3.4477, "step": 77440 }, { "epoch": 5.261924174480228, "grad_norm": 1.2699685096740723, "learning_rate": 0.00034250577524120126, "loss": 3.2998, "step": 77445 }, { "epoch": 5.26226389455089, "grad_norm": 1.2570769786834717, "learning_rate": 0.00034246331023236854, "loss": 3.2014, "step": 77450 }, { "epoch": 5.262603614621552, "grad_norm": 1.6498969793319702, "learning_rate": 0.0003424208452235358, "loss": 3.4186, "step": 77455 }, { "epoch": 5.262943334692213, "grad_norm": 1.4487247467041016, "learning_rate": 0.0003423783802147031, "loss": 3.4257, "step": 77460 }, { "epoch": 5.2632830547628755, "grad_norm": 1.056660771369934, "learning_rate": 0.0003423359152058704, "loss": 3.5337, "step": 77465 }, { "epoch": 5.263622774833538, "grad_norm": 1.5518803596496582, "learning_rate": 0.00034229345019703766, "loss": 3.2059, "step": 77470 }, { "epoch": 5.263962494904199, "grad_norm": 1.319757342338562, "learning_rate": 0.0003422509851882049, "loss": 3.5448, "step": 77475 }, { "epoch": 5.264302214974861, "grad_norm": 1.5584453344345093, "learning_rate": 0.0003422085201793722, "loss": 3.3647, "step": 77480 }, { "epoch": 5.264641935045523, "grad_norm": 1.1577733755111694, "learning_rate": 0.0003421660551705395, "loss": 3.246, "step": 77485 }, { "epoch": 5.264981655116184, "grad_norm": 1.2353332042694092, "learning_rate": 0.0003421235901617067, "loss": 3.2066, "step": 77490 }, { "epoch": 5.265321375186846, "grad_norm": 1.5069290399551392, "learning_rate": 0.00034208112515287406, "loss": 3.2276, "step": 77495 }, { "epoch": 5.265661095257508, "grad_norm": 1.2551180124282837, "learning_rate": 0.00034203866014404134, "loss": 3.4431, "step": 77500 }, { "epoch": 5.266000815328169, "grad_norm": 1.2523651123046875, "learning_rate": 0.00034199619513520857, "loss": 3.3055, "step": 77505 }, { "epoch": 
5.2663405353988315, "grad_norm": 1.2457071542739868, "learning_rate": 0.00034195373012637585, "loss": 3.2741, "step": 77510 }, { "epoch": 5.266680255469494, "grad_norm": 1.0288121700286865, "learning_rate": 0.0003419112651175432, "loss": 3.3906, "step": 77515 }, { "epoch": 5.267019975540155, "grad_norm": 1.6166387796401978, "learning_rate": 0.0003418688001087104, "loss": 3.439, "step": 77520 }, { "epoch": 5.267359695610817, "grad_norm": 1.1378670930862427, "learning_rate": 0.0003418263350998777, "loss": 3.3086, "step": 77525 }, { "epoch": 5.267699415681479, "grad_norm": 1.345792531967163, "learning_rate": 0.000341783870091045, "loss": 3.1806, "step": 77530 }, { "epoch": 5.26803913575214, "grad_norm": 1.6999773979187012, "learning_rate": 0.00034174140508221225, "loss": 3.4034, "step": 77535 }, { "epoch": 5.268378855822802, "grad_norm": 1.2217426300048828, "learning_rate": 0.0003416989400733795, "loss": 3.2342, "step": 77540 }, { "epoch": 5.268718575893463, "grad_norm": 1.0958179235458374, "learning_rate": 0.00034165647506454686, "loss": 3.3658, "step": 77545 }, { "epoch": 5.2690582959641254, "grad_norm": 1.128432035446167, "learning_rate": 0.0003416140100557141, "loss": 3.3282, "step": 77550 }, { "epoch": 5.2693980160347875, "grad_norm": 1.1572654247283936, "learning_rate": 0.00034157154504688137, "loss": 3.2348, "step": 77555 }, { "epoch": 5.269737736105449, "grad_norm": 1.1845556497573853, "learning_rate": 0.00034152908003804865, "loss": 3.2525, "step": 77560 }, { "epoch": 5.270077456176111, "grad_norm": 1.2997958660125732, "learning_rate": 0.0003414866150292159, "loss": 3.522, "step": 77565 }, { "epoch": 5.270417176246773, "grad_norm": 1.175744891166687, "learning_rate": 0.0003414441500203832, "loss": 3.4878, "step": 77570 }, { "epoch": 5.270756896317434, "grad_norm": 1.1612714529037476, "learning_rate": 0.0003414016850115505, "loss": 3.6485, "step": 77575 }, { "epoch": 5.271096616388096, "grad_norm": 1.5481517314910889, "learning_rate": 0.00034135922000271777, 
"loss": 3.3562, "step": 77580 }, { "epoch": 5.271436336458758, "grad_norm": 1.4209474325180054, "learning_rate": 0.00034131675499388505, "loss": 3.2313, "step": 77585 }, { "epoch": 5.271776056529419, "grad_norm": 1.1675386428833008, "learning_rate": 0.00034127428998505233, "loss": 3.0814, "step": 77590 }, { "epoch": 5.2721157766000815, "grad_norm": 1.2831493616104126, "learning_rate": 0.0003412318249762196, "loss": 3.4415, "step": 77595 }, { "epoch": 5.2724554966707435, "grad_norm": 1.447527527809143, "learning_rate": 0.0003411893599673869, "loss": 3.3099, "step": 77600 }, { "epoch": 5.272795216741405, "grad_norm": 1.1399688720703125, "learning_rate": 0.00034114689495855417, "loss": 3.2008, "step": 77605 }, { "epoch": 5.273134936812067, "grad_norm": 1.5729269981384277, "learning_rate": 0.00034110442994972145, "loss": 3.3099, "step": 77610 }, { "epoch": 5.273474656882729, "grad_norm": 1.2636914253234863, "learning_rate": 0.00034106196494088873, "loss": 3.3922, "step": 77615 }, { "epoch": 5.27381437695339, "grad_norm": 1.3398267030715942, "learning_rate": 0.000341019499932056, "loss": 3.4013, "step": 77620 }, { "epoch": 5.274154097024052, "grad_norm": 1.6177403926849365, "learning_rate": 0.0003409770349232233, "loss": 3.1416, "step": 77625 }, { "epoch": 5.274493817094714, "grad_norm": 1.2817474603652954, "learning_rate": 0.0003409345699143905, "loss": 3.4348, "step": 77630 }, { "epoch": 5.274833537165375, "grad_norm": 1.2852692604064941, "learning_rate": 0.00034089210490555785, "loss": 3.4339, "step": 77635 }, { "epoch": 5.2751732572360375, "grad_norm": 1.1398552656173706, "learning_rate": 0.00034084963989672513, "loss": 3.3792, "step": 77640 }, { "epoch": 5.2755129773066995, "grad_norm": 1.1807525157928467, "learning_rate": 0.00034080717488789235, "loss": 3.2051, "step": 77645 }, { "epoch": 5.275852697377361, "grad_norm": 1.4564563035964966, "learning_rate": 0.0003407647098790597, "loss": 3.2259, "step": 77650 }, { "epoch": 5.276192417448023, "grad_norm": 
1.2142200469970703, "learning_rate": 0.00034072224487022697, "loss": 3.2861, "step": 77655 }, { "epoch": 5.276532137518685, "grad_norm": 1.2992076873779297, "learning_rate": 0.0003406797798613942, "loss": 3.4245, "step": 77660 }, { "epoch": 5.276871857589346, "grad_norm": 1.4727177619934082, "learning_rate": 0.0003406373148525615, "loss": 3.2274, "step": 77665 }, { "epoch": 5.277211577660008, "grad_norm": 1.272855520248413, "learning_rate": 0.0003405948498437288, "loss": 3.4832, "step": 77670 }, { "epoch": 5.27755129773067, "grad_norm": 1.3208658695220947, "learning_rate": 0.00034055238483489603, "loss": 3.1727, "step": 77675 }, { "epoch": 5.277891017801331, "grad_norm": 1.5020934343338013, "learning_rate": 0.0003405099198260633, "loss": 3.0205, "step": 77680 }, { "epoch": 5.2782307378719935, "grad_norm": 1.3964407444000244, "learning_rate": 0.00034046745481723065, "loss": 3.4165, "step": 77685 }, { "epoch": 5.278570457942656, "grad_norm": 1.5771247148513794, "learning_rate": 0.0003404249898083979, "loss": 3.2546, "step": 77690 }, { "epoch": 5.278910178013317, "grad_norm": 1.490522861480713, "learning_rate": 0.00034038252479956515, "loss": 3.5217, "step": 77695 }, { "epoch": 5.279249898083979, "grad_norm": 0.9783104062080383, "learning_rate": 0.00034034005979073243, "loss": 3.1447, "step": 77700 }, { "epoch": 5.279589618154641, "grad_norm": 1.236138939857483, "learning_rate": 0.0003402975947818997, "loss": 3.5832, "step": 77705 }, { "epoch": 5.279929338225302, "grad_norm": 1.2152348756790161, "learning_rate": 0.000340255129773067, "loss": 3.037, "step": 77710 }, { "epoch": 5.280269058295964, "grad_norm": 1.372514247894287, "learning_rate": 0.0003402126647642343, "loss": 3.2492, "step": 77715 }, { "epoch": 5.280608778366626, "grad_norm": 1.3093351125717163, "learning_rate": 0.00034017019975540156, "loss": 3.4048, "step": 77720 }, { "epoch": 5.280948498437287, "grad_norm": 1.4505654573440552, "learning_rate": 0.00034012773474656884, "loss": 3.3956, "step": 77725 }, { 
"epoch": 5.2812882185079495, "grad_norm": 1.6040477752685547, "learning_rate": 0.0003400852697377361, "loss": 3.2604, "step": 77730 }, { "epoch": 5.281627938578612, "grad_norm": 1.393136978149414, "learning_rate": 0.00034004280472890334, "loss": 3.4836, "step": 77735 }, { "epoch": 5.281967658649273, "grad_norm": 1.30943763256073, "learning_rate": 0.0003400003397200707, "loss": 3.1642, "step": 77740 }, { "epoch": 5.282307378719935, "grad_norm": 1.1919667720794678, "learning_rate": 0.00033995787471123796, "loss": 3.3126, "step": 77745 }, { "epoch": 5.282647098790597, "grad_norm": 1.167640209197998, "learning_rate": 0.0003399154097024052, "loss": 3.1556, "step": 77750 }, { "epoch": 5.282986818861258, "grad_norm": 1.1995831727981567, "learning_rate": 0.0003398729446935725, "loss": 3.4556, "step": 77755 }, { "epoch": 5.28332653893192, "grad_norm": 1.5056208372116089, "learning_rate": 0.0003398304796847398, "loss": 3.4326, "step": 77760 }, { "epoch": 5.283666259002582, "grad_norm": 1.1560407876968384, "learning_rate": 0.0003397880146759071, "loss": 2.8488, "step": 77765 }, { "epoch": 5.284005979073243, "grad_norm": 1.1752831935882568, "learning_rate": 0.0003397455496670743, "loss": 3.3302, "step": 77770 }, { "epoch": 5.2843456991439055, "grad_norm": 1.3047723770141602, "learning_rate": 0.00033970308465824164, "loss": 3.3796, "step": 77775 }, { "epoch": 5.284685419214568, "grad_norm": 1.4516550302505493, "learning_rate": 0.0003396606196494089, "loss": 3.1455, "step": 77780 }, { "epoch": 5.285025139285229, "grad_norm": 1.2879512310028076, "learning_rate": 0.00033961815464057614, "loss": 3.3112, "step": 77785 }, { "epoch": 5.285364859355891, "grad_norm": 1.4673739671707153, "learning_rate": 0.0003395756896317435, "loss": 3.4894, "step": 77790 }, { "epoch": 5.285704579426553, "grad_norm": 1.2696900367736816, "learning_rate": 0.00033953322462291076, "loss": 3.2113, "step": 77795 }, { "epoch": 5.286044299497214, "grad_norm": 1.4563772678375244, "learning_rate": 
0.000339490759614078, "loss": 3.2, "step": 77800 }, { "epoch": 5.286384019567876, "grad_norm": 1.081413745880127, "learning_rate": 0.00033944829460524526, "loss": 3.3719, "step": 77805 }, { "epoch": 5.286723739638538, "grad_norm": 1.1899012327194214, "learning_rate": 0.0003394058295964126, "loss": 3.3885, "step": 77810 }, { "epoch": 5.287063459709199, "grad_norm": 1.461724042892456, "learning_rate": 0.0003393633645875798, "loss": 3.2877, "step": 77815 }, { "epoch": 5.2874031797798615, "grad_norm": 1.256651759147644, "learning_rate": 0.0003393208995787471, "loss": 3.3314, "step": 77820 }, { "epoch": 5.287742899850523, "grad_norm": 1.0484434366226196, "learning_rate": 0.00033927843456991444, "loss": 3.3854, "step": 77825 }, { "epoch": 5.288082619921185, "grad_norm": 1.05916428565979, "learning_rate": 0.00033923596956108166, "loss": 3.0912, "step": 77830 }, { "epoch": 5.288422339991847, "grad_norm": 1.2991796731948853, "learning_rate": 0.00033919350455224894, "loss": 3.4333, "step": 77835 }, { "epoch": 5.288762060062508, "grad_norm": 1.2031279802322388, "learning_rate": 0.0003391510395434163, "loss": 3.609, "step": 77840 }, { "epoch": 5.28910178013317, "grad_norm": 1.5211169719696045, "learning_rate": 0.0003391085745345835, "loss": 3.2804, "step": 77845 }, { "epoch": 5.289441500203832, "grad_norm": 1.2567496299743652, "learning_rate": 0.0003390661095257508, "loss": 3.2625, "step": 77850 }, { "epoch": 5.289781220274493, "grad_norm": 1.538437008857727, "learning_rate": 0.00033902364451691806, "loss": 3.4461, "step": 77855 }, { "epoch": 5.2901209403451555, "grad_norm": 1.2278295755386353, "learning_rate": 0.00033898117950808534, "loss": 3.4237, "step": 77860 }, { "epoch": 5.2904606604158175, "grad_norm": 1.2515496015548706, "learning_rate": 0.0003389387144992526, "loss": 3.2165, "step": 77865 }, { "epoch": 5.290800380486479, "grad_norm": 0.8920322060585022, "learning_rate": 0.0003388962494904199, "loss": 3.2331, "step": 77870 }, { "epoch": 5.291140100557141, "grad_norm": 
1.1861764192581177, "learning_rate": 0.0003388537844815872, "loss": 3.5494, "step": 77875 }, { "epoch": 5.291479820627803, "grad_norm": 1.5289312601089478, "learning_rate": 0.00033881131947275446, "loss": 3.1595, "step": 77880 }, { "epoch": 5.291819540698464, "grad_norm": 1.2151811122894287, "learning_rate": 0.00033876885446392174, "loss": 3.2235, "step": 77885 }, { "epoch": 5.292159260769126, "grad_norm": 1.2194135189056396, "learning_rate": 0.00033872638945508897, "loss": 3.3905, "step": 77890 }, { "epoch": 5.292498980839788, "grad_norm": 1.5587830543518066, "learning_rate": 0.0003386839244462563, "loss": 3.3298, "step": 77895 }, { "epoch": 5.292838700910449, "grad_norm": 1.3896065950393677, "learning_rate": 0.0003386414594374236, "loss": 3.3137, "step": 77900 }, { "epoch": 5.2931784209811115, "grad_norm": 1.7766565084457397, "learning_rate": 0.0003385989944285908, "loss": 3.2201, "step": 77905 }, { "epoch": 5.2935181410517735, "grad_norm": 1.4437592029571533, "learning_rate": 0.00033855652941975814, "loss": 3.2996, "step": 77910 }, { "epoch": 5.293857861122435, "grad_norm": 1.359389066696167, "learning_rate": 0.0003385140644109254, "loss": 3.3606, "step": 77915 }, { "epoch": 5.294197581193097, "grad_norm": 1.8118075132369995, "learning_rate": 0.00033847159940209265, "loss": 3.2868, "step": 77920 }, { "epoch": 5.294537301263759, "grad_norm": 1.3502877950668335, "learning_rate": 0.00033842913439325993, "loss": 3.2083, "step": 77925 }, { "epoch": 5.29487702133442, "grad_norm": 1.2680612802505493, "learning_rate": 0.00033838666938442726, "loss": 3.2501, "step": 77930 }, { "epoch": 5.295216741405082, "grad_norm": 1.0947519540786743, "learning_rate": 0.00033834420437559454, "loss": 3.5383, "step": 77935 }, { "epoch": 5.295556461475744, "grad_norm": 1.6659972667694092, "learning_rate": 0.00033830173936676177, "loss": 3.3247, "step": 77940 }, { "epoch": 5.295896181546405, "grad_norm": 1.4862651824951172, "learning_rate": 0.0003382592743579291, "loss": 3.4307, "step": 
77945 }, { "epoch": 5.2962359016170675, "grad_norm": 1.1000113487243652, "learning_rate": 0.0003382168093490964, "loss": 3.4636, "step": 77950 }, { "epoch": 5.2965756216877296, "grad_norm": 1.1672865152359009, "learning_rate": 0.0003381743443402636, "loss": 3.3586, "step": 77955 }, { "epoch": 5.296915341758391, "grad_norm": 1.1527769565582275, "learning_rate": 0.0003381318793314309, "loss": 3.208, "step": 77960 }, { "epoch": 5.297255061829053, "grad_norm": 1.4200021028518677, "learning_rate": 0.0003380894143225982, "loss": 3.336, "step": 77965 }, { "epoch": 5.297594781899715, "grad_norm": 1.1340326070785522, "learning_rate": 0.00033804694931376545, "loss": 3.3352, "step": 77970 }, { "epoch": 5.297934501970376, "grad_norm": 1.4579005241394043, "learning_rate": 0.00033800448430493273, "loss": 3.267, "step": 77975 }, { "epoch": 5.298274222041038, "grad_norm": 1.3524045944213867, "learning_rate": 0.00033796201929610006, "loss": 3.6125, "step": 77980 }, { "epoch": 5.2986139421117, "grad_norm": 1.2351655960083008, "learning_rate": 0.0003379195542872673, "loss": 3.3092, "step": 77985 }, { "epoch": 5.298953662182361, "grad_norm": 1.234057068824768, "learning_rate": 0.00033787708927843457, "loss": 3.3695, "step": 77990 }, { "epoch": 5.2992933822530235, "grad_norm": 1.8663302659988403, "learning_rate": 0.00033783462426960185, "loss": 3.2025, "step": 77995 }, { "epoch": 5.299633102323686, "grad_norm": 1.1808922290802002, "learning_rate": 0.00033779215926076913, "loss": 3.1968, "step": 78000 }, { "epoch": 5.299972822394347, "grad_norm": 1.6792043447494507, "learning_rate": 0.0003377496942519364, "loss": 3.2616, "step": 78005 }, { "epoch": 5.300312542465009, "grad_norm": 1.4892669916152954, "learning_rate": 0.0003377072292431037, "loss": 3.0936, "step": 78010 }, { "epoch": 5.300652262535671, "grad_norm": 1.3056280612945557, "learning_rate": 0.00033766476423427097, "loss": 3.3664, "step": 78015 }, { "epoch": 5.300991982606332, "grad_norm": 1.2448794841766357, "learning_rate": 
0.00033762229922543825, "loss": 3.285, "step": 78020 }, { "epoch": 5.301331702676994, "grad_norm": 1.6375060081481934, "learning_rate": 0.00033757983421660553, "loss": 3.3008, "step": 78025 }, { "epoch": 5.301671422747656, "grad_norm": 1.2714054584503174, "learning_rate": 0.00033753736920777276, "loss": 3.2657, "step": 78030 }, { "epoch": 5.302011142818317, "grad_norm": 1.626105546951294, "learning_rate": 0.0003374949041989401, "loss": 3.4729, "step": 78035 }, { "epoch": 5.3023508628889795, "grad_norm": 1.2325104475021362, "learning_rate": 0.00033745243919010737, "loss": 3.4093, "step": 78040 }, { "epoch": 5.302690582959642, "grad_norm": 0.9552026391029358, "learning_rate": 0.0003374099741812746, "loss": 3.402, "step": 78045 }, { "epoch": 5.303030303030303, "grad_norm": 1.7612649202346802, "learning_rate": 0.00033736750917244193, "loss": 3.1698, "step": 78050 }, { "epoch": 5.303370023100965, "grad_norm": 1.5624852180480957, "learning_rate": 0.0003373250441636092, "loss": 3.4397, "step": 78055 }, { "epoch": 5.303709743171627, "grad_norm": 1.2124720811843872, "learning_rate": 0.00033728257915477644, "loss": 3.3007, "step": 78060 }, { "epoch": 5.304049463242288, "grad_norm": 1.3912559747695923, "learning_rate": 0.0003372401141459437, "loss": 3.5442, "step": 78065 }, { "epoch": 5.30438918331295, "grad_norm": 1.34218430519104, "learning_rate": 0.00033719764913711105, "loss": 3.2477, "step": 78070 }, { "epoch": 5.304728903383612, "grad_norm": 1.6306846141815186, "learning_rate": 0.0003371551841282783, "loss": 3.1403, "step": 78075 }, { "epoch": 5.305068623454273, "grad_norm": 1.487286925315857, "learning_rate": 0.00033711271911944556, "loss": 3.4703, "step": 78080 }, { "epoch": 5.3054083435249355, "grad_norm": 1.240784764289856, "learning_rate": 0.0003370702541106129, "loss": 3.1912, "step": 78085 }, { "epoch": 5.305748063595598, "grad_norm": 1.4878710508346558, "learning_rate": 0.0003370277891017801, "loss": 3.3758, "step": 78090 }, { "epoch": 5.306087783666259, 
"grad_norm": 1.2879090309143066, "learning_rate": 0.0003369853240929474, "loss": 3.3328, "step": 78095 }, { "epoch": 5.306427503736921, "grad_norm": 1.2778265476226807, "learning_rate": 0.00033694285908411473, "loss": 3.126, "step": 78100 }, { "epoch": 5.306767223807583, "grad_norm": 1.142805814743042, "learning_rate": 0.000336900394075282, "loss": 3.4685, "step": 78105 }, { "epoch": 5.307106943878244, "grad_norm": 1.612791657447815, "learning_rate": 0.00033685792906644924, "loss": 3.7206, "step": 78110 }, { "epoch": 5.307446663948906, "grad_norm": 1.1234370470046997, "learning_rate": 0.0003368154640576165, "loss": 3.4555, "step": 78115 }, { "epoch": 5.307786384019568, "grad_norm": 1.0352420806884766, "learning_rate": 0.00033677299904878385, "loss": 3.1681, "step": 78120 }, { "epoch": 5.3081261040902294, "grad_norm": 1.112146258354187, "learning_rate": 0.0003367305340399511, "loss": 3.1386, "step": 78125 }, { "epoch": 5.3084658241608915, "grad_norm": 1.3903610706329346, "learning_rate": 0.00033668806903111836, "loss": 3.4658, "step": 78130 }, { "epoch": 5.308805544231554, "grad_norm": 1.0771167278289795, "learning_rate": 0.0003366456040222857, "loss": 3.1681, "step": 78135 }, { "epoch": 5.309145264302215, "grad_norm": 1.0907037258148193, "learning_rate": 0.0003366031390134529, "loss": 3.2175, "step": 78140 }, { "epoch": 5.309484984372877, "grad_norm": 1.4778424501419067, "learning_rate": 0.0003365606740046202, "loss": 3.088, "step": 78145 }, { "epoch": 5.309824704443539, "grad_norm": 1.2486037015914917, "learning_rate": 0.0003365182089957875, "loss": 3.4459, "step": 78150 }, { "epoch": 5.3101644245142, "grad_norm": 1.1535438299179077, "learning_rate": 0.00033647574398695476, "loss": 3.4738, "step": 78155 }, { "epoch": 5.310504144584862, "grad_norm": 1.3399616479873657, "learning_rate": 0.00033643327897812204, "loss": 3.4239, "step": 78160 }, { "epoch": 5.310843864655524, "grad_norm": 1.0907036066055298, "learning_rate": 0.0003363908139692893, "loss": 3.7096, 
"step": 78165 }, { "epoch": 5.3111835847261855, "grad_norm": 1.0812755823135376, "learning_rate": 0.0003363483489604566, "loss": 3.28, "step": 78170 }, { "epoch": 5.3115233047968475, "grad_norm": 1.4028637409210205, "learning_rate": 0.0003363058839516239, "loss": 3.4841, "step": 78175 }, { "epoch": 5.31186302486751, "grad_norm": 1.9818763732910156, "learning_rate": 0.00033626341894279116, "loss": 3.7435, "step": 78180 }, { "epoch": 5.312202744938171, "grad_norm": 1.230970859527588, "learning_rate": 0.0003362209539339584, "loss": 3.0266, "step": 78185 }, { "epoch": 5.312542465008833, "grad_norm": 1.2558287382125854, "learning_rate": 0.0003361784889251257, "loss": 3.045, "step": 78190 }, { "epoch": 5.312882185079495, "grad_norm": 1.1385436058044434, "learning_rate": 0.000336136023916293, "loss": 3.2232, "step": 78195 }, { "epoch": 5.313221905150156, "grad_norm": 1.1762263774871826, "learning_rate": 0.0003360935589074602, "loss": 3.3638, "step": 78200 }, { "epoch": 5.313561625220818, "grad_norm": 2.0133261680603027, "learning_rate": 0.00033605109389862756, "loss": 3.6017, "step": 78205 }, { "epoch": 5.31390134529148, "grad_norm": 1.373490571975708, "learning_rate": 0.00033600862888979484, "loss": 3.4997, "step": 78210 }, { "epoch": 5.3142410653621415, "grad_norm": 1.8161826133728027, "learning_rate": 0.00033596616388096207, "loss": 3.0641, "step": 78215 }, { "epoch": 5.3145807854328035, "grad_norm": 1.1360419988632202, "learning_rate": 0.00033592369887212935, "loss": 3.3536, "step": 78220 }, { "epoch": 5.314920505503465, "grad_norm": 1.237380027770996, "learning_rate": 0.0003358812338632967, "loss": 3.5072, "step": 78225 }, { "epoch": 5.315260225574127, "grad_norm": 1.5952563285827637, "learning_rate": 0.0003358387688544639, "loss": 3.3061, "step": 78230 }, { "epoch": 5.315599945644789, "grad_norm": 1.1516368389129639, "learning_rate": 0.0003357963038456312, "loss": 3.1287, "step": 78235 }, { "epoch": 5.31593966571545, "grad_norm": 1.1352694034576416, "learning_rate": 
0.0003357538388367985, "loss": 3.415, "step": 78240 }, { "epoch": 5.316279385786112, "grad_norm": 1.2830572128295898, "learning_rate": 0.00033571137382796575, "loss": 3.3041, "step": 78245 }, { "epoch": 5.316619105856774, "grad_norm": 1.5622073411941528, "learning_rate": 0.000335668908819133, "loss": 3.1796, "step": 78250 }, { "epoch": 5.316958825927435, "grad_norm": 1.401415467262268, "learning_rate": 0.0003356264438103003, "loss": 3.4056, "step": 78255 }, { "epoch": 5.3172985459980975, "grad_norm": 1.205122709274292, "learning_rate": 0.0003355839788014676, "loss": 3.4429, "step": 78260 }, { "epoch": 5.31763826606876, "grad_norm": 1.2491841316223145, "learning_rate": 0.00033554151379263487, "loss": 3.1601, "step": 78265 }, { "epoch": 5.317977986139421, "grad_norm": 1.1854430437088013, "learning_rate": 0.00033549904878380215, "loss": 3.3663, "step": 78270 }, { "epoch": 5.318317706210083, "grad_norm": 1.3048378229141235, "learning_rate": 0.0003354565837749695, "loss": 3.2797, "step": 78275 }, { "epoch": 5.318657426280745, "grad_norm": 1.31524658203125, "learning_rate": 0.0003354141187661367, "loss": 2.9685, "step": 78280 }, { "epoch": 5.318997146351406, "grad_norm": 1.5868388414382935, "learning_rate": 0.000335371653757304, "loss": 3.2321, "step": 78285 }, { "epoch": 5.319336866422068, "grad_norm": 1.1358301639556885, "learning_rate": 0.00033532918874847127, "loss": 3.3228, "step": 78290 }, { "epoch": 5.31967658649273, "grad_norm": 1.4781080484390259, "learning_rate": 0.00033528672373963855, "loss": 3.4095, "step": 78295 }, { "epoch": 5.320016306563391, "grad_norm": 1.2445346117019653, "learning_rate": 0.00033524425873080583, "loss": 3.1875, "step": 78300 }, { "epoch": 5.3203560266340535, "grad_norm": 1.281026840209961, "learning_rate": 0.0003352017937219731, "loss": 3.3173, "step": 78305 }, { "epoch": 5.320695746704716, "grad_norm": 1.2970389127731323, "learning_rate": 0.0003351593287131404, "loss": 3.1704, "step": 78310 }, { "epoch": 5.321035466775377, 
"grad_norm": 1.305619239807129, "learning_rate": 0.00033511686370430767, "loss": 3.4585, "step": 78315 }, { "epoch": 5.321375186846039, "grad_norm": 1.2414119243621826, "learning_rate": 0.00033507439869547495, "loss": 3.129, "step": 78320 }, { "epoch": 5.321714906916701, "grad_norm": 1.3439279794692993, "learning_rate": 0.0003350319336866422, "loss": 3.2912, "step": 78325 }, { "epoch": 5.322054626987362, "grad_norm": 1.236528992652893, "learning_rate": 0.0003349894686778095, "loss": 3.1267, "step": 78330 }, { "epoch": 5.322394347058024, "grad_norm": 1.2856924533843994, "learning_rate": 0.0003349470036689768, "loss": 3.3441, "step": 78335 }, { "epoch": 5.322734067128686, "grad_norm": 1.2119526863098145, "learning_rate": 0.000334904538660144, "loss": 3.4635, "step": 78340 }, { "epoch": 5.323073787199347, "grad_norm": 1.4485899209976196, "learning_rate": 0.00033486207365131135, "loss": 3.4495, "step": 78345 }, { "epoch": 5.3234135072700095, "grad_norm": 1.0821880102157593, "learning_rate": 0.00033481960864247863, "loss": 3.2886, "step": 78350 }, { "epoch": 5.323753227340672, "grad_norm": 1.439361333847046, "learning_rate": 0.00033477714363364585, "loss": 3.3968, "step": 78355 }, { "epoch": 5.324092947411333, "grad_norm": 1.2560222148895264, "learning_rate": 0.00033473467862481313, "loss": 3.4258, "step": 78360 }, { "epoch": 5.324432667481995, "grad_norm": 1.529812216758728, "learning_rate": 0.00033469221361598047, "loss": 3.3226, "step": 78365 }, { "epoch": 5.324772387552657, "grad_norm": 1.174808382987976, "learning_rate": 0.0003346497486071477, "loss": 3.3366, "step": 78370 }, { "epoch": 5.325112107623318, "grad_norm": 1.1597875356674194, "learning_rate": 0.000334607283598315, "loss": 3.2024, "step": 78375 }, { "epoch": 5.32545182769398, "grad_norm": 1.4943922758102417, "learning_rate": 0.0003345648185894823, "loss": 3.4531, "step": 78380 }, { "epoch": 5.325791547764642, "grad_norm": 1.3397789001464844, "learning_rate": 0.00033452235358064953, "loss": 3.3804, 
"step": 78385 }, { "epoch": 5.326131267835303, "grad_norm": 1.17921781539917, "learning_rate": 0.0003344798885718168, "loss": 3.2129, "step": 78390 }, { "epoch": 5.3264709879059655, "grad_norm": 1.4187359809875488, "learning_rate": 0.00033443742356298415, "loss": 3.3691, "step": 78395 }, { "epoch": 5.326810707976628, "grad_norm": 1.7109862565994263, "learning_rate": 0.0003343949585541514, "loss": 3.2293, "step": 78400 }, { "epoch": 5.327150428047289, "grad_norm": 1.2818585634231567, "learning_rate": 0.00033435249354531865, "loss": 3.2563, "step": 78405 }, { "epoch": 5.327490148117951, "grad_norm": 1.0834717750549316, "learning_rate": 0.00033431002853648593, "loss": 3.287, "step": 78410 }, { "epoch": 5.327829868188613, "grad_norm": 1.7381904125213623, "learning_rate": 0.0003342675635276532, "loss": 3.2598, "step": 78415 }, { "epoch": 5.328169588259274, "grad_norm": 1.59710693359375, "learning_rate": 0.0003342250985188205, "loss": 3.0064, "step": 78420 }, { "epoch": 5.328509308329936, "grad_norm": 1.3929604291915894, "learning_rate": 0.0003341826335099878, "loss": 3.2252, "step": 78425 }, { "epoch": 5.328849028400598, "grad_norm": 1.1577495336532593, "learning_rate": 0.00033414016850115505, "loss": 3.2253, "step": 78430 }, { "epoch": 5.3291887484712595, "grad_norm": 1.2451642751693726, "learning_rate": 0.00033409770349232234, "loss": 3.1191, "step": 78435 }, { "epoch": 5.3295284685419215, "grad_norm": 1.5661088228225708, "learning_rate": 0.0003340552384834896, "loss": 3.5171, "step": 78440 }, { "epoch": 5.329868188612584, "grad_norm": 1.1927828788757324, "learning_rate": 0.0003340127734746569, "loss": 3.227, "step": 78445 }, { "epoch": 5.330207908683245, "grad_norm": 1.4019824266433716, "learning_rate": 0.0003339703084658242, "loss": 3.3326, "step": 78450 }, { "epoch": 5.330547628753907, "grad_norm": 1.2323046922683716, "learning_rate": 0.00033392784345699146, "loss": 3.5113, "step": 78455 }, { "epoch": 5.330887348824569, "grad_norm": 1.2670155763626099, 
"learning_rate": 0.00033388537844815874, "loss": 3.5318, "step": 78460 }, { "epoch": 5.33122706889523, "grad_norm": 1.2993744611740112, "learning_rate": 0.000333842913439326, "loss": 3.5083, "step": 78465 }, { "epoch": 5.331566788965892, "grad_norm": 1.213822603225708, "learning_rate": 0.0003338004484304933, "loss": 3.1266, "step": 78470 }, { "epoch": 5.331906509036554, "grad_norm": 1.3108371496200562, "learning_rate": 0.0003337579834216606, "loss": 3.2523, "step": 78475 }, { "epoch": 5.3322462291072155, "grad_norm": 1.1273857355117798, "learning_rate": 0.0003337155184128278, "loss": 3.4906, "step": 78480 }, { "epoch": 5.3325859491778775, "grad_norm": 1.678335189819336, "learning_rate": 0.00033367305340399514, "loss": 3.323, "step": 78485 }, { "epoch": 5.33292566924854, "grad_norm": 1.5252732038497925, "learning_rate": 0.0003336305883951624, "loss": 3.3411, "step": 78490 }, { "epoch": 5.333265389319201, "grad_norm": 1.3213074207305908, "learning_rate": 0.00033358812338632964, "loss": 3.1228, "step": 78495 }, { "epoch": 5.333605109389863, "grad_norm": 1.2334355115890503, "learning_rate": 0.000333545658377497, "loss": 3.1961, "step": 78500 }, { "epoch": 5.333944829460524, "grad_norm": 1.1524741649627686, "learning_rate": 0.00033350319336866426, "loss": 3.5491, "step": 78505 }, { "epoch": 5.334284549531186, "grad_norm": 1.3577600717544556, "learning_rate": 0.0003334607283598315, "loss": 3.1513, "step": 78510 }, { "epoch": 5.334624269601848, "grad_norm": 1.3996444940567017, "learning_rate": 0.00033341826335099876, "loss": 3.1349, "step": 78515 }, { "epoch": 5.334963989672509, "grad_norm": 1.1564885377883911, "learning_rate": 0.0003333757983421661, "loss": 3.4536, "step": 78520 }, { "epoch": 5.3353037097431715, "grad_norm": 1.0695300102233887, "learning_rate": 0.0003333333333333333, "loss": 3.6817, "step": 78525 }, { "epoch": 5.3356434298138335, "grad_norm": 1.1322247982025146, "learning_rate": 0.0003332908683245006, "loss": 3.4391, "step": 78530 }, { "epoch": 
5.335983149884495, "grad_norm": 1.1519473791122437, "learning_rate": 0.00033324840331566794, "loss": 3.4445, "step": 78535 }, { "epoch": 5.336322869955157, "grad_norm": 1.282899022102356, "learning_rate": 0.00033320593830683516, "loss": 3.0011, "step": 78540 }, { "epoch": 5.336662590025819, "grad_norm": 1.1874845027923584, "learning_rate": 0.00033316347329800244, "loss": 3.2509, "step": 78545 }, { "epoch": 5.33700231009648, "grad_norm": 1.1690049171447754, "learning_rate": 0.0003331210082891697, "loss": 3.0908, "step": 78550 }, { "epoch": 5.337342030167142, "grad_norm": 1.0996838808059692, "learning_rate": 0.000333078543280337, "loss": 3.2535, "step": 78555 }, { "epoch": 5.337681750237804, "grad_norm": 1.2006475925445557, "learning_rate": 0.0003330360782715043, "loss": 3.5357, "step": 78560 }, { "epoch": 5.338021470308465, "grad_norm": 1.417529821395874, "learning_rate": 0.00033299361326267156, "loss": 3.0943, "step": 78565 }, { "epoch": 5.3383611903791275, "grad_norm": 1.3367176055908203, "learning_rate": 0.00033295114825383884, "loss": 2.999, "step": 78570 }, { "epoch": 5.33870091044979, "grad_norm": 1.3676787614822388, "learning_rate": 0.0003329086832450061, "loss": 2.9944, "step": 78575 }, { "epoch": 5.339040630520451, "grad_norm": 1.3034327030181885, "learning_rate": 0.0003328662182361734, "loss": 3.2616, "step": 78580 }, { "epoch": 5.339380350591113, "grad_norm": 1.6876492500305176, "learning_rate": 0.00033282375322734063, "loss": 2.9734, "step": 78585 }, { "epoch": 5.339720070661775, "grad_norm": 1.5573909282684326, "learning_rate": 0.00033278128821850796, "loss": 3.1623, "step": 78590 }, { "epoch": 5.340059790732436, "grad_norm": 1.431626796722412, "learning_rate": 0.00033273882320967524, "loss": 3.4161, "step": 78595 }, { "epoch": 5.340399510803098, "grad_norm": 1.3788461685180664, "learning_rate": 0.00033269635820084247, "loss": 3.3945, "step": 78600 }, { "epoch": 5.34073923087376, "grad_norm": 1.3109595775604248, "learning_rate": 0.0003326538931920098, 
"loss": 3.2716, "step": 78605 }, { "epoch": 5.341078950944421, "grad_norm": 1.477562427520752, "learning_rate": 0.0003326114281831771, "loss": 3.2755, "step": 78610 }, { "epoch": 5.3414186710150835, "grad_norm": 1.4710614681243896, "learning_rate": 0.00033256896317434436, "loss": 3.2871, "step": 78615 }, { "epoch": 5.341758391085746, "grad_norm": 1.4207918643951416, "learning_rate": 0.0003325264981655116, "loss": 3.4721, "step": 78620 }, { "epoch": 5.342098111156407, "grad_norm": 1.7541697025299072, "learning_rate": 0.0003324840331566789, "loss": 3.2172, "step": 78625 }, { "epoch": 5.342437831227069, "grad_norm": 1.3132632970809937, "learning_rate": 0.0003324415681478462, "loss": 3.6312, "step": 78630 }, { "epoch": 5.342777551297731, "grad_norm": 1.3735706806182861, "learning_rate": 0.00033239910313901343, "loss": 3.4436, "step": 78635 }, { "epoch": 5.343117271368392, "grad_norm": 1.1200306415557861, "learning_rate": 0.00033235663813018076, "loss": 3.0635, "step": 78640 }, { "epoch": 5.343456991439054, "grad_norm": 1.5390589237213135, "learning_rate": 0.00033231417312134804, "loss": 3.4314, "step": 78645 }, { "epoch": 5.343796711509716, "grad_norm": 1.3904105424880981, "learning_rate": 0.00033227170811251527, "loss": 3.1229, "step": 78650 }, { "epoch": 5.344136431580377, "grad_norm": 1.297414779663086, "learning_rate": 0.00033222924310368255, "loss": 3.2546, "step": 78655 }, { "epoch": 5.3444761516510395, "grad_norm": 1.371975064277649, "learning_rate": 0.0003321867780948499, "loss": 3.5802, "step": 78660 }, { "epoch": 5.344815871721702, "grad_norm": 1.4774730205535889, "learning_rate": 0.0003321443130860171, "loss": 3.3452, "step": 78665 }, { "epoch": 5.345155591792363, "grad_norm": 1.3950344324111938, "learning_rate": 0.0003321018480771844, "loss": 3.3555, "step": 78670 }, { "epoch": 5.345495311863025, "grad_norm": 1.35014009475708, "learning_rate": 0.0003320593830683517, "loss": 3.2088, "step": 78675 }, { "epoch": 5.345835031933687, "grad_norm": 
1.6751480102539062, "learning_rate": 0.00033201691805951895, "loss": 3.2152, "step": 78680 }, { "epoch": 5.346174752004348, "grad_norm": 1.1551661491394043, "learning_rate": 0.00033197445305068623, "loss": 3.3155, "step": 78685 }, { "epoch": 5.34651447207501, "grad_norm": 1.0417512655258179, "learning_rate": 0.00033193198804185356, "loss": 3.5322, "step": 78690 }, { "epoch": 5.346854192145672, "grad_norm": 1.1975915431976318, "learning_rate": 0.0003318895230330208, "loss": 3.5443, "step": 78695 }, { "epoch": 5.3471939122163334, "grad_norm": 1.3049616813659668, "learning_rate": 0.00033184705802418807, "loss": 3.4662, "step": 78700 }, { "epoch": 5.3475336322869955, "grad_norm": 1.1163090467453003, "learning_rate": 0.00033180459301535535, "loss": 3.3859, "step": 78705 }, { "epoch": 5.347873352357658, "grad_norm": 0.9921570420265198, "learning_rate": 0.00033176212800652263, "loss": 2.9419, "step": 78710 }, { "epoch": 5.348213072428319, "grad_norm": 1.0273956060409546, "learning_rate": 0.0003317196629976899, "loss": 3.1836, "step": 78715 }, { "epoch": 5.348552792498981, "grad_norm": 1.4911655187606812, "learning_rate": 0.0003316771979888572, "loss": 3.249, "step": 78720 }, { "epoch": 5.348892512569643, "grad_norm": 1.0864390134811401, "learning_rate": 0.00033163473298002447, "loss": 3.4787, "step": 78725 }, { "epoch": 5.349232232640304, "grad_norm": 1.415465235710144, "learning_rate": 0.00033159226797119175, "loss": 3.4655, "step": 78730 }, { "epoch": 5.349571952710966, "grad_norm": 1.3818591833114624, "learning_rate": 0.00033154980296235903, "loss": 3.274, "step": 78735 }, { "epoch": 5.349911672781628, "grad_norm": 1.2052631378173828, "learning_rate": 0.00033150733795352626, "loss": 3.4016, "step": 78740 }, { "epoch": 5.3502513928522895, "grad_norm": 1.5378036499023438, "learning_rate": 0.0003314648729446936, "loss": 3.2027, "step": 78745 }, { "epoch": 5.3505911129229515, "grad_norm": 1.4458260536193848, "learning_rate": 0.00033142240793586087, "loss": 3.3191, "step": 
78750 }, { "epoch": 5.350930832993614, "grad_norm": 1.1401846408843994, "learning_rate": 0.0003313799429270281, "loss": 3.3702, "step": 78755 }, { "epoch": 5.351270553064275, "grad_norm": 1.112899899482727, "learning_rate": 0.00033133747791819543, "loss": 3.2877, "step": 78760 }, { "epoch": 5.351610273134937, "grad_norm": 1.5205464363098145, "learning_rate": 0.0003312950129093627, "loss": 3.5575, "step": 78765 }, { "epoch": 5.351949993205599, "grad_norm": 1.2706520557403564, "learning_rate": 0.00033125254790052994, "loss": 3.1302, "step": 78770 }, { "epoch": 5.35228971327626, "grad_norm": 1.142291784286499, "learning_rate": 0.0003312100828916972, "loss": 3.1843, "step": 78775 }, { "epoch": 5.352629433346922, "grad_norm": 1.312448263168335, "learning_rate": 0.00033116761788286455, "loss": 3.3731, "step": 78780 }, { "epoch": 5.352969153417584, "grad_norm": 1.5620882511138916, "learning_rate": 0.00033112515287403183, "loss": 3.1056, "step": 78785 }, { "epoch": 5.3533088734882455, "grad_norm": 1.1059499979019165, "learning_rate": 0.00033108268786519906, "loss": 3.3956, "step": 78790 }, { "epoch": 5.3536485935589075, "grad_norm": 1.2764275074005127, "learning_rate": 0.0003310402228563664, "loss": 3.1781, "step": 78795 }, { "epoch": 5.35398831362957, "grad_norm": 1.2305145263671875, "learning_rate": 0.00033099775784753367, "loss": 3.0396, "step": 78800 }, { "epoch": 5.354328033700231, "grad_norm": 1.3664946556091309, "learning_rate": 0.0003309552928387009, "loss": 3.3003, "step": 78805 }, { "epoch": 5.354667753770893, "grad_norm": 1.568671464920044, "learning_rate": 0.0003309128278298682, "loss": 3.2448, "step": 78810 }, { "epoch": 5.355007473841555, "grad_norm": 1.1440547704696655, "learning_rate": 0.0003308703628210355, "loss": 3.224, "step": 78815 }, { "epoch": 5.355347193912216, "grad_norm": 1.2858022451400757, "learning_rate": 0.00033082789781220274, "loss": 3.4255, "step": 78820 }, { "epoch": 5.355686913982878, "grad_norm": 1.2971869707107544, "learning_rate": 
0.00033078543280337, "loss": 3.3907, "step": 78825 }, { "epoch": 5.35602663405354, "grad_norm": 1.224108099937439, "learning_rate": 0.00033074296779453735, "loss": 3.2738, "step": 78830 }, { "epoch": 5.3563663541242015, "grad_norm": 1.410037875175476, "learning_rate": 0.0003307005027857046, "loss": 3.3604, "step": 78835 }, { "epoch": 5.3567060741948636, "grad_norm": 1.359977126121521, "learning_rate": 0.00033065803777687186, "loss": 3.357, "step": 78840 }, { "epoch": 5.357045794265526, "grad_norm": 1.3335473537445068, "learning_rate": 0.00033061557276803914, "loss": 3.32, "step": 78845 }, { "epoch": 5.357385514336187, "grad_norm": 1.11476469039917, "learning_rate": 0.0003305731077592064, "loss": 3.2584, "step": 78850 }, { "epoch": 5.357725234406849, "grad_norm": 1.3362540006637573, "learning_rate": 0.0003305306427503737, "loss": 3.4273, "step": 78855 }, { "epoch": 5.358064954477511, "grad_norm": 1.5009100437164307, "learning_rate": 0.000330488177741541, "loss": 3.112, "step": 78860 }, { "epoch": 5.358404674548172, "grad_norm": 1.1882202625274658, "learning_rate": 0.00033044571273270826, "loss": 3.3461, "step": 78865 }, { "epoch": 5.358744394618834, "grad_norm": 1.1621510982513428, "learning_rate": 0.00033040324772387554, "loss": 3.2886, "step": 78870 }, { "epoch": 5.359084114689496, "grad_norm": 1.2281653881072998, "learning_rate": 0.0003303607827150428, "loss": 3.3788, "step": 78875 }, { "epoch": 5.3594238347601575, "grad_norm": 1.205693006515503, "learning_rate": 0.00033031831770621005, "loss": 3.4701, "step": 78880 }, { "epoch": 5.35976355483082, "grad_norm": 1.0778194665908813, "learning_rate": 0.0003302758526973774, "loss": 3.3802, "step": 78885 }, { "epoch": 5.360103274901482, "grad_norm": 1.2304543256759644, "learning_rate": 0.00033023338768854466, "loss": 3.6167, "step": 78890 }, { "epoch": 5.360442994972143, "grad_norm": 1.1387187242507935, "learning_rate": 0.0003301909226797119, "loss": 3.3035, "step": 78895 }, { "epoch": 5.360782715042805, "grad_norm": 
1.1552448272705078, "learning_rate": 0.0003301484576708792, "loss": 3.223, "step": 78900 }, { "epoch": 5.361122435113466, "grad_norm": 1.3998953104019165, "learning_rate": 0.0003301059926620465, "loss": 3.3616, "step": 78905 }, { "epoch": 5.361462155184128, "grad_norm": 1.324535846710205, "learning_rate": 0.0003300635276532137, "loss": 3.1566, "step": 78910 }, { "epoch": 5.36180187525479, "grad_norm": 1.5692392587661743, "learning_rate": 0.000330021062644381, "loss": 3.3431, "step": 78915 }, { "epoch": 5.362141595325451, "grad_norm": 1.3238626718521118, "learning_rate": 0.00032997859763554834, "loss": 3.4751, "step": 78920 }, { "epoch": 5.3624813153961135, "grad_norm": 1.3535258769989014, "learning_rate": 0.00032993613262671557, "loss": 3.3552, "step": 78925 }, { "epoch": 5.362821035466776, "grad_norm": 1.1050620079040527, "learning_rate": 0.00032989366761788285, "loss": 3.2714, "step": 78930 }, { "epoch": 5.363160755537437, "grad_norm": 1.4805259704589844, "learning_rate": 0.0003298512026090502, "loss": 3.4478, "step": 78935 }, { "epoch": 5.363500475608099, "grad_norm": 1.547469973564148, "learning_rate": 0.0003298087376002174, "loss": 3.2742, "step": 78940 }, { "epoch": 5.363840195678761, "grad_norm": 1.4815680980682373, "learning_rate": 0.0003297662725913847, "loss": 3.234, "step": 78945 }, { "epoch": 5.364179915749422, "grad_norm": 1.1822190284729004, "learning_rate": 0.000329723807582552, "loss": 3.3819, "step": 78950 }, { "epoch": 5.364519635820084, "grad_norm": 1.5970275402069092, "learning_rate": 0.0003296813425737193, "loss": 3.3559, "step": 78955 }, { "epoch": 5.364859355890746, "grad_norm": 1.2600858211517334, "learning_rate": 0.0003296388775648865, "loss": 3.0878, "step": 78960 }, { "epoch": 5.365199075961407, "grad_norm": 1.4497398138046265, "learning_rate": 0.0003295964125560538, "loss": 3.2357, "step": 78965 }, { "epoch": 5.3655387960320695, "grad_norm": 1.254349708557129, "learning_rate": 0.00032955394754722114, "loss": 3.3038, "step": 78970 }, { 
"epoch": 5.365878516102732, "grad_norm": 1.0591795444488525, "learning_rate": 0.00032951148253838837, "loss": 3.5753, "step": 78975 }, { "epoch": 5.366218236173393, "grad_norm": 1.1571662425994873, "learning_rate": 0.00032946901752955565, "loss": 3.169, "step": 78980 }, { "epoch": 5.366557956244055, "grad_norm": 1.352041482925415, "learning_rate": 0.000329426552520723, "loss": 3.1678, "step": 78985 }, { "epoch": 5.366897676314717, "grad_norm": 1.1446959972381592, "learning_rate": 0.0003293840875118902, "loss": 3.0876, "step": 78990 }, { "epoch": 5.367237396385378, "grad_norm": 1.2290507555007935, "learning_rate": 0.0003293416225030575, "loss": 3.2375, "step": 78995 }, { "epoch": 5.36757711645604, "grad_norm": 1.5202126502990723, "learning_rate": 0.00032929915749422477, "loss": 3.2462, "step": 79000 }, { "epoch": 5.367916836526702, "grad_norm": 1.5476551055908203, "learning_rate": 0.00032925669248539205, "loss": 3.2538, "step": 79005 }, { "epoch": 5.3682565565973634, "grad_norm": 1.1799266338348389, "learning_rate": 0.0003292142274765593, "loss": 3.5095, "step": 79010 }, { "epoch": 5.3685962766680255, "grad_norm": 1.601030707359314, "learning_rate": 0.0003291717624677266, "loss": 3.2721, "step": 79015 }, { "epoch": 5.368935996738688, "grad_norm": 1.6340149641036987, "learning_rate": 0.0003291292974588939, "loss": 3.3678, "step": 79020 }, { "epoch": 5.369275716809349, "grad_norm": 1.1900548934936523, "learning_rate": 0.00032908683245006117, "loss": 3.2288, "step": 79025 }, { "epoch": 5.369615436880011, "grad_norm": 1.4324140548706055, "learning_rate": 0.00032904436744122845, "loss": 3.4257, "step": 79030 }, { "epoch": 5.369955156950673, "grad_norm": 1.2502388954162598, "learning_rate": 0.0003290019024323957, "loss": 3.4108, "step": 79035 }, { "epoch": 5.370294877021334, "grad_norm": 1.5052589178085327, "learning_rate": 0.000328959437423563, "loss": 3.4608, "step": 79040 }, { "epoch": 5.370634597091996, "grad_norm": 1.2887874841690063, "learning_rate": 
0.0003289169724147303, "loss": 3.3153, "step": 79045 }, { "epoch": 5.370974317162658, "grad_norm": 1.4166297912597656, "learning_rate": 0.0003288745074058975, "loss": 3.1532, "step": 79050 }, { "epoch": 5.3713140372333195, "grad_norm": 1.4340871572494507, "learning_rate": 0.00032883204239706485, "loss": 3.3405, "step": 79055 }, { "epoch": 5.3716537573039815, "grad_norm": 1.1237084865570068, "learning_rate": 0.00032878957738823213, "loss": 3.2496, "step": 79060 }, { "epoch": 5.371993477374644, "grad_norm": 1.2787796258926392, "learning_rate": 0.00032874711237939935, "loss": 3.4098, "step": 79065 }, { "epoch": 5.372333197445305, "grad_norm": 1.4032303094863892, "learning_rate": 0.00032870464737056663, "loss": 3.2101, "step": 79070 }, { "epoch": 5.372672917515967, "grad_norm": 1.0041978359222412, "learning_rate": 0.00032866218236173397, "loss": 3.2686, "step": 79075 }, { "epoch": 5.373012637586629, "grad_norm": 1.0795390605926514, "learning_rate": 0.0003286197173529012, "loss": 3.2367, "step": 79080 }, { "epoch": 5.37335235765729, "grad_norm": 1.2084274291992188, "learning_rate": 0.0003285772523440685, "loss": 3.2556, "step": 79085 }, { "epoch": 5.373692077727952, "grad_norm": 1.4809757471084595, "learning_rate": 0.0003285347873352358, "loss": 3.2285, "step": 79090 }, { "epoch": 5.374031797798614, "grad_norm": 1.167805790901184, "learning_rate": 0.00032849232232640303, "loss": 3.1114, "step": 79095 }, { "epoch": 5.3743715178692755, "grad_norm": 1.1549429893493652, "learning_rate": 0.0003284498573175703, "loss": 3.1018, "step": 79100 }, { "epoch": 5.3747112379399375, "grad_norm": 1.2793583869934082, "learning_rate": 0.0003284073923087376, "loss": 3.2584, "step": 79105 }, { "epoch": 5.3750509580106, "grad_norm": 1.184059739112854, "learning_rate": 0.0003283649272999049, "loss": 3.2484, "step": 79110 }, { "epoch": 5.375390678081261, "grad_norm": 1.334389090538025, "learning_rate": 0.00032832246229107215, "loss": 3.1487, "step": 79115 }, { "epoch": 5.375730398151923, 
"grad_norm": 1.5574349164962769, "learning_rate": 0.00032827999728223943, "loss": 3.3881, "step": 79120 }, { "epoch": 5.376070118222585, "grad_norm": 1.2073614597320557, "learning_rate": 0.00032823753227340677, "loss": 3.272, "step": 79125 }, { "epoch": 5.376409838293246, "grad_norm": 1.2751357555389404, "learning_rate": 0.000328195067264574, "loss": 3.4071, "step": 79130 }, { "epoch": 5.376749558363908, "grad_norm": 1.4635334014892578, "learning_rate": 0.0003281526022557413, "loss": 3.2826, "step": 79135 }, { "epoch": 5.37708927843457, "grad_norm": 1.3119572401046753, "learning_rate": 0.00032811013724690855, "loss": 3.3223, "step": 79140 }, { "epoch": 5.3774289985052315, "grad_norm": 1.390650987625122, "learning_rate": 0.00032806767223807583, "loss": 3.517, "step": 79145 }, { "epoch": 5.377768718575894, "grad_norm": 1.4341509342193604, "learning_rate": 0.0003280252072292431, "loss": 3.2791, "step": 79150 }, { "epoch": 5.378108438646556, "grad_norm": 1.2661869525909424, "learning_rate": 0.0003279827422204104, "loss": 3.3013, "step": 79155 }, { "epoch": 5.378448158717217, "grad_norm": 1.1314949989318848, "learning_rate": 0.0003279402772115777, "loss": 3.2913, "step": 79160 }, { "epoch": 5.378787878787879, "grad_norm": 1.325214147567749, "learning_rate": 0.00032789781220274496, "loss": 3.2737, "step": 79165 }, { "epoch": 5.379127598858541, "grad_norm": 1.1005421876907349, "learning_rate": 0.00032785534719391224, "loss": 3.5177, "step": 79170 }, { "epoch": 5.379467318929202, "grad_norm": 1.4351587295532227, "learning_rate": 0.00032781288218507946, "loss": 3.1035, "step": 79175 }, { "epoch": 5.379807038999864, "grad_norm": 1.1771129369735718, "learning_rate": 0.0003277704171762468, "loss": 3.4687, "step": 79180 }, { "epoch": 5.380146759070525, "grad_norm": 1.267701506614685, "learning_rate": 0.0003277279521674141, "loss": 3.2447, "step": 79185 }, { "epoch": 5.3804864791411875, "grad_norm": 1.080335021018982, "learning_rate": 0.0003276854871585813, "loss": 3.2958, 
"step": 79190 }, { "epoch": 5.38082619921185, "grad_norm": 1.2345532178878784, "learning_rate": 0.00032764302214974864, "loss": 3.358, "step": 79195 }, { "epoch": 5.381165919282511, "grad_norm": 1.4466882944107056, "learning_rate": 0.0003276005571409159, "loss": 3.4259, "step": 79200 }, { "epoch": 5.381505639353173, "grad_norm": 1.133174180984497, "learning_rate": 0.00032755809213208314, "loss": 3.2657, "step": 79205 }, { "epoch": 5.381845359423835, "grad_norm": 1.2316970825195312, "learning_rate": 0.0003275156271232504, "loss": 3.2934, "step": 79210 }, { "epoch": 5.382185079494496, "grad_norm": 1.3877537250518799, "learning_rate": 0.00032747316211441776, "loss": 3.288, "step": 79215 }, { "epoch": 5.382524799565158, "grad_norm": 1.0376207828521729, "learning_rate": 0.000327430697105585, "loss": 3.2797, "step": 79220 }, { "epoch": 5.38286451963582, "grad_norm": 1.1565359830856323, "learning_rate": 0.00032738823209675226, "loss": 3.2581, "step": 79225 }, { "epoch": 5.383204239706481, "grad_norm": 1.4601778984069824, "learning_rate": 0.0003273457670879196, "loss": 3.5465, "step": 79230 }, { "epoch": 5.3835439597771435, "grad_norm": 1.7043157815933228, "learning_rate": 0.0003273033020790868, "loss": 3.523, "step": 79235 }, { "epoch": 5.383883679847806, "grad_norm": 1.3026658296585083, "learning_rate": 0.0003272608370702541, "loss": 3.4716, "step": 79240 }, { "epoch": 5.384223399918467, "grad_norm": 1.169973373413086, "learning_rate": 0.00032721837206142144, "loss": 3.2559, "step": 79245 }, { "epoch": 5.384563119989129, "grad_norm": 1.1291824579238892, "learning_rate": 0.00032717590705258866, "loss": 3.1551, "step": 79250 }, { "epoch": 5.384902840059791, "grad_norm": 1.5589754581451416, "learning_rate": 0.00032713344204375594, "loss": 3.07, "step": 79255 }, { "epoch": 5.385242560130452, "grad_norm": 1.1108776330947876, "learning_rate": 0.0003270909770349232, "loss": 3.3074, "step": 79260 }, { "epoch": 5.385582280201114, "grad_norm": 1.4511170387268066, "learning_rate": 
0.0003270485120260905, "loss": 3.3473, "step": 79265 }, { "epoch": 5.385922000271776, "grad_norm": 1.2660369873046875, "learning_rate": 0.0003270060470172578, "loss": 3.2064, "step": 79270 }, { "epoch": 5.386261720342437, "grad_norm": 1.2827807664871216, "learning_rate": 0.00032696358200842506, "loss": 3.358, "step": 79275 }, { "epoch": 5.3866014404130995, "grad_norm": 1.2085453271865845, "learning_rate": 0.00032692111699959234, "loss": 3.3698, "step": 79280 }, { "epoch": 5.386941160483762, "grad_norm": 1.2137513160705566, "learning_rate": 0.0003268786519907596, "loss": 3.0645, "step": 79285 }, { "epoch": 5.387280880554423, "grad_norm": 1.043774127960205, "learning_rate": 0.0003268361869819269, "loss": 3.2404, "step": 79290 }, { "epoch": 5.387620600625085, "grad_norm": 1.1039941310882568, "learning_rate": 0.0003267937219730942, "loss": 3.2872, "step": 79295 }, { "epoch": 5.387960320695747, "grad_norm": 1.188077688217163, "learning_rate": 0.00032675125696426146, "loss": 3.0953, "step": 79300 }, { "epoch": 5.388300040766408, "grad_norm": 1.4974017143249512, "learning_rate": 0.00032670879195542874, "loss": 3.2188, "step": 79305 }, { "epoch": 5.38863976083707, "grad_norm": 1.0272079706192017, "learning_rate": 0.000326666326946596, "loss": 3.3674, "step": 79310 }, { "epoch": 5.388979480907732, "grad_norm": 1.4568508863449097, "learning_rate": 0.0003266238619377633, "loss": 3.2838, "step": 79315 }, { "epoch": 5.3893192009783935, "grad_norm": 1.2882128953933716, "learning_rate": 0.0003265813969289306, "loss": 3.2964, "step": 79320 }, { "epoch": 5.3896589210490555, "grad_norm": 1.471676230430603, "learning_rate": 0.00032653893192009786, "loss": 3.4953, "step": 79325 }, { "epoch": 5.389998641119718, "grad_norm": 1.1855436563491821, "learning_rate": 0.0003264964669112651, "loss": 3.4149, "step": 79330 }, { "epoch": 5.390338361190379, "grad_norm": 1.3749938011169434, "learning_rate": 0.0003264540019024324, "loss": 3.164, "step": 79335 }, { "epoch": 5.390678081261041, 
"grad_norm": 0.9690799713134766, "learning_rate": 0.0003264115368935997, "loss": 3.3873, "step": 79340 }, { "epoch": 5.391017801331703, "grad_norm": 1.3393347263336182, "learning_rate": 0.00032636907188476693, "loss": 3.209, "step": 79345 }, { "epoch": 5.391357521402364, "grad_norm": 1.7589446306228638, "learning_rate": 0.00032632660687593426, "loss": 3.391, "step": 79350 }, { "epoch": 5.391697241473026, "grad_norm": 1.2730087041854858, "learning_rate": 0.00032628414186710154, "loss": 3.1948, "step": 79355 }, { "epoch": 5.392036961543688, "grad_norm": 1.24524986743927, "learning_rate": 0.00032624167685826877, "loss": 3.1895, "step": 79360 }, { "epoch": 5.3923766816143495, "grad_norm": 1.4402273893356323, "learning_rate": 0.00032619921184943605, "loss": 3.348, "step": 79365 }, { "epoch": 5.3927164016850115, "grad_norm": 1.292214035987854, "learning_rate": 0.0003261567468406034, "loss": 3.0903, "step": 79370 }, { "epoch": 5.393056121755674, "grad_norm": 1.3047469854354858, "learning_rate": 0.0003261142818317706, "loss": 3.3652, "step": 79375 }, { "epoch": 5.393395841826335, "grad_norm": 1.308550238609314, "learning_rate": 0.0003260718168229379, "loss": 3.0782, "step": 79380 }, { "epoch": 5.393735561896997, "grad_norm": 1.4067223072052002, "learning_rate": 0.0003260293518141052, "loss": 3.3064, "step": 79385 }, { "epoch": 5.394075281967659, "grad_norm": 1.2889710664749146, "learning_rate": 0.00032598688680527245, "loss": 3.4791, "step": 79390 }, { "epoch": 5.39441500203832, "grad_norm": 1.4763576984405518, "learning_rate": 0.00032594442179643973, "loss": 3.2668, "step": 79395 }, { "epoch": 5.394754722108982, "grad_norm": 1.3083432912826538, "learning_rate": 0.000325901956787607, "loss": 3.3482, "step": 79400 }, { "epoch": 5.395094442179644, "grad_norm": 1.272352933883667, "learning_rate": 0.0003258594917787743, "loss": 3.2118, "step": 79405 }, { "epoch": 5.3954341622503055, "grad_norm": 1.3343170881271362, "learning_rate": 0.00032581702676994157, "loss": 3.5134, 
"step": 79410 }, { "epoch": 5.3957738823209676, "grad_norm": 1.5118037462234497, "learning_rate": 0.00032577456176110885, "loss": 3.3845, "step": 79415 }, { "epoch": 5.39611360239163, "grad_norm": 1.033260464668274, "learning_rate": 0.00032573209675227613, "loss": 3.3707, "step": 79420 }, { "epoch": 5.396453322462291, "grad_norm": 1.0871343612670898, "learning_rate": 0.0003256896317434434, "loss": 3.2629, "step": 79425 }, { "epoch": 5.396793042532953, "grad_norm": 1.2532744407653809, "learning_rate": 0.0003256471667346107, "loss": 3.3064, "step": 79430 }, { "epoch": 5.397132762603615, "grad_norm": 1.5933091640472412, "learning_rate": 0.0003256047017257779, "loss": 3.4142, "step": 79435 }, { "epoch": 5.397472482674276, "grad_norm": 0.9937152862548828, "learning_rate": 0.00032556223671694525, "loss": 3.5879, "step": 79440 }, { "epoch": 5.397812202744938, "grad_norm": 1.1478215456008911, "learning_rate": 0.00032551977170811253, "loss": 3.2634, "step": 79445 }, { "epoch": 5.3981519228156, "grad_norm": 1.35811185836792, "learning_rate": 0.00032547730669927976, "loss": 3.1718, "step": 79450 }, { "epoch": 5.3984916428862615, "grad_norm": 1.8815473318099976, "learning_rate": 0.0003254348416904471, "loss": 3.1328, "step": 79455 }, { "epoch": 5.398831362956924, "grad_norm": 1.22874915599823, "learning_rate": 0.00032539237668161437, "loss": 3.4493, "step": 79460 }, { "epoch": 5.399171083027586, "grad_norm": 1.8695545196533203, "learning_rate": 0.00032534991167278165, "loss": 2.9573, "step": 79465 }, { "epoch": 5.399510803098247, "grad_norm": 1.227026104927063, "learning_rate": 0.0003253074466639489, "loss": 3.3366, "step": 79470 }, { "epoch": 5.399850523168909, "grad_norm": 1.4575879573822021, "learning_rate": 0.0003252649816551162, "loss": 3.3289, "step": 79475 }, { "epoch": 5.400190243239571, "grad_norm": 1.111857295036316, "learning_rate": 0.0003252225166462835, "loss": 3.3449, "step": 79480 }, { "epoch": 5.400529963310232, "grad_norm": 1.172694444656372, "learning_rate": 
0.0003251800516374507, "loss": 2.9696, "step": 79485 }, { "epoch": 5.400869683380894, "grad_norm": 1.2693356275558472, "learning_rate": 0.00032513758662861805, "loss": 3.4862, "step": 79490 }, { "epoch": 5.401209403451556, "grad_norm": 1.371881365776062, "learning_rate": 0.00032509512161978533, "loss": 3.4083, "step": 79495 }, { "epoch": 5.4015491235222175, "grad_norm": 1.293839693069458, "learning_rate": 0.00032505265661095256, "loss": 3.3274, "step": 79500 }, { "epoch": 5.40188884359288, "grad_norm": 1.5353577136993408, "learning_rate": 0.00032501019160211984, "loss": 3.2112, "step": 79505 }, { "epoch": 5.402228563663542, "grad_norm": 1.1325762271881104, "learning_rate": 0.00032496772659328717, "loss": 3.1838, "step": 79510 }, { "epoch": 5.402568283734203, "grad_norm": 1.3986209630966187, "learning_rate": 0.0003249252615844544, "loss": 3.2967, "step": 79515 }, { "epoch": 5.402908003804865, "grad_norm": 1.4164273738861084, "learning_rate": 0.0003248827965756217, "loss": 3.2854, "step": 79520 }, { "epoch": 5.403247723875527, "grad_norm": 1.4825373888015747, "learning_rate": 0.000324840331566789, "loss": 3.3772, "step": 79525 }, { "epoch": 5.403587443946188, "grad_norm": 1.6423746347427368, "learning_rate": 0.00032479786655795624, "loss": 3.3951, "step": 79530 }, { "epoch": 5.40392716401685, "grad_norm": 1.296527624130249, "learning_rate": 0.0003247554015491235, "loss": 3.0833, "step": 79535 }, { "epoch": 5.404266884087512, "grad_norm": 1.546745777130127, "learning_rate": 0.00032471293654029085, "loss": 3.2701, "step": 79540 }, { "epoch": 5.4046066041581735, "grad_norm": 1.0500608682632446, "learning_rate": 0.0003246704715314581, "loss": 3.3635, "step": 79545 }, { "epoch": 5.404946324228836, "grad_norm": 1.1979663372039795, "learning_rate": 0.00032462800652262536, "loss": 3.3694, "step": 79550 }, { "epoch": 5.405286044299498, "grad_norm": 1.3832135200500488, "learning_rate": 0.00032458554151379264, "loss": 3.556, "step": 79555 }, { "epoch": 5.405625764370159, 
"grad_norm": 1.1880598068237305, "learning_rate": 0.0003245430765049599, "loss": 3.418, "step": 79560 }, { "epoch": 5.405965484440821, "grad_norm": 1.3728761672973633, "learning_rate": 0.0003245006114961272, "loss": 3.2142, "step": 79565 }, { "epoch": 5.406305204511483, "grad_norm": 1.2132161855697632, "learning_rate": 0.0003244581464872945, "loss": 3.4557, "step": 79570 }, { "epoch": 5.406644924582144, "grad_norm": 1.5649821758270264, "learning_rate": 0.00032441568147846176, "loss": 3.2379, "step": 79575 }, { "epoch": 5.406984644652806, "grad_norm": 1.4445713758468628, "learning_rate": 0.00032437321646962904, "loss": 3.4263, "step": 79580 }, { "epoch": 5.4073243647234674, "grad_norm": 1.2392544746398926, "learning_rate": 0.0003243307514607963, "loss": 3.0899, "step": 79585 }, { "epoch": 5.4076640847941295, "grad_norm": 1.2218985557556152, "learning_rate": 0.00032428828645196354, "loss": 3.2433, "step": 79590 }, { "epoch": 5.408003804864792, "grad_norm": 1.3780280351638794, "learning_rate": 0.0003242458214431309, "loss": 2.9195, "step": 79595 }, { "epoch": 5.408343524935453, "grad_norm": 1.0677846670150757, "learning_rate": 0.00032420335643429816, "loss": 3.6981, "step": 79600 }, { "epoch": 5.408683245006115, "grad_norm": 1.0758845806121826, "learning_rate": 0.0003241608914254654, "loss": 3.1989, "step": 79605 }, { "epoch": 5.409022965076777, "grad_norm": 1.2220507860183716, "learning_rate": 0.0003241184264166327, "loss": 3.2567, "step": 79610 }, { "epoch": 5.409362685147438, "grad_norm": 1.1678779125213623, "learning_rate": 0.0003240759614078, "loss": 2.9435, "step": 79615 }, { "epoch": 5.4097024052181, "grad_norm": 1.2669925689697266, "learning_rate": 0.0003240334963989672, "loss": 3.0942, "step": 79620 }, { "epoch": 5.410042125288762, "grad_norm": 1.233315110206604, "learning_rate": 0.0003239910313901345, "loss": 3.5565, "step": 79625 }, { "epoch": 5.4103818453594235, "grad_norm": 1.3170162439346313, "learning_rate": 0.00032394856638130184, "loss": 3.2628, 
"step": 79630 }, { "epoch": 5.4107215654300855, "grad_norm": 1.1089534759521484, "learning_rate": 0.0003239061013724691, "loss": 3.3808, "step": 79635 }, { "epoch": 5.411061285500748, "grad_norm": 1.2661662101745605, "learning_rate": 0.00032386363636363635, "loss": 3.1475, "step": 79640 }, { "epoch": 5.411401005571409, "grad_norm": 1.143887996673584, "learning_rate": 0.0003238211713548037, "loss": 3.2482, "step": 79645 }, { "epoch": 5.411740725642071, "grad_norm": 1.2243540287017822, "learning_rate": 0.00032377870634597096, "loss": 3.0973, "step": 79650 }, { "epoch": 5.412080445712733, "grad_norm": 1.088247537612915, "learning_rate": 0.0003237362413371382, "loss": 3.4812, "step": 79655 }, { "epoch": 5.412420165783394, "grad_norm": 1.2394884824752808, "learning_rate": 0.00032369377632830547, "loss": 3.1889, "step": 79660 }, { "epoch": 5.412759885854056, "grad_norm": 1.3661881685256958, "learning_rate": 0.0003236513113194728, "loss": 3.445, "step": 79665 }, { "epoch": 5.413099605924718, "grad_norm": 1.376011610031128, "learning_rate": 0.00032360884631064, "loss": 3.3706, "step": 79670 }, { "epoch": 5.4134393259953795, "grad_norm": 1.2634919881820679, "learning_rate": 0.0003235663813018073, "loss": 3.3693, "step": 79675 }, { "epoch": 5.4137790460660415, "grad_norm": 1.152849555015564, "learning_rate": 0.00032352391629297464, "loss": 3.1889, "step": 79680 }, { "epoch": 5.414118766136704, "grad_norm": 1.0897337198257446, "learning_rate": 0.00032348145128414187, "loss": 3.2072, "step": 79685 }, { "epoch": 5.414458486207365, "grad_norm": 1.2316769361495972, "learning_rate": 0.00032343898627530915, "loss": 3.2962, "step": 79690 }, { "epoch": 5.414798206278027, "grad_norm": 1.1978535652160645, "learning_rate": 0.0003233965212664764, "loss": 3.3069, "step": 79695 }, { "epoch": 5.415137926348689, "grad_norm": 1.6829544305801392, "learning_rate": 0.0003233540562576437, "loss": 3.2434, "step": 79700 }, { "epoch": 5.41547764641935, "grad_norm": 1.0318158864974976, 
"learning_rate": 0.000323311591248811, "loss": 3.3197, "step": 79705 }, { "epoch": 5.415817366490012, "grad_norm": 1.4100862741470337, "learning_rate": 0.00032326912623997827, "loss": 3.2527, "step": 79710 }, { "epoch": 5.416157086560674, "grad_norm": 1.5704660415649414, "learning_rate": 0.00032322666123114555, "loss": 3.2936, "step": 79715 }, { "epoch": 5.4164968066313355, "grad_norm": 1.2022483348846436, "learning_rate": 0.0003231841962223128, "loss": 3.2753, "step": 79720 }, { "epoch": 5.416836526701998, "grad_norm": 1.3255212306976318, "learning_rate": 0.0003231417312134801, "loss": 3.3907, "step": 79725 }, { "epoch": 5.41717624677266, "grad_norm": 1.271436095237732, "learning_rate": 0.00032309926620464733, "loss": 3.504, "step": 79730 }, { "epoch": 5.417515966843321, "grad_norm": 1.1040031909942627, "learning_rate": 0.00032305680119581467, "loss": 3.2729, "step": 79735 }, { "epoch": 5.417855686913983, "grad_norm": 1.3514121770858765, "learning_rate": 0.00032301433618698195, "loss": 3.1788, "step": 79740 }, { "epoch": 5.418195406984645, "grad_norm": 1.5417801141738892, "learning_rate": 0.0003229718711781492, "loss": 3.2723, "step": 79745 }, { "epoch": 5.418535127055306, "grad_norm": 1.5934474468231201, "learning_rate": 0.0003229294061693165, "loss": 3.3792, "step": 79750 }, { "epoch": 5.418874847125968, "grad_norm": 1.812728762626648, "learning_rate": 0.0003228869411604838, "loss": 3.5083, "step": 79755 }, { "epoch": 5.41921456719663, "grad_norm": 1.3796814680099487, "learning_rate": 0.000322844476151651, "loss": 3.0043, "step": 79760 }, { "epoch": 5.4195542872672915, "grad_norm": 1.347819209098816, "learning_rate": 0.0003228020111428183, "loss": 3.2669, "step": 79765 }, { "epoch": 5.419894007337954, "grad_norm": 1.2786163091659546, "learning_rate": 0.00032275954613398563, "loss": 3.2428, "step": 79770 }, { "epoch": 5.420233727408616, "grad_norm": 1.2339155673980713, "learning_rate": 0.00032271708112515285, "loss": 3.3738, "step": 79775 }, { "epoch": 
5.420573447479277, "grad_norm": 1.236051321029663, "learning_rate": 0.00032267461611632013, "loss": 3.5169, "step": 79780 }, { "epoch": 5.420913167549939, "grad_norm": 1.1796399354934692, "learning_rate": 0.00032263215110748747, "loss": 3.4815, "step": 79785 }, { "epoch": 5.421252887620601, "grad_norm": 1.5066286325454712, "learning_rate": 0.0003225896860986547, "loss": 3.2799, "step": 79790 }, { "epoch": 5.421592607691262, "grad_norm": 1.4774295091629028, "learning_rate": 0.000322547221089822, "loss": 3.0855, "step": 79795 }, { "epoch": 5.421932327761924, "grad_norm": 1.3211051225662231, "learning_rate": 0.00032250475608098925, "loss": 3.2097, "step": 79800 }, { "epoch": 5.422272047832586, "grad_norm": 1.0713539123535156, "learning_rate": 0.0003224622910721566, "loss": 3.038, "step": 79805 }, { "epoch": 5.4226117679032475, "grad_norm": 1.489831805229187, "learning_rate": 0.0003224198260633238, "loss": 3.2875, "step": 79810 }, { "epoch": 5.42295148797391, "grad_norm": 2.072183132171631, "learning_rate": 0.0003223773610544911, "loss": 3.4574, "step": 79815 }, { "epoch": 5.423291208044572, "grad_norm": 1.3713948726654053, "learning_rate": 0.00032233489604565843, "loss": 3.3161, "step": 79820 }, { "epoch": 5.423630928115233, "grad_norm": 1.3267600536346436, "learning_rate": 0.00032229243103682565, "loss": 3.5384, "step": 79825 }, { "epoch": 5.423970648185895, "grad_norm": 1.3356283903121948, "learning_rate": 0.00032224996602799293, "loss": 3.4517, "step": 79830 }, { "epoch": 5.424310368256557, "grad_norm": 1.1571558713912964, "learning_rate": 0.00032220750101916027, "loss": 3.5184, "step": 79835 }, { "epoch": 5.424650088327218, "grad_norm": 1.3163567781448364, "learning_rate": 0.0003221650360103275, "loss": 3.4073, "step": 79840 }, { "epoch": 5.42498980839788, "grad_norm": 1.1784906387329102, "learning_rate": 0.0003221225710014948, "loss": 3.4556, "step": 79845 }, { "epoch": 5.425329528468542, "grad_norm": 0.9584699273109436, "learning_rate": 0.00032208010599266205, 
"loss": 3.3264, "step": 79850 }, { "epoch": 5.4256692485392035, "grad_norm": 1.321907877922058, "learning_rate": 0.00032203764098382933, "loss": 3.4454, "step": 79855 }, { "epoch": 5.426008968609866, "grad_norm": 1.5826489925384521, "learning_rate": 0.0003219951759749966, "loss": 3.1736, "step": 79860 }, { "epoch": 5.426348688680527, "grad_norm": 1.449029564857483, "learning_rate": 0.0003219527109661639, "loss": 3.432, "step": 79865 }, { "epoch": 5.426688408751189, "grad_norm": 1.3440769910812378, "learning_rate": 0.0003219102459573312, "loss": 3.026, "step": 79870 }, { "epoch": 5.427028128821851, "grad_norm": 1.5989893674850464, "learning_rate": 0.00032186778094849846, "loss": 3.2704, "step": 79875 }, { "epoch": 5.427367848892512, "grad_norm": 0.9623376727104187, "learning_rate": 0.00032182531593966574, "loss": 3.2047, "step": 79880 }, { "epoch": 5.427707568963174, "grad_norm": 1.1855897903442383, "learning_rate": 0.00032178285093083296, "loss": 3.0702, "step": 79885 }, { "epoch": 5.428047289033836, "grad_norm": 1.3059473037719727, "learning_rate": 0.0003217403859220003, "loss": 3.3985, "step": 79890 }, { "epoch": 5.4283870091044975, "grad_norm": 1.0672186613082886, "learning_rate": 0.0003216979209131676, "loss": 3.4333, "step": 79895 }, { "epoch": 5.4287267291751595, "grad_norm": 1.534350872039795, "learning_rate": 0.0003216554559043348, "loss": 3.3216, "step": 79900 }, { "epoch": 5.429066449245822, "grad_norm": 1.1762787103652954, "learning_rate": 0.00032161299089550214, "loss": 3.2856, "step": 79905 }, { "epoch": 5.429406169316483, "grad_norm": 1.2804484367370605, "learning_rate": 0.0003215705258866694, "loss": 3.4049, "step": 79910 }, { "epoch": 5.429745889387145, "grad_norm": 1.3958382606506348, "learning_rate": 0.00032152806087783664, "loss": 3.3794, "step": 79915 }, { "epoch": 5.430085609457807, "grad_norm": 1.5539840459823608, "learning_rate": 0.0003214855958690039, "loss": 2.8657, "step": 79920 }, { "epoch": 5.430425329528468, "grad_norm": 
1.0677516460418701, "learning_rate": 0.00032144313086017126, "loss": 3.3385, "step": 79925 }, { "epoch": 5.43076504959913, "grad_norm": 1.3579615354537964, "learning_rate": 0.0003214006658513385, "loss": 3.1168, "step": 79930 }, { "epoch": 5.431104769669792, "grad_norm": 1.3859540224075317, "learning_rate": 0.00032135820084250576, "loss": 2.9212, "step": 79935 }, { "epoch": 5.4314444897404535, "grad_norm": 1.2900866270065308, "learning_rate": 0.0003213157358336731, "loss": 3.3384, "step": 79940 }, { "epoch": 5.4317842098111155, "grad_norm": 1.384185791015625, "learning_rate": 0.0003212732708248403, "loss": 3.2988, "step": 79945 }, { "epoch": 5.432123929881778, "grad_norm": 1.1474066972732544, "learning_rate": 0.0003212308058160076, "loss": 3.1466, "step": 79950 }, { "epoch": 5.432463649952439, "grad_norm": 1.3203563690185547, "learning_rate": 0.0003211883408071749, "loss": 3.2591, "step": 79955 }, { "epoch": 5.432803370023101, "grad_norm": 1.8804587125778198, "learning_rate": 0.00032114587579834216, "loss": 3.4186, "step": 79960 }, { "epoch": 5.433143090093763, "grad_norm": 1.6775972843170166, "learning_rate": 0.00032110341078950944, "loss": 3.387, "step": 79965 }, { "epoch": 5.433482810164424, "grad_norm": 1.1447421312332153, "learning_rate": 0.0003210609457806767, "loss": 3.3804, "step": 79970 }, { "epoch": 5.433822530235086, "grad_norm": 1.2897475957870483, "learning_rate": 0.00032101848077184406, "loss": 3.2718, "step": 79975 }, { "epoch": 5.434162250305748, "grad_norm": 1.4057409763336182, "learning_rate": 0.0003209760157630113, "loss": 3.1292, "step": 79980 }, { "epoch": 5.4345019703764095, "grad_norm": 1.6931812763214111, "learning_rate": 0.00032093355075417856, "loss": 3.1516, "step": 79985 }, { "epoch": 5.4348416904470715, "grad_norm": 1.0659286975860596, "learning_rate": 0.00032089108574534584, "loss": 3.5282, "step": 79990 }, { "epoch": 5.435181410517734, "grad_norm": 0.9914144277572632, "learning_rate": 0.0003208486207365131, "loss": 3.1743, "step": 
79995 }, { "epoch": 5.435521130588395, "grad_norm": 1.2782702445983887, "learning_rate": 0.0003208061557276804, "loss": 3.286, "step": 80000 }, { "epoch": 5.435860850659057, "grad_norm": 1.272284746170044, "learning_rate": 0.0003207636907188477, "loss": 3.441, "step": 80005 }, { "epoch": 5.436200570729719, "grad_norm": 1.2393553256988525, "learning_rate": 0.00032072122571001496, "loss": 3.3748, "step": 80010 }, { "epoch": 5.43654029080038, "grad_norm": 1.1418752670288086, "learning_rate": 0.00032067876070118224, "loss": 3.4039, "step": 80015 }, { "epoch": 5.436880010871042, "grad_norm": 1.4154223203659058, "learning_rate": 0.0003206362956923495, "loss": 3.501, "step": 80020 }, { "epoch": 5.437219730941704, "grad_norm": 1.273054599761963, "learning_rate": 0.00032059383068351675, "loss": 3.1551, "step": 80025 }, { "epoch": 5.4375594510123655, "grad_norm": 1.7043113708496094, "learning_rate": 0.0003205513656746841, "loss": 3.5195, "step": 80030 }, { "epoch": 5.437899171083028, "grad_norm": 1.1864820718765259, "learning_rate": 0.00032050890066585136, "loss": 3.4557, "step": 80035 }, { "epoch": 5.43823889115369, "grad_norm": 1.0683954954147339, "learning_rate": 0.0003204664356570186, "loss": 3.2393, "step": 80040 }, { "epoch": 5.438578611224351, "grad_norm": 1.1302566528320312, "learning_rate": 0.0003204239706481859, "loss": 3.1855, "step": 80045 }, { "epoch": 5.438918331295013, "grad_norm": 1.265739917755127, "learning_rate": 0.0003203815056393532, "loss": 3.3206, "step": 80050 }, { "epoch": 5.439258051365675, "grad_norm": 1.4336073398590088, "learning_rate": 0.00032033904063052043, "loss": 3.4321, "step": 80055 }, { "epoch": 5.439597771436336, "grad_norm": 1.0618550777435303, "learning_rate": 0.0003202965756216877, "loss": 3.2371, "step": 80060 }, { "epoch": 5.439937491506998, "grad_norm": 1.3753983974456787, "learning_rate": 0.00032025411061285504, "loss": 3.1239, "step": 80065 }, { "epoch": 5.44027721157766, "grad_norm": 1.239648699760437, "learning_rate": 
0.00032021164560402227, "loss": 3.1628, "step": 80070 }, { "epoch": 5.4406169316483215, "grad_norm": 1.2373065948486328, "learning_rate": 0.00032016918059518955, "loss": 3.161, "step": 80075 }, { "epoch": 5.440956651718984, "grad_norm": 1.424689531326294, "learning_rate": 0.0003201267155863569, "loss": 3.3713, "step": 80080 }, { "epoch": 5.441296371789646, "grad_norm": 1.1756417751312256, "learning_rate": 0.0003200842505775241, "loss": 3.5121, "step": 80085 }, { "epoch": 5.441636091860307, "grad_norm": 1.3146592378616333, "learning_rate": 0.0003200417855686914, "loss": 3.0929, "step": 80090 }, { "epoch": 5.441975811930969, "grad_norm": 1.4529073238372803, "learning_rate": 0.0003199993205598587, "loss": 3.2957, "step": 80095 }, { "epoch": 5.442315532001631, "grad_norm": 1.5921425819396973, "learning_rate": 0.00031995685555102595, "loss": 3.2968, "step": 80100 }, { "epoch": 5.442655252072292, "grad_norm": 1.1038532257080078, "learning_rate": 0.00031991439054219323, "loss": 3.2913, "step": 80105 }, { "epoch": 5.442994972142954, "grad_norm": 1.4582252502441406, "learning_rate": 0.0003198719255333605, "loss": 3.3435, "step": 80110 }, { "epoch": 5.443334692213616, "grad_norm": 1.1143614053726196, "learning_rate": 0.0003198294605245278, "loss": 3.45, "step": 80115 }, { "epoch": 5.4436744122842775, "grad_norm": 1.2267379760742188, "learning_rate": 0.00031978699551569507, "loss": 3.3217, "step": 80120 }, { "epoch": 5.44401413235494, "grad_norm": 1.3403421640396118, "learning_rate": 0.00031974453050686235, "loss": 3.1028, "step": 80125 }, { "epoch": 5.444353852425602, "grad_norm": 1.6951777935028076, "learning_rate": 0.00031970206549802963, "loss": 3.4838, "step": 80130 }, { "epoch": 5.444693572496263, "grad_norm": 1.3652595281600952, "learning_rate": 0.0003196596004891969, "loss": 3.3228, "step": 80135 }, { "epoch": 5.445033292566925, "grad_norm": 1.6031497716903687, "learning_rate": 0.0003196171354803642, "loss": 3.6198, "step": 80140 }, { "epoch": 5.445373012637587, 
"grad_norm": 2.0008764266967773, "learning_rate": 0.00031957467047153147, "loss": 3.402, "step": 80145 }, { "epoch": 5.445712732708248, "grad_norm": 1.294735074043274, "learning_rate": 0.00031953220546269875, "loss": 3.3946, "step": 80150 }, { "epoch": 5.44605245277891, "grad_norm": 1.1270842552185059, "learning_rate": 0.00031948974045386603, "loss": 3.2071, "step": 80155 }, { "epoch": 5.446392172849572, "grad_norm": 1.1805452108383179, "learning_rate": 0.0003194472754450333, "loss": 3.274, "step": 80160 }, { "epoch": 5.4467318929202335, "grad_norm": 1.0161343812942505, "learning_rate": 0.0003194048104362006, "loss": 3.4721, "step": 80165 }, { "epoch": 5.447071612990896, "grad_norm": 1.3940180540084839, "learning_rate": 0.00031936234542736787, "loss": 3.4493, "step": 80170 }, { "epoch": 5.447411333061558, "grad_norm": 1.6069996356964111, "learning_rate": 0.00031931988041853515, "loss": 3.4505, "step": 80175 }, { "epoch": 5.447751053132219, "grad_norm": 1.4663290977478027, "learning_rate": 0.0003192774154097024, "loss": 3.2115, "step": 80180 }, { "epoch": 5.448090773202881, "grad_norm": 1.341871976852417, "learning_rate": 0.0003192349504008697, "loss": 3.4438, "step": 80185 }, { "epoch": 5.448430493273543, "grad_norm": 1.0573053359985352, "learning_rate": 0.000319192485392037, "loss": 3.3982, "step": 80190 }, { "epoch": 5.448770213344204, "grad_norm": 1.4023330211639404, "learning_rate": 0.0003191500203832042, "loss": 3.3012, "step": 80195 }, { "epoch": 5.449109933414866, "grad_norm": 1.2809338569641113, "learning_rate": 0.00031910755537437155, "loss": 3.3021, "step": 80200 }, { "epoch": 5.449449653485528, "grad_norm": 1.2811342477798462, "learning_rate": 0.00031906509036553883, "loss": 3.2571, "step": 80205 }, { "epoch": 5.4497893735561895, "grad_norm": 1.5614358186721802, "learning_rate": 0.00031902262535670606, "loss": 3.1697, "step": 80210 }, { "epoch": 5.450129093626852, "grad_norm": 1.3671590089797974, "learning_rate": 0.00031898016034787334, "loss": 3.6102, 
"step": 80215 }, { "epoch": 5.450468813697514, "grad_norm": 1.3301833868026733, "learning_rate": 0.00031893769533904067, "loss": 3.2254, "step": 80220 }, { "epoch": 5.450808533768175, "grad_norm": 1.5678224563598633, "learning_rate": 0.0003188952303302079, "loss": 3.1366, "step": 80225 }, { "epoch": 5.451148253838837, "grad_norm": 1.4919284582138062, "learning_rate": 0.0003188527653213752, "loss": 3.3276, "step": 80230 }, { "epoch": 5.451487973909499, "grad_norm": 1.2214864492416382, "learning_rate": 0.0003188103003125425, "loss": 3.4261, "step": 80235 }, { "epoch": 5.45182769398016, "grad_norm": 1.1287113428115845, "learning_rate": 0.00031876783530370974, "loss": 3.2093, "step": 80240 }, { "epoch": 5.452167414050822, "grad_norm": 1.3465898036956787, "learning_rate": 0.000318725370294877, "loss": 3.246, "step": 80245 }, { "epoch": 5.452507134121484, "grad_norm": 1.259839415550232, "learning_rate": 0.0003186829052860443, "loss": 3.1214, "step": 80250 }, { "epoch": 5.4528468541921455, "grad_norm": 1.1421773433685303, "learning_rate": 0.0003186404402772116, "loss": 3.6052, "step": 80255 }, { "epoch": 5.453186574262808, "grad_norm": 1.678975224494934, "learning_rate": 0.00031859797526837886, "loss": 3.2671, "step": 80260 }, { "epoch": 5.45352629433347, "grad_norm": 1.2934027910232544, "learning_rate": 0.00031855551025954614, "loss": 3.2561, "step": 80265 }, { "epoch": 5.453866014404131, "grad_norm": 2.373805284500122, "learning_rate": 0.0003185130452507134, "loss": 3.2855, "step": 80270 }, { "epoch": 5.454205734474793, "grad_norm": 1.2924928665161133, "learning_rate": 0.0003184705802418807, "loss": 3.2192, "step": 80275 }, { "epoch": 5.454545454545454, "grad_norm": 1.4736053943634033, "learning_rate": 0.000318428115233048, "loss": 3.3213, "step": 80280 }, { "epoch": 5.454885174616116, "grad_norm": 1.3701212406158447, "learning_rate": 0.0003183856502242152, "loss": 3.289, "step": 80285 }, { "epoch": 5.455224894686778, "grad_norm": 1.5962772369384766, "learning_rate": 
0.00031834318521538254, "loss": 3.0967, "step": 80290 }, { "epoch": 5.4555646147574395, "grad_norm": 1.1575164794921875, "learning_rate": 0.0003183007202065498, "loss": 3.5981, "step": 80295 }, { "epoch": 5.4559043348281016, "grad_norm": 1.263176679611206, "learning_rate": 0.00031825825519771704, "loss": 3.5357, "step": 80300 }, { "epoch": 5.456244054898764, "grad_norm": 1.4114141464233398, "learning_rate": 0.0003182157901888844, "loss": 3.3984, "step": 80305 }, { "epoch": 5.456583774969425, "grad_norm": 1.2011035680770874, "learning_rate": 0.00031817332518005166, "loss": 3.2839, "step": 80310 }, { "epoch": 5.456923495040087, "grad_norm": 1.3564320802688599, "learning_rate": 0.00031813086017121894, "loss": 3.1755, "step": 80315 }, { "epoch": 5.457263215110749, "grad_norm": 1.2517393827438354, "learning_rate": 0.00031808839516238617, "loss": 3.0916, "step": 80320 }, { "epoch": 5.45760293518141, "grad_norm": 1.513993740081787, "learning_rate": 0.0003180459301535535, "loss": 3.3304, "step": 80325 }, { "epoch": 5.457942655252072, "grad_norm": 1.323142170906067, "learning_rate": 0.0003180034651447208, "loss": 3.5855, "step": 80330 }, { "epoch": 5.458282375322734, "grad_norm": 1.2549288272857666, "learning_rate": 0.000317961000135888, "loss": 3.4157, "step": 80335 }, { "epoch": 5.4586220953933955, "grad_norm": 1.4576215744018555, "learning_rate": 0.00031791853512705534, "loss": 3.2546, "step": 80340 }, { "epoch": 5.458961815464058, "grad_norm": 1.4014958143234253, "learning_rate": 0.0003178760701182226, "loss": 3.4256, "step": 80345 }, { "epoch": 5.45930153553472, "grad_norm": 1.2431888580322266, "learning_rate": 0.00031783360510938985, "loss": 3.2331, "step": 80350 }, { "epoch": 5.459641255605381, "grad_norm": 1.5006036758422852, "learning_rate": 0.0003177911401005571, "loss": 3.4146, "step": 80355 }, { "epoch": 5.459980975676043, "grad_norm": 1.5317840576171875, "learning_rate": 0.00031774867509172446, "loss": 3.1534, "step": 80360 }, { "epoch": 5.460320695746705, 
"grad_norm": 1.1838405132293701, "learning_rate": 0.0003177062100828917, "loss": 3.4925, "step": 80365 }, { "epoch": 5.460660415817366, "grad_norm": 1.60854172706604, "learning_rate": 0.00031766374507405897, "loss": 3.2401, "step": 80370 }, { "epoch": 5.461000135888028, "grad_norm": 1.2503507137298584, "learning_rate": 0.0003176212800652263, "loss": 3.4146, "step": 80375 }, { "epoch": 5.46133985595869, "grad_norm": 1.4581913948059082, "learning_rate": 0.0003175788150563935, "loss": 3.4019, "step": 80380 }, { "epoch": 5.4616795760293515, "grad_norm": 1.1722489595413208, "learning_rate": 0.0003175363500475608, "loss": 3.2973, "step": 80385 }, { "epoch": 5.462019296100014, "grad_norm": 1.4835127592086792, "learning_rate": 0.00031749388503872814, "loss": 3.4921, "step": 80390 }, { "epoch": 5.462359016170676, "grad_norm": 1.2560213804244995, "learning_rate": 0.00031745142002989537, "loss": 3.382, "step": 80395 }, { "epoch": 5.462698736241337, "grad_norm": 1.19749915599823, "learning_rate": 0.00031740895502106265, "loss": 3.329, "step": 80400 }, { "epoch": 5.463038456311999, "grad_norm": 1.2832525968551636, "learning_rate": 0.0003173664900122299, "loss": 3.3148, "step": 80405 }, { "epoch": 5.463378176382661, "grad_norm": 1.1609008312225342, "learning_rate": 0.0003173240250033972, "loss": 3.4557, "step": 80410 }, { "epoch": 5.463717896453322, "grad_norm": 1.328698992729187, "learning_rate": 0.0003172815599945645, "loss": 3.5368, "step": 80415 }, { "epoch": 5.464057616523984, "grad_norm": 1.3218036890029907, "learning_rate": 0.00031723909498573177, "loss": 3.2756, "step": 80420 }, { "epoch": 5.464397336594646, "grad_norm": 1.1152310371398926, "learning_rate": 0.00031719662997689905, "loss": 2.8929, "step": 80425 }, { "epoch": 5.4647370566653075, "grad_norm": 1.591627836227417, "learning_rate": 0.0003171541649680663, "loss": 3.4196, "step": 80430 }, { "epoch": 5.46507677673597, "grad_norm": 0.9939836859703064, "learning_rate": 0.0003171116999592336, "loss": 3.489, "step": 
80435 }, { "epoch": 5.465416496806632, "grad_norm": 1.3307465314865112, "learning_rate": 0.00031706923495040083, "loss": 3.5097, "step": 80440 }, { "epoch": 5.465756216877293, "grad_norm": 1.4954819679260254, "learning_rate": 0.00031702676994156817, "loss": 3.7156, "step": 80445 }, { "epoch": 5.466095936947955, "grad_norm": 1.2938321828842163, "learning_rate": 0.00031698430493273545, "loss": 3.6614, "step": 80450 }, { "epoch": 5.466435657018617, "grad_norm": 1.0735969543457031, "learning_rate": 0.0003169418399239027, "loss": 3.3761, "step": 80455 }, { "epoch": 5.466775377089278, "grad_norm": 1.2211148738861084, "learning_rate": 0.00031689937491507, "loss": 3.4544, "step": 80460 }, { "epoch": 5.46711509715994, "grad_norm": 1.5965991020202637, "learning_rate": 0.0003168569099062373, "loss": 3.5961, "step": 80465 }, { "epoch": 5.467454817230602, "grad_norm": 1.2498908042907715, "learning_rate": 0.0003168144448974045, "loss": 3.226, "step": 80470 }, { "epoch": 5.4677945373012635, "grad_norm": 1.289735198020935, "learning_rate": 0.0003167719798885718, "loss": 3.2147, "step": 80475 }, { "epoch": 5.468134257371926, "grad_norm": 1.4800596237182617, "learning_rate": 0.00031672951487973913, "loss": 3.4156, "step": 80480 }, { "epoch": 5.468473977442588, "grad_norm": 2.011638641357422, "learning_rate": 0.0003166870498709064, "loss": 3.1812, "step": 80485 }, { "epoch": 5.468813697513249, "grad_norm": 1.5431478023529053, "learning_rate": 0.00031664458486207363, "loss": 3.3412, "step": 80490 }, { "epoch": 5.469153417583911, "grad_norm": 1.7137082815170288, "learning_rate": 0.00031660211985324097, "loss": 3.0971, "step": 80495 }, { "epoch": 5.469493137654573, "grad_norm": 1.2043765783309937, "learning_rate": 0.00031655965484440825, "loss": 3.0959, "step": 80500 }, { "epoch": 5.469832857725234, "grad_norm": 1.158798098564148, "learning_rate": 0.0003165171898355755, "loss": 3.3864, "step": 80505 }, { "epoch": 5.470172577795896, "grad_norm": 1.1147736310958862, "learning_rate": 
0.00031647472482674275, "loss": 3.3456, "step": 80510 }, { "epoch": 5.470512297866558, "grad_norm": 1.7620073556900024, "learning_rate": 0.0003164322598179101, "loss": 3.3932, "step": 80515 }, { "epoch": 5.4708520179372195, "grad_norm": 1.5559165477752686, "learning_rate": 0.0003163897948090773, "loss": 3.1325, "step": 80520 }, { "epoch": 5.471191738007882, "grad_norm": 1.576769232749939, "learning_rate": 0.0003163473298002446, "loss": 3.2386, "step": 80525 }, { "epoch": 5.471531458078544, "grad_norm": 1.0054069757461548, "learning_rate": 0.00031630486479141193, "loss": 3.3394, "step": 80530 }, { "epoch": 5.471871178149205, "grad_norm": 1.0989611148834229, "learning_rate": 0.00031626239978257915, "loss": 3.193, "step": 80535 }, { "epoch": 5.472210898219867, "grad_norm": 1.5050866603851318, "learning_rate": 0.00031621993477374643, "loss": 3.2821, "step": 80540 }, { "epoch": 5.472550618290528, "grad_norm": 1.5744456052780151, "learning_rate": 0.0003161774697649137, "loss": 3.4029, "step": 80545 }, { "epoch": 5.47289033836119, "grad_norm": 1.2902494668960571, "learning_rate": 0.000316135004756081, "loss": 3.3649, "step": 80550 }, { "epoch": 5.473230058431852, "grad_norm": 1.0875086784362793, "learning_rate": 0.0003160925397472483, "loss": 3.328, "step": 80555 }, { "epoch": 5.4735697785025135, "grad_norm": 1.3491700887680054, "learning_rate": 0.00031605007473841555, "loss": 3.3223, "step": 80560 }, { "epoch": 5.4739094985731755, "grad_norm": 1.6898112297058105, "learning_rate": 0.00031600760972958283, "loss": 3.2108, "step": 80565 }, { "epoch": 5.474249218643838, "grad_norm": 1.1158100366592407, "learning_rate": 0.0003159651447207501, "loss": 3.452, "step": 80570 }, { "epoch": 5.474588938714499, "grad_norm": 1.2810574769973755, "learning_rate": 0.0003159226797119174, "loss": 3.3499, "step": 80575 }, { "epoch": 5.474928658785161, "grad_norm": 1.2913084030151367, "learning_rate": 0.0003158802147030846, "loss": 3.5502, "step": 80580 }, { "epoch": 5.475268378855823, 
"grad_norm": 1.403201699256897, "learning_rate": 0.00031583774969425196, "loss": 3.5109, "step": 80585 }, { "epoch": 5.475608098926484, "grad_norm": 1.2168385982513428, "learning_rate": 0.00031579528468541924, "loss": 3.4547, "step": 80590 }, { "epoch": 5.475947818997146, "grad_norm": 1.5521544218063354, "learning_rate": 0.00031575281967658646, "loss": 3.4775, "step": 80595 }, { "epoch": 5.476287539067808, "grad_norm": 1.1849013566970825, "learning_rate": 0.0003157103546677538, "loss": 3.4907, "step": 80600 }, { "epoch": 5.4766272591384695, "grad_norm": 1.1624094247817993, "learning_rate": 0.0003156678896589211, "loss": 3.1384, "step": 80605 }, { "epoch": 5.476966979209132, "grad_norm": 1.0960602760314941, "learning_rate": 0.0003156254246500883, "loss": 3.2625, "step": 80610 }, { "epoch": 5.477306699279794, "grad_norm": 1.6395210027694702, "learning_rate": 0.0003155829596412556, "loss": 3.4782, "step": 80615 }, { "epoch": 5.477646419350455, "grad_norm": 1.0537503957748413, "learning_rate": 0.0003155404946324229, "loss": 3.6208, "step": 80620 }, { "epoch": 5.477986139421117, "grad_norm": 1.2897799015045166, "learning_rate": 0.00031549802962359014, "loss": 3.6267, "step": 80625 }, { "epoch": 5.478325859491779, "grad_norm": 1.1833018064498901, "learning_rate": 0.0003154555646147574, "loss": 3.195, "step": 80630 }, { "epoch": 5.47866557956244, "grad_norm": 1.1661380529403687, "learning_rate": 0.00031541309960592476, "loss": 3.2786, "step": 80635 }, { "epoch": 5.479005299633102, "grad_norm": 1.2523701190948486, "learning_rate": 0.000315370634597092, "loss": 3.5516, "step": 80640 }, { "epoch": 5.479345019703764, "grad_norm": 1.2911663055419922, "learning_rate": 0.00031532816958825926, "loss": 3.3004, "step": 80645 }, { "epoch": 5.4796847397744255, "grad_norm": 1.055404543876648, "learning_rate": 0.00031528570457942654, "loss": 3.4092, "step": 80650 }, { "epoch": 5.480024459845088, "grad_norm": 1.324367642402649, "learning_rate": 0.0003152432395705939, "loss": 3.3673, 
"step": 80655 }, { "epoch": 5.48036417991575, "grad_norm": 1.1800976991653442, "learning_rate": 0.0003152007745617611, "loss": 3.1728, "step": 80660 }, { "epoch": 5.480703899986411, "grad_norm": 1.527087926864624, "learning_rate": 0.0003151583095529284, "loss": 3.2836, "step": 80665 }, { "epoch": 5.481043620057073, "grad_norm": 1.1819223165512085, "learning_rate": 0.0003151158445440957, "loss": 3.4106, "step": 80670 }, { "epoch": 5.481383340127735, "grad_norm": 1.3146494626998901, "learning_rate": 0.00031507337953526294, "loss": 3.3941, "step": 80675 }, { "epoch": 5.481723060198396, "grad_norm": 1.3556865453720093, "learning_rate": 0.0003150309145264302, "loss": 3.3164, "step": 80680 }, { "epoch": 5.482062780269058, "grad_norm": 1.394602656364441, "learning_rate": 0.00031498844951759756, "loss": 3.4785, "step": 80685 }, { "epoch": 5.48240250033972, "grad_norm": 1.3043218851089478, "learning_rate": 0.0003149459845087648, "loss": 3.1416, "step": 80690 }, { "epoch": 5.4827422204103815, "grad_norm": 1.5191595554351807, "learning_rate": 0.00031490351949993206, "loss": 3.5097, "step": 80695 }, { "epoch": 5.483081940481044, "grad_norm": 1.0497941970825195, "learning_rate": 0.00031486105449109934, "loss": 3.4098, "step": 80700 }, { "epoch": 5.483421660551706, "grad_norm": 0.936039388179779, "learning_rate": 0.0003148185894822666, "loss": 3.1407, "step": 80705 }, { "epoch": 5.483761380622367, "grad_norm": 0.9559125900268555, "learning_rate": 0.0003147761244734339, "loss": 3.4118, "step": 80710 }, { "epoch": 5.484101100693029, "grad_norm": 1.2498624324798584, "learning_rate": 0.0003147336594646012, "loss": 3.2858, "step": 80715 }, { "epoch": 5.484440820763691, "grad_norm": 1.3577156066894531, "learning_rate": 0.00031469119445576846, "loss": 3.5257, "step": 80720 }, { "epoch": 5.484780540834352, "grad_norm": 1.1434508562088013, "learning_rate": 0.00031464872944693574, "loss": 3.2971, "step": 80725 }, { "epoch": 5.485120260905014, "grad_norm": 0.9927330017089844, 
"learning_rate": 0.000314606264438103, "loss": 3.5549, "step": 80730 }, { "epoch": 5.485459980975676, "grad_norm": 1.3848605155944824, "learning_rate": 0.00031456379942927025, "loss": 3.449, "step": 80735 }, { "epoch": 5.4857997010463375, "grad_norm": 1.2149591445922852, "learning_rate": 0.0003145213344204376, "loss": 3.2686, "step": 80740 }, { "epoch": 5.486139421117, "grad_norm": 1.1831129789352417, "learning_rate": 0.00031447886941160486, "loss": 3.1024, "step": 80745 }, { "epoch": 5.486479141187662, "grad_norm": 1.2165579795837402, "learning_rate": 0.0003144364044027721, "loss": 3.4661, "step": 80750 }, { "epoch": 5.486818861258323, "grad_norm": 1.171761155128479, "learning_rate": 0.0003143939393939394, "loss": 3.3769, "step": 80755 }, { "epoch": 5.487158581328985, "grad_norm": 1.262731671333313, "learning_rate": 0.0003143514743851067, "loss": 3.1513, "step": 80760 }, { "epoch": 5.487498301399647, "grad_norm": 1.3336892127990723, "learning_rate": 0.00031430900937627393, "loss": 3.1763, "step": 80765 }, { "epoch": 5.487838021470308, "grad_norm": 1.2121586799621582, "learning_rate": 0.0003142665443674412, "loss": 3.5224, "step": 80770 }, { "epoch": 5.48817774154097, "grad_norm": 1.065772533416748, "learning_rate": 0.00031422407935860854, "loss": 2.9596, "step": 80775 }, { "epoch": 5.488517461611632, "grad_norm": 1.6752734184265137, "learning_rate": 0.00031418161434977577, "loss": 3.2009, "step": 80780 }, { "epoch": 5.4888571816822935, "grad_norm": 1.3368479013442993, "learning_rate": 0.00031413914934094305, "loss": 3.2007, "step": 80785 }, { "epoch": 5.489196901752956, "grad_norm": 1.1610239744186401, "learning_rate": 0.0003140966843321104, "loss": 3.2525, "step": 80790 }, { "epoch": 5.489536621823618, "grad_norm": 1.3962366580963135, "learning_rate": 0.0003140542193232776, "loss": 3.3739, "step": 80795 }, { "epoch": 5.489876341894279, "grad_norm": 1.1467036008834839, "learning_rate": 0.0003140117543144449, "loss": 3.1199, "step": 80800 }, { "epoch": 
5.490216061964941, "grad_norm": 1.3451886177062988, "learning_rate": 0.00031396928930561217, "loss": 3.3567, "step": 80805 }, { "epoch": 5.490555782035603, "grad_norm": 1.8010594844818115, "learning_rate": 0.00031392682429677945, "loss": 3.556, "step": 80810 }, { "epoch": 5.490895502106264, "grad_norm": 1.1620780229568481, "learning_rate": 0.00031388435928794673, "loss": 3.369, "step": 80815 }, { "epoch": 5.491235222176926, "grad_norm": 1.6970545053482056, "learning_rate": 0.000313841894279114, "loss": 3.1481, "step": 80820 }, { "epoch": 5.491574942247588, "grad_norm": 1.2854913473129272, "learning_rate": 0.00031379942927028134, "loss": 3.2071, "step": 80825 }, { "epoch": 5.4919146623182495, "grad_norm": 1.5076748132705688, "learning_rate": 0.00031375696426144857, "loss": 3.2522, "step": 80830 }, { "epoch": 5.492254382388912, "grad_norm": 1.0956509113311768, "learning_rate": 0.00031371449925261585, "loss": 3.2356, "step": 80835 }, { "epoch": 5.492594102459574, "grad_norm": 1.1936697959899902, "learning_rate": 0.00031367203424378313, "loss": 3.5029, "step": 80840 }, { "epoch": 5.492933822530235, "grad_norm": 1.4902629852294922, "learning_rate": 0.0003136295692349504, "loss": 3.0589, "step": 80845 }, { "epoch": 5.493273542600897, "grad_norm": 1.3232405185699463, "learning_rate": 0.0003135871042261177, "loss": 3.2412, "step": 80850 }, { "epoch": 5.493613262671559, "grad_norm": 1.0948896408081055, "learning_rate": 0.00031354463921728497, "loss": 3.1187, "step": 80855 }, { "epoch": 5.49395298274222, "grad_norm": 1.0352457761764526, "learning_rate": 0.00031350217420845225, "loss": 3.5541, "step": 80860 }, { "epoch": 5.494292702812882, "grad_norm": 1.6631284952163696, "learning_rate": 0.00031345970919961953, "loss": 3.2737, "step": 80865 }, { "epoch": 5.494632422883544, "grad_norm": 1.3588049411773682, "learning_rate": 0.0003134172441907868, "loss": 3.5933, "step": 80870 }, { "epoch": 5.4949721429542056, "grad_norm": 1.6218422651290894, "learning_rate": 
0.00031337477918195404, "loss": 3.3968, "step": 80875 }, { "epoch": 5.495311863024868, "grad_norm": 1.4590418338775635, "learning_rate": 0.00031333231417312137, "loss": 3.4647, "step": 80880 }, { "epoch": 5.49565158309553, "grad_norm": 1.1408554315567017, "learning_rate": 0.00031328984916428865, "loss": 3.0999, "step": 80885 }, { "epoch": 5.495991303166191, "grad_norm": 1.2193968296051025, "learning_rate": 0.0003132473841554559, "loss": 3.4147, "step": 80890 }, { "epoch": 5.496331023236853, "grad_norm": 1.222273349761963, "learning_rate": 0.0003132049191466232, "loss": 3.2609, "step": 80895 }, { "epoch": 5.496670743307515, "grad_norm": 1.462052345275879, "learning_rate": 0.0003131624541377905, "loss": 3.0521, "step": 80900 }, { "epoch": 5.497010463378176, "grad_norm": 1.4349968433380127, "learning_rate": 0.0003131199891289577, "loss": 3.5701, "step": 80905 }, { "epoch": 5.497350183448838, "grad_norm": 1.3760151863098145, "learning_rate": 0.000313077524120125, "loss": 3.1696, "step": 80910 }, { "epoch": 5.4976899035195, "grad_norm": 1.2563103437423706, "learning_rate": 0.00031303505911129233, "loss": 3.6626, "step": 80915 }, { "epoch": 5.498029623590162, "grad_norm": 1.0741536617279053, "learning_rate": 0.00031299259410245956, "loss": 3.3186, "step": 80920 }, { "epoch": 5.498369343660824, "grad_norm": 1.4656704664230347, "learning_rate": 0.00031295012909362684, "loss": 3.3128, "step": 80925 }, { "epoch": 5.498709063731486, "grad_norm": 1.804511308670044, "learning_rate": 0.00031290766408479417, "loss": 3.556, "step": 80930 }, { "epoch": 5.499048783802147, "grad_norm": 1.3304274082183838, "learning_rate": 0.0003128651990759614, "loss": 3.4302, "step": 80935 }, { "epoch": 5.499388503872809, "grad_norm": 1.0373867750167847, "learning_rate": 0.0003128227340671287, "loss": 3.3118, "step": 80940 }, { "epoch": 5.499728223943471, "grad_norm": 1.027099847793579, "learning_rate": 0.000312780269058296, "loss": 3.3341, "step": 80945 }, { "epoch": 5.500067944014132, "grad_norm": 
1.3531339168548584, "learning_rate": 0.00031273780404946324, "loss": 3.2935, "step": 80950 }, { "epoch": 5.500407664084794, "grad_norm": 1.4622185230255127, "learning_rate": 0.0003126953390406305, "loss": 3.3778, "step": 80955 }, { "epoch": 5.500747384155456, "grad_norm": 1.4142513275146484, "learning_rate": 0.0003126528740317978, "loss": 3.3821, "step": 80960 }, { "epoch": 5.501087104226118, "grad_norm": 1.1309610605239868, "learning_rate": 0.0003126104090229651, "loss": 2.9964, "step": 80965 }, { "epoch": 5.50142682429678, "grad_norm": 1.1432780027389526, "learning_rate": 0.00031256794401413236, "loss": 3.4847, "step": 80970 }, { "epoch": 5.501766544367442, "grad_norm": 1.19589364528656, "learning_rate": 0.00031252547900529964, "loss": 3.187, "step": 80975 }, { "epoch": 5.502106264438103, "grad_norm": 1.1717517375946045, "learning_rate": 0.0003124830139964669, "loss": 3.4661, "step": 80980 }, { "epoch": 5.502445984508765, "grad_norm": 1.2302898168563843, "learning_rate": 0.0003124405489876342, "loss": 3.1996, "step": 80985 }, { "epoch": 5.502785704579426, "grad_norm": 1.970080852508545, "learning_rate": 0.0003123980839788015, "loss": 3.0932, "step": 80990 }, { "epoch": 5.503125424650088, "grad_norm": 1.2214518785476685, "learning_rate": 0.0003123556189699687, "loss": 3.0124, "step": 80995 }, { "epoch": 5.50346514472075, "grad_norm": 1.2024757862091064, "learning_rate": 0.00031231315396113604, "loss": 3.441, "step": 81000 }, { "epoch": 5.5038048647914115, "grad_norm": 1.1494160890579224, "learning_rate": 0.0003122706889523033, "loss": 3.3982, "step": 81005 }, { "epoch": 5.504144584862074, "grad_norm": 1.1622494459152222, "learning_rate": 0.0003122282239434706, "loss": 3.1657, "step": 81010 }, { "epoch": 5.504484304932736, "grad_norm": 1.5642244815826416, "learning_rate": 0.0003121857589346379, "loss": 3.2152, "step": 81015 }, { "epoch": 5.504824025003397, "grad_norm": 1.135150671005249, "learning_rate": 0.00031214329392580516, "loss": 3.4005, "step": 81020 }, { 
"epoch": 5.505163745074059, "grad_norm": 1.336294174194336, "learning_rate": 0.00031210082891697244, "loss": 3.2158, "step": 81025 }, { "epoch": 5.505503465144721, "grad_norm": 0.9847615957260132, "learning_rate": 0.00031205836390813967, "loss": 3.236, "step": 81030 }, { "epoch": 5.505843185215382, "grad_norm": 1.1590484380722046, "learning_rate": 0.000312015898899307, "loss": 3.3168, "step": 81035 }, { "epoch": 5.506182905286044, "grad_norm": 1.251577377319336, "learning_rate": 0.0003119734338904743, "loss": 3.3656, "step": 81040 }, { "epoch": 5.506522625356706, "grad_norm": 1.2973157167434692, "learning_rate": 0.0003119309688816415, "loss": 3.3634, "step": 81045 }, { "epoch": 5.5068623454273675, "grad_norm": 1.0884023904800415, "learning_rate": 0.00031188850387280884, "loss": 3.3347, "step": 81050 }, { "epoch": 5.50720206549803, "grad_norm": 1.3355547189712524, "learning_rate": 0.0003118460388639761, "loss": 3.4458, "step": 81055 }, { "epoch": 5.507541785568692, "grad_norm": 1.3031435012817383, "learning_rate": 0.00031180357385514335, "loss": 3.1996, "step": 81060 }, { "epoch": 5.507881505639353, "grad_norm": 1.0833555459976196, "learning_rate": 0.0003117611088463106, "loss": 3.3972, "step": 81065 }, { "epoch": 5.508221225710015, "grad_norm": 1.038313627243042, "learning_rate": 0.00031171864383747796, "loss": 3.3326, "step": 81070 }, { "epoch": 5.508560945780677, "grad_norm": 1.2575916051864624, "learning_rate": 0.0003116761788286452, "loss": 3.3067, "step": 81075 }, { "epoch": 5.508900665851338, "grad_norm": 1.2077677249908447, "learning_rate": 0.00031163371381981247, "loss": 3.1038, "step": 81080 }, { "epoch": 5.509240385922, "grad_norm": 1.4400616884231567, "learning_rate": 0.0003115912488109798, "loss": 3.2804, "step": 81085 }, { "epoch": 5.509580105992662, "grad_norm": 1.2610852718353271, "learning_rate": 0.000311548783802147, "loss": 3.2658, "step": 81090 }, { "epoch": 5.5099198260633235, "grad_norm": 1.3679310083389282, "learning_rate": 
0.0003115063187933143, "loss": 3.4104, "step": 81095 }, { "epoch": 5.510259546133986, "grad_norm": 1.2247010469436646, "learning_rate": 0.0003114638537844816, "loss": 3.4649, "step": 81100 }, { "epoch": 5.510599266204648, "grad_norm": 1.459299087524414, "learning_rate": 0.00031142138877564887, "loss": 3.4762, "step": 81105 }, { "epoch": 5.510938986275309, "grad_norm": 1.3448395729064941, "learning_rate": 0.00031137892376681615, "loss": 3.4355, "step": 81110 }, { "epoch": 5.511278706345971, "grad_norm": 1.1592835187911987, "learning_rate": 0.0003113364587579834, "loss": 3.1791, "step": 81115 }, { "epoch": 5.511618426416633, "grad_norm": 1.0549042224884033, "learning_rate": 0.0003112939937491507, "loss": 3.2769, "step": 81120 }, { "epoch": 5.511958146487294, "grad_norm": 1.3485382795333862, "learning_rate": 0.000311251528740318, "loss": 3.0753, "step": 81125 }, { "epoch": 5.512297866557956, "grad_norm": 1.520370602607727, "learning_rate": 0.00031120906373148527, "loss": 3.3794, "step": 81130 }, { "epoch": 5.512637586628618, "grad_norm": 1.1395751237869263, "learning_rate": 0.0003111665987226525, "loss": 3.3455, "step": 81135 }, { "epoch": 5.5129773066992795, "grad_norm": 1.255660891532898, "learning_rate": 0.0003111241337138198, "loss": 3.2377, "step": 81140 }, { "epoch": 5.513317026769942, "grad_norm": 1.8602204322814941, "learning_rate": 0.0003110816687049871, "loss": 3.4933, "step": 81145 }, { "epoch": 5.513656746840604, "grad_norm": 1.409056544303894, "learning_rate": 0.00031103920369615433, "loss": 3.381, "step": 81150 }, { "epoch": 5.513996466911265, "grad_norm": 1.022095799446106, "learning_rate": 0.00031099673868732167, "loss": 3.1459, "step": 81155 }, { "epoch": 5.514336186981927, "grad_norm": 1.4027656316757202, "learning_rate": 0.00031095427367848895, "loss": 3.6666, "step": 81160 }, { "epoch": 5.514675907052589, "grad_norm": 1.315589189529419, "learning_rate": 0.0003109118086696562, "loss": 3.5163, "step": 81165 }, { "epoch": 5.51501562712325, 
"grad_norm": 1.7052340507507324, "learning_rate": 0.00031086934366082345, "loss": 3.4168, "step": 81170 }, { "epoch": 5.515355347193912, "grad_norm": 1.5926969051361084, "learning_rate": 0.0003108268786519908, "loss": 3.1814, "step": 81175 }, { "epoch": 5.515695067264574, "grad_norm": 1.1800366640090942, "learning_rate": 0.00031078441364315807, "loss": 3.4081, "step": 81180 }, { "epoch": 5.516034787335236, "grad_norm": 1.539967656135559, "learning_rate": 0.0003107419486343253, "loss": 3.4156, "step": 81185 }, { "epoch": 5.516374507405898, "grad_norm": 1.2232921123504639, "learning_rate": 0.00031069948362549263, "loss": 3.4736, "step": 81190 }, { "epoch": 5.516714227476559, "grad_norm": 1.4000074863433838, "learning_rate": 0.0003106570186166599, "loss": 3.38, "step": 81195 }, { "epoch": 5.517053947547221, "grad_norm": 1.184725284576416, "learning_rate": 0.00031061455360782713, "loss": 3.408, "step": 81200 }, { "epoch": 5.517393667617883, "grad_norm": 1.377653956413269, "learning_rate": 0.0003105720885989944, "loss": 3.2147, "step": 81205 }, { "epoch": 5.517733387688544, "grad_norm": 1.4658023118972778, "learning_rate": 0.00031052962359016175, "loss": 3.5377, "step": 81210 }, { "epoch": 5.518073107759206, "grad_norm": 1.1957592964172363, "learning_rate": 0.000310487158581329, "loss": 3.4429, "step": 81215 }, { "epoch": 5.518412827829868, "grad_norm": 1.2356411218643188, "learning_rate": 0.00031044469357249625, "loss": 3.4814, "step": 81220 }, { "epoch": 5.5187525479005295, "grad_norm": 1.2910871505737305, "learning_rate": 0.0003104022285636636, "loss": 3.5979, "step": 81225 }, { "epoch": 5.519092267971192, "grad_norm": 1.4239152669906616, "learning_rate": 0.0003103597635548308, "loss": 3.2716, "step": 81230 }, { "epoch": 5.519431988041854, "grad_norm": 1.367651104927063, "learning_rate": 0.0003103172985459981, "loss": 3.5461, "step": 81235 }, { "epoch": 5.519771708112515, "grad_norm": 1.101148009300232, "learning_rate": 0.00031027483353716543, "loss": 3.6385, "step": 
81240 }, { "epoch": 5.520111428183177, "grad_norm": 1.0958737134933472, "learning_rate": 0.00031023236852833265, "loss": 3.5719, "step": 81245 }, { "epoch": 5.520451148253839, "grad_norm": 1.3776546716690063, "learning_rate": 0.00031018990351949993, "loss": 3.6009, "step": 81250 }, { "epoch": 5.5207908683245, "grad_norm": 1.2025457620620728, "learning_rate": 0.0003101474385106672, "loss": 3.2927, "step": 81255 }, { "epoch": 5.521130588395162, "grad_norm": 1.9396922588348389, "learning_rate": 0.0003101049735018345, "loss": 3.5443, "step": 81260 }, { "epoch": 5.521470308465824, "grad_norm": 1.383796215057373, "learning_rate": 0.0003100625084930018, "loss": 3.2715, "step": 81265 }, { "epoch": 5.5218100285364855, "grad_norm": 1.4550343751907349, "learning_rate": 0.00031002004348416905, "loss": 3.2583, "step": 81270 }, { "epoch": 5.522149748607148, "grad_norm": 1.5099334716796875, "learning_rate": 0.00030997757847533633, "loss": 3.2162, "step": 81275 }, { "epoch": 5.52248946867781, "grad_norm": 1.351415991783142, "learning_rate": 0.0003099351134665036, "loss": 3.4773, "step": 81280 }, { "epoch": 5.522829188748471, "grad_norm": 1.148837685585022, "learning_rate": 0.0003098926484576709, "loss": 3.4624, "step": 81285 }, { "epoch": 5.523168908819133, "grad_norm": 1.6982401609420776, "learning_rate": 0.0003098501834488381, "loss": 3.5207, "step": 81290 }, { "epoch": 5.523508628889795, "grad_norm": 1.432640790939331, "learning_rate": 0.00030980771844000545, "loss": 3.3073, "step": 81295 }, { "epoch": 5.523848348960456, "grad_norm": 1.171907663345337, "learning_rate": 0.00030976525343117274, "loss": 3.3014, "step": 81300 }, { "epoch": 5.524188069031118, "grad_norm": 1.039070963859558, "learning_rate": 0.00030972278842233996, "loss": 3.3797, "step": 81305 }, { "epoch": 5.52452778910178, "grad_norm": 1.646653652191162, "learning_rate": 0.0003096803234135073, "loss": 3.4782, "step": 81310 }, { "epoch": 5.5248675091724415, "grad_norm": 1.2454804182052612, "learning_rate": 
0.0003096378584046746, "loss": 3.2726, "step": 81315 }, { "epoch": 5.525207229243104, "grad_norm": 1.5323026180267334, "learning_rate": 0.0003095953933958418, "loss": 3.1417, "step": 81320 }, { "epoch": 5.525546949313766, "grad_norm": 1.2655538320541382, "learning_rate": 0.0003095529283870091, "loss": 3.2751, "step": 81325 }, { "epoch": 5.525886669384427, "grad_norm": 1.0770084857940674, "learning_rate": 0.0003095104633781764, "loss": 3.2441, "step": 81330 }, { "epoch": 5.526226389455089, "grad_norm": 1.2275582551956177, "learning_rate": 0.00030946799836934364, "loss": 3.5605, "step": 81335 }, { "epoch": 5.526566109525751, "grad_norm": 1.0622150897979736, "learning_rate": 0.0003094255333605109, "loss": 3.3923, "step": 81340 }, { "epoch": 5.526905829596412, "grad_norm": 1.563137412071228, "learning_rate": 0.00030938306835167826, "loss": 3.2872, "step": 81345 }, { "epoch": 5.527245549667074, "grad_norm": 1.2145485877990723, "learning_rate": 0.00030934060334284554, "loss": 3.5341, "step": 81350 }, { "epoch": 5.527585269737736, "grad_norm": 1.3980175256729126, "learning_rate": 0.00030929813833401276, "loss": 3.4251, "step": 81355 }, { "epoch": 5.5279249898083975, "grad_norm": 1.220550298690796, "learning_rate": 0.00030925567332518004, "loss": 3.1501, "step": 81360 }, { "epoch": 5.52826470987906, "grad_norm": 1.3049856424331665, "learning_rate": 0.0003092132083163474, "loss": 3.3508, "step": 81365 }, { "epoch": 5.528604429949722, "grad_norm": 1.4663723707199097, "learning_rate": 0.0003091707433075146, "loss": 3.0479, "step": 81370 }, { "epoch": 5.528944150020383, "grad_norm": 1.153400182723999, "learning_rate": 0.0003091282782986819, "loss": 3.2796, "step": 81375 }, { "epoch": 5.529283870091045, "grad_norm": 1.3362469673156738, "learning_rate": 0.0003090858132898492, "loss": 3.2545, "step": 81380 }, { "epoch": 5.529623590161707, "grad_norm": 1.362809419631958, "learning_rate": 0.00030904334828101644, "loss": 3.4509, "step": 81385 }, { "epoch": 5.529963310232368, 
"grad_norm": 1.0592477321624756, "learning_rate": 0.0003090008832721837, "loss": 3.3048, "step": 81390 }, { "epoch": 5.53030303030303, "grad_norm": 1.3030407428741455, "learning_rate": 0.000308958418263351, "loss": 3.2145, "step": 81395 }, { "epoch": 5.530642750373692, "grad_norm": 1.4407631158828735, "learning_rate": 0.0003089159532545183, "loss": 3.2456, "step": 81400 }, { "epoch": 5.5309824704443535, "grad_norm": 1.1152758598327637, "learning_rate": 0.00030887348824568556, "loss": 3.3407, "step": 81405 }, { "epoch": 5.531322190515016, "grad_norm": 1.4705510139465332, "learning_rate": 0.00030883102323685284, "loss": 3.3916, "step": 81410 }, { "epoch": 5.531661910585678, "grad_norm": 1.277665138244629, "learning_rate": 0.0003087885582280201, "loss": 3.5059, "step": 81415 }, { "epoch": 5.532001630656339, "grad_norm": 1.1365159749984741, "learning_rate": 0.0003087460932191874, "loss": 3.496, "step": 81420 }, { "epoch": 5.532341350727001, "grad_norm": 1.2778998613357544, "learning_rate": 0.0003087036282103547, "loss": 3.2094, "step": 81425 }, { "epoch": 5.532681070797663, "grad_norm": 1.1371132135391235, "learning_rate": 0.0003086611632015219, "loss": 3.1889, "step": 81430 }, { "epoch": 5.533020790868324, "grad_norm": 1.1292535066604614, "learning_rate": 0.00030861869819268924, "loss": 3.4653, "step": 81435 }, { "epoch": 5.533360510938986, "grad_norm": 1.2141034603118896, "learning_rate": 0.0003085762331838565, "loss": 3.4168, "step": 81440 }, { "epoch": 5.533700231009648, "grad_norm": 1.4002890586853027, "learning_rate": 0.00030853376817502375, "loss": 3.4444, "step": 81445 }, { "epoch": 5.5340399510803095, "grad_norm": 1.3477728366851807, "learning_rate": 0.0003084913031661911, "loss": 3.135, "step": 81450 }, { "epoch": 5.534379671150972, "grad_norm": 1.1886651515960693, "learning_rate": 0.00030844883815735836, "loss": 3.3815, "step": 81455 }, { "epoch": 5.534719391221634, "grad_norm": 1.3269942998886108, "learning_rate": 0.0003084063731485256, "loss": 3.3351, 
"step": 81460 }, { "epoch": 5.535059111292295, "grad_norm": 1.4418367147445679, "learning_rate": 0.00030836390813969287, "loss": 3.4289, "step": 81465 }, { "epoch": 5.535398831362957, "grad_norm": 1.3103488683700562, "learning_rate": 0.0003083214431308602, "loss": 3.2282, "step": 81470 }, { "epoch": 5.535738551433619, "grad_norm": 1.1797515153884888, "learning_rate": 0.00030827897812202743, "loss": 3.4788, "step": 81475 }, { "epoch": 5.53607827150428, "grad_norm": 1.3711068630218506, "learning_rate": 0.0003082365131131947, "loss": 3.3831, "step": 81480 }, { "epoch": 5.536417991574942, "grad_norm": 1.3164228200912476, "learning_rate": 0.00030819404810436204, "loss": 3.3236, "step": 81485 }, { "epoch": 5.536757711645604, "grad_norm": 1.0542515516281128, "learning_rate": 0.00030815158309552927, "loss": 3.2895, "step": 81490 }, { "epoch": 5.537097431716266, "grad_norm": 1.3082833290100098, "learning_rate": 0.00030810911808669655, "loss": 3.479, "step": 81495 }, { "epoch": 5.537437151786928, "grad_norm": 1.2279331684112549, "learning_rate": 0.00030806665307786383, "loss": 3.2605, "step": 81500 }, { "epoch": 5.53777687185759, "grad_norm": 1.2009673118591309, "learning_rate": 0.0003080241880690311, "loss": 3.7073, "step": 81505 }, { "epoch": 5.538116591928251, "grad_norm": 1.393409252166748, "learning_rate": 0.0003079817230601984, "loss": 3.1548, "step": 81510 }, { "epoch": 5.538456311998913, "grad_norm": 1.4772628545761108, "learning_rate": 0.00030793925805136567, "loss": 3.3653, "step": 81515 }, { "epoch": 5.538796032069575, "grad_norm": 1.333341360092163, "learning_rate": 0.000307896793042533, "loss": 3.2908, "step": 81520 }, { "epoch": 5.539135752140236, "grad_norm": 1.0846182107925415, "learning_rate": 0.00030785432803370023, "loss": 3.5011, "step": 81525 }, { "epoch": 5.539475472210898, "grad_norm": 1.2617590427398682, "learning_rate": 0.0003078118630248675, "loss": 3.466, "step": 81530 }, { "epoch": 5.53981519228156, "grad_norm": 1.2546167373657227, 
"learning_rate": 0.00030776939801603484, "loss": 3.4668, "step": 81535 }, { "epoch": 5.540154912352222, "grad_norm": 1.231514573097229, "learning_rate": 0.00030772693300720207, "loss": 3.4129, "step": 81540 }, { "epoch": 5.540494632422884, "grad_norm": 0.9424011707305908, "learning_rate": 0.00030768446799836935, "loss": 3.3153, "step": 81545 }, { "epoch": 5.540834352493546, "grad_norm": 1.3159689903259277, "learning_rate": 0.00030764200298953663, "loss": 3.3705, "step": 81550 }, { "epoch": 5.541174072564207, "grad_norm": 1.2950767278671265, "learning_rate": 0.0003075995379807039, "loss": 3.3533, "step": 81555 }, { "epoch": 5.541513792634869, "grad_norm": 1.0124588012695312, "learning_rate": 0.0003075570729718712, "loss": 3.4952, "step": 81560 }, { "epoch": 5.541853512705531, "grad_norm": 1.2166502475738525, "learning_rate": 0.00030751460796303847, "loss": 3.0778, "step": 81565 }, { "epoch": 5.542193232776192, "grad_norm": 1.3087561130523682, "learning_rate": 0.00030747214295420575, "loss": 3.377, "step": 81570 }, { "epoch": 5.542532952846854, "grad_norm": 1.6853660345077515, "learning_rate": 0.00030742967794537303, "loss": 3.0737, "step": 81575 }, { "epoch": 5.542872672917516, "grad_norm": 1.1248931884765625, "learning_rate": 0.0003073872129365403, "loss": 3.3028, "step": 81580 }, { "epoch": 5.543212392988178, "grad_norm": 1.4390708208084106, "learning_rate": 0.00030734474792770754, "loss": 3.4253, "step": 81585 }, { "epoch": 5.54355211305884, "grad_norm": 1.6941789388656616, "learning_rate": 0.00030730228291887487, "loss": 3.2747, "step": 81590 }, { "epoch": 5.543891833129502, "grad_norm": 1.2230950593948364, "learning_rate": 0.00030725981791004215, "loss": 3.2459, "step": 81595 }, { "epoch": 5.544231553200163, "grad_norm": 1.0339089632034302, "learning_rate": 0.0003072173529012094, "loss": 3.2565, "step": 81600 }, { "epoch": 5.544571273270825, "grad_norm": 1.191004753112793, "learning_rate": 0.0003071748878923767, "loss": 3.4834, "step": 81605 }, { "epoch": 
5.544910993341487, "grad_norm": 1.1774137020111084, "learning_rate": 0.000307132422883544, "loss": 3.4165, "step": 81610 }, { "epoch": 5.545250713412148, "grad_norm": 1.3438529968261719, "learning_rate": 0.0003070899578747112, "loss": 3.1476, "step": 81615 }, { "epoch": 5.54559043348281, "grad_norm": 1.840828776359558, "learning_rate": 0.0003070474928658785, "loss": 3.1003, "step": 81620 }, { "epoch": 5.545930153553472, "grad_norm": 1.4093077182769775, "learning_rate": 0.00030700502785704583, "loss": 3.4048, "step": 81625 }, { "epoch": 5.546269873624134, "grad_norm": 1.5667113065719604, "learning_rate": 0.00030696256284821306, "loss": 3.3187, "step": 81630 }, { "epoch": 5.546609593694796, "grad_norm": 1.0979255437850952, "learning_rate": 0.00030692009783938034, "loss": 3.1214, "step": 81635 }, { "epoch": 5.546949313765458, "grad_norm": 1.939426064491272, "learning_rate": 0.00030687763283054767, "loss": 2.9113, "step": 81640 }, { "epoch": 5.547289033836119, "grad_norm": 1.3364202976226807, "learning_rate": 0.0003068351678217149, "loss": 3.1961, "step": 81645 }, { "epoch": 5.547628753906781, "grad_norm": 1.030707836151123, "learning_rate": 0.0003067927028128822, "loss": 3.5627, "step": 81650 }, { "epoch": 5.547968473977443, "grad_norm": 1.4111521244049072, "learning_rate": 0.00030675023780404946, "loss": 3.0222, "step": 81655 }, { "epoch": 5.548308194048104, "grad_norm": 1.0123318433761597, "learning_rate": 0.00030670777279521674, "loss": 3.432, "step": 81660 }, { "epoch": 5.548647914118766, "grad_norm": 1.28514564037323, "learning_rate": 0.000306665307786384, "loss": 3.2735, "step": 81665 }, { "epoch": 5.5489876341894275, "grad_norm": 1.4599714279174805, "learning_rate": 0.0003066228427775513, "loss": 3.4093, "step": 81670 }, { "epoch": 5.54932735426009, "grad_norm": 1.1827930212020874, "learning_rate": 0.0003065803777687186, "loss": 3.0605, "step": 81675 }, { "epoch": 5.549667074330752, "grad_norm": 1.6764378547668457, "learning_rate": 0.00030653791275988586, 
"loss": 3.3525, "step": 81680 }, { "epoch": 5.550006794401413, "grad_norm": 1.1083892583847046, "learning_rate": 0.00030649544775105314, "loss": 3.2684, "step": 81685 }, { "epoch": 5.550346514472075, "grad_norm": 1.2425450086593628, "learning_rate": 0.0003064529827422204, "loss": 3.2841, "step": 81690 }, { "epoch": 5.550686234542737, "grad_norm": 1.5577975511550903, "learning_rate": 0.0003064105177333877, "loss": 3.2624, "step": 81695 }, { "epoch": 5.551025954613398, "grad_norm": 1.401360273361206, "learning_rate": 0.000306368052724555, "loss": 3.4559, "step": 81700 }, { "epoch": 5.55136567468406, "grad_norm": 1.3416887521743774, "learning_rate": 0.00030632558771572226, "loss": 3.3206, "step": 81705 }, { "epoch": 5.551705394754722, "grad_norm": 1.2656792402267456, "learning_rate": 0.00030628312270688954, "loss": 3.0722, "step": 81710 }, { "epoch": 5.5520451148253835, "grad_norm": 1.3309649229049683, "learning_rate": 0.0003062406576980568, "loss": 3.3973, "step": 81715 }, { "epoch": 5.552384834896046, "grad_norm": 1.4353333711624146, "learning_rate": 0.0003061981926892241, "loss": 3.3805, "step": 81720 }, { "epoch": 5.552724554966708, "grad_norm": 1.3103504180908203, "learning_rate": 0.0003061557276803913, "loss": 3.5754, "step": 81725 }, { "epoch": 5.553064275037369, "grad_norm": 1.2950383424758911, "learning_rate": 0.00030611326267155866, "loss": 3.1438, "step": 81730 }, { "epoch": 5.553403995108031, "grad_norm": 1.7176481485366821, "learning_rate": 0.00030607079766272594, "loss": 3.4254, "step": 81735 }, { "epoch": 5.553743715178693, "grad_norm": 1.4045255184173584, "learning_rate": 0.00030602833265389317, "loss": 3.5687, "step": 81740 }, { "epoch": 5.554083435249354, "grad_norm": 1.0733636617660522, "learning_rate": 0.0003059858676450605, "loss": 3.3968, "step": 81745 }, { "epoch": 5.554423155320016, "grad_norm": 1.64539635181427, "learning_rate": 0.0003059434026362278, "loss": 3.1571, "step": 81750 }, { "epoch": 5.554762875390678, "grad_norm": 
0.9944605231285095, "learning_rate": 0.000305900937627395, "loss": 3.2199, "step": 81755 }, { "epoch": 5.5551025954613396, "grad_norm": 1.102731704711914, "learning_rate": 0.0003058584726185623, "loss": 3.3439, "step": 81760 }, { "epoch": 5.555442315532002, "grad_norm": 1.478013277053833, "learning_rate": 0.0003058160076097296, "loss": 3.1458, "step": 81765 }, { "epoch": 5.555782035602664, "grad_norm": 1.3099013566970825, "learning_rate": 0.00030577354260089685, "loss": 3.2215, "step": 81770 }, { "epoch": 5.556121755673325, "grad_norm": 1.6925766468048096, "learning_rate": 0.0003057310775920641, "loss": 3.4292, "step": 81775 }, { "epoch": 5.556461475743987, "grad_norm": 1.5001899003982544, "learning_rate": 0.00030568861258323146, "loss": 3.4066, "step": 81780 }, { "epoch": 5.556801195814649, "grad_norm": 1.1872578859329224, "learning_rate": 0.0003056461475743987, "loss": 3.5483, "step": 81785 }, { "epoch": 5.55714091588531, "grad_norm": 1.3276021480560303, "learning_rate": 0.00030560368256556597, "loss": 3.2485, "step": 81790 }, { "epoch": 5.557480635955972, "grad_norm": 1.0117759704589844, "learning_rate": 0.0003055612175567333, "loss": 3.1226, "step": 81795 }, { "epoch": 5.557820356026634, "grad_norm": 1.3807814121246338, "learning_rate": 0.0003055187525479005, "loss": 3.3168, "step": 81800 }, { "epoch": 5.558160076097296, "grad_norm": 1.2077306509017944, "learning_rate": 0.0003054762875390678, "loss": 3.3711, "step": 81805 }, { "epoch": 5.558499796167958, "grad_norm": 1.3171372413635254, "learning_rate": 0.0003054338225302351, "loss": 3.2383, "step": 81810 }, { "epoch": 5.55883951623862, "grad_norm": 1.194007158279419, "learning_rate": 0.00030539135752140237, "loss": 3.4224, "step": 81815 }, { "epoch": 5.559179236309281, "grad_norm": 1.4784232378005981, "learning_rate": 0.00030534889251256965, "loss": 3.3784, "step": 81820 }, { "epoch": 5.559518956379943, "grad_norm": 1.3016306161880493, "learning_rate": 0.0003053064275037369, "loss": 3.314, "step": 81825 }, { 
"epoch": 5.559858676450605, "grad_norm": 1.2291980981826782, "learning_rate": 0.0003052639624949042, "loss": 3.2886, "step": 81830 }, { "epoch": 5.560198396521266, "grad_norm": 1.2843225002288818, "learning_rate": 0.0003052214974860715, "loss": 3.3474, "step": 81835 }, { "epoch": 5.560538116591928, "grad_norm": 1.3376375436782837, "learning_rate": 0.00030517903247723877, "loss": 3.4251, "step": 81840 }, { "epoch": 5.56087783666259, "grad_norm": 1.5237754583358765, "learning_rate": 0.000305136567468406, "loss": 3.3884, "step": 81845 }, { "epoch": 5.561217556733252, "grad_norm": 1.3060450553894043, "learning_rate": 0.0003050941024595733, "loss": 3.261, "step": 81850 }, { "epoch": 5.561557276803914, "grad_norm": 1.2985341548919678, "learning_rate": 0.0003050516374507406, "loss": 3.542, "step": 81855 }, { "epoch": 5.561896996874576, "grad_norm": 1.3529070615768433, "learning_rate": 0.0003050091724419079, "loss": 3.2767, "step": 81860 }, { "epoch": 5.562236716945237, "grad_norm": 1.2185012102127075, "learning_rate": 0.00030496670743307517, "loss": 2.9309, "step": 81865 }, { "epoch": 5.562576437015899, "grad_norm": 1.2511885166168213, "learning_rate": 0.00030492424242424245, "loss": 3.4014, "step": 81870 }, { "epoch": 5.56291615708656, "grad_norm": 1.2026753425598145, "learning_rate": 0.0003048817774154097, "loss": 3.1117, "step": 81875 }, { "epoch": 5.563255877157222, "grad_norm": 1.439871072769165, "learning_rate": 0.00030483931240657695, "loss": 3.0455, "step": 81880 }, { "epoch": 5.563595597227884, "grad_norm": 1.5747836828231812, "learning_rate": 0.0003047968473977443, "loss": 3.1087, "step": 81885 }, { "epoch": 5.5639353172985455, "grad_norm": 1.2732428312301636, "learning_rate": 0.00030475438238891157, "loss": 3.3166, "step": 81890 }, { "epoch": 5.564275037369208, "grad_norm": 1.35360848903656, "learning_rate": 0.0003047119173800788, "loss": 3.085, "step": 81895 }, { "epoch": 5.56461475743987, "grad_norm": 1.315690517425537, "learning_rate": 
0.00030466945237124613, "loss": 3.2648, "step": 81900 }, { "epoch": 5.564954477510531, "grad_norm": 1.5959118604660034, "learning_rate": 0.0003046269873624134, "loss": 3.335, "step": 81905 }, { "epoch": 5.565294197581193, "grad_norm": 1.1805903911590576, "learning_rate": 0.00030458452235358063, "loss": 3.4337, "step": 81910 }, { "epoch": 5.565633917651855, "grad_norm": 1.2728146314620972, "learning_rate": 0.0003045420573447479, "loss": 3.3042, "step": 81915 }, { "epoch": 5.565973637722516, "grad_norm": 1.467048168182373, "learning_rate": 0.00030449959233591525, "loss": 2.8662, "step": 81920 }, { "epoch": 5.566313357793178, "grad_norm": 1.2625483274459839, "learning_rate": 0.0003044571273270825, "loss": 3.3626, "step": 81925 }, { "epoch": 5.56665307786384, "grad_norm": 1.139878273010254, "learning_rate": 0.00030441466231824975, "loss": 3.1359, "step": 81930 }, { "epoch": 5.5669927979345015, "grad_norm": 1.5137674808502197, "learning_rate": 0.0003043721973094171, "loss": 3.3541, "step": 81935 }, { "epoch": 5.567332518005164, "grad_norm": 1.0288506746292114, "learning_rate": 0.0003043297323005843, "loss": 3.5838, "step": 81940 }, { "epoch": 5.567672238075826, "grad_norm": 1.4528627395629883, "learning_rate": 0.0003042872672917516, "loss": 3.2812, "step": 81945 }, { "epoch": 5.568011958146487, "grad_norm": 1.2732234001159668, "learning_rate": 0.0003042448022829189, "loss": 3.4778, "step": 81950 }, { "epoch": 5.568351678217149, "grad_norm": 1.142655849456787, "learning_rate": 0.00030420233727408615, "loss": 3.1113, "step": 81955 }, { "epoch": 5.568691398287811, "grad_norm": 1.205733299255371, "learning_rate": 0.00030415987226525343, "loss": 3.3405, "step": 81960 }, { "epoch": 5.569031118358472, "grad_norm": 1.2819589376449585, "learning_rate": 0.0003041174072564207, "loss": 3.3499, "step": 81965 }, { "epoch": 5.569370838429134, "grad_norm": 1.1785691976547241, "learning_rate": 0.000304074942247588, "loss": 3.4442, "step": 81970 }, { "epoch": 5.569710558499796, 
"grad_norm": 1.4950311183929443, "learning_rate": 0.0003040324772387553, "loss": 3.3541, "step": 81975 }, { "epoch": 5.5700502785704575, "grad_norm": 1.3339749574661255, "learning_rate": 0.00030399001222992255, "loss": 3.3435, "step": 81980 }, { "epoch": 5.57038999864112, "grad_norm": 1.1484612226486206, "learning_rate": 0.0003039475472210898, "loss": 3.3768, "step": 81985 }, { "epoch": 5.570729718711782, "grad_norm": 1.17263925075531, "learning_rate": 0.0003039050822122571, "loss": 3.1258, "step": 81990 }, { "epoch": 5.571069438782443, "grad_norm": 1.2824783325195312, "learning_rate": 0.0003038626172034244, "loss": 3.4037, "step": 81995 }, { "epoch": 5.571409158853105, "grad_norm": 1.3432821035385132, "learning_rate": 0.0003038201521945916, "loss": 3.3574, "step": 82000 }, { "epoch": 5.571748878923767, "grad_norm": 1.2935856580734253, "learning_rate": 0.00030377768718575895, "loss": 3.1681, "step": 82005 }, { "epoch": 5.572088598994428, "grad_norm": 1.3718441724777222, "learning_rate": 0.00030373522217692623, "loss": 3.3294, "step": 82010 }, { "epoch": 5.57242831906509, "grad_norm": 1.5954043865203857, "learning_rate": 0.00030369275716809346, "loss": 3.4802, "step": 82015 }, { "epoch": 5.572768039135752, "grad_norm": 1.437857985496521, "learning_rate": 0.00030365029215926074, "loss": 3.2113, "step": 82020 }, { "epoch": 5.5731077592064135, "grad_norm": 1.0484669208526611, "learning_rate": 0.0003036078271504281, "loss": 3.4836, "step": 82025 }, { "epoch": 5.573447479277076, "grad_norm": 1.1727224588394165, "learning_rate": 0.00030356536214159536, "loss": 3.5246, "step": 82030 }, { "epoch": 5.573787199347738, "grad_norm": 1.3188402652740479, "learning_rate": 0.0003035228971327626, "loss": 3.6977, "step": 82035 }, { "epoch": 5.574126919418399, "grad_norm": 1.2306790351867676, "learning_rate": 0.0003034804321239299, "loss": 3.501, "step": 82040 }, { "epoch": 5.574466639489061, "grad_norm": 1.0948047637939453, "learning_rate": 0.0003034379671150972, "loss": 3.342, 
"step": 82045 }, { "epoch": 5.574806359559723, "grad_norm": 1.3197349309921265, "learning_rate": 0.0003033955021062644, "loss": 3.3762, "step": 82050 }, { "epoch": 5.575146079630384, "grad_norm": 1.1251091957092285, "learning_rate": 0.0003033530370974317, "loss": 3.2047, "step": 82055 }, { "epoch": 5.575485799701046, "grad_norm": 1.436057686805725, "learning_rate": 0.00030331057208859904, "loss": 3.2703, "step": 82060 }, { "epoch": 5.575825519771708, "grad_norm": 1.2112449407577515, "learning_rate": 0.00030326810707976626, "loss": 3.1345, "step": 82065 }, { "epoch": 5.57616523984237, "grad_norm": 1.07374906539917, "learning_rate": 0.00030322564207093354, "loss": 3.3207, "step": 82070 }, { "epoch": 5.576504959913032, "grad_norm": 1.3054370880126953, "learning_rate": 0.0003031831770621009, "loss": 3.2367, "step": 82075 }, { "epoch": 5.576844679983694, "grad_norm": 1.22004234790802, "learning_rate": 0.0003031407120532681, "loss": 3.4041, "step": 82080 }, { "epoch": 5.577184400054355, "grad_norm": 1.6246700286865234, "learning_rate": 0.0003030982470444354, "loss": 3.4828, "step": 82085 }, { "epoch": 5.577524120125017, "grad_norm": 1.0712448358535767, "learning_rate": 0.0003030557820356027, "loss": 3.4937, "step": 82090 }, { "epoch": 5.577863840195679, "grad_norm": 1.2675436735153198, "learning_rate": 0.00030301331702676994, "loss": 3.1719, "step": 82095 }, { "epoch": 5.57820356026634, "grad_norm": 1.6526834964752197, "learning_rate": 0.0003029708520179372, "loss": 3.5552, "step": 82100 }, { "epoch": 5.578543280337002, "grad_norm": 1.2006207704544067, "learning_rate": 0.0003029283870091045, "loss": 3.4088, "step": 82105 }, { "epoch": 5.578883000407664, "grad_norm": 1.1511335372924805, "learning_rate": 0.0003028859220002718, "loss": 3.3096, "step": 82110 }, { "epoch": 5.579222720478326, "grad_norm": 1.3863846063613892, "learning_rate": 0.00030284345699143906, "loss": 3.0687, "step": 82115 }, { "epoch": 5.579562440548988, "grad_norm": 1.4453606605529785, "learning_rate": 
0.00030280099198260634, "loss": 3.4445, "step": 82120 }, { "epoch": 5.57990216061965, "grad_norm": 1.2367883920669556, "learning_rate": 0.0003027585269737736, "loss": 3.2394, "step": 82125 }, { "epoch": 5.580241880690311, "grad_norm": 1.2943414449691772, "learning_rate": 0.0003027160619649409, "loss": 3.2381, "step": 82130 }, { "epoch": 5.580581600760973, "grad_norm": 1.4886302947998047, "learning_rate": 0.0003026735969561082, "loss": 3.3107, "step": 82135 }, { "epoch": 5.580921320831635, "grad_norm": 1.3593058586120605, "learning_rate": 0.0003026311319472754, "loss": 3.348, "step": 82140 }, { "epoch": 5.581261040902296, "grad_norm": 1.1746699810028076, "learning_rate": 0.00030258866693844274, "loss": 3.5731, "step": 82145 }, { "epoch": 5.581600760972958, "grad_norm": 1.409923791885376, "learning_rate": 0.00030254620192961, "loss": 3.5256, "step": 82150 }, { "epoch": 5.58194048104362, "grad_norm": 1.300205945968628, "learning_rate": 0.00030250373692077725, "loss": 3.4448, "step": 82155 }, { "epoch": 5.582280201114282, "grad_norm": 1.4251686334609985, "learning_rate": 0.0003024612719119446, "loss": 3.5444, "step": 82160 }, { "epoch": 5.582619921184944, "grad_norm": 1.3200020790100098, "learning_rate": 0.00030241880690311186, "loss": 3.311, "step": 82165 }, { "epoch": 5.582959641255606, "grad_norm": 1.4753364324569702, "learning_rate": 0.0003023763418942791, "loss": 3.4024, "step": 82170 }, { "epoch": 5.583299361326267, "grad_norm": 1.2917718887329102, "learning_rate": 0.00030233387688544637, "loss": 3.5768, "step": 82175 }, { "epoch": 5.583639081396929, "grad_norm": 1.3088723421096802, "learning_rate": 0.0003022914118766137, "loss": 3.0356, "step": 82180 }, { "epoch": 5.583978801467591, "grad_norm": 1.255998969078064, "learning_rate": 0.00030224894686778093, "loss": 3.1777, "step": 82185 }, { "epoch": 5.584318521538252, "grad_norm": 1.3669878244400024, "learning_rate": 0.0003022064818589482, "loss": 3.29, "step": 82190 }, { "epoch": 5.584658241608914, "grad_norm": 
1.269801139831543, "learning_rate": 0.00030216401685011554, "loss": 3.1285, "step": 82195 }, { "epoch": 5.584997961679576, "grad_norm": 1.0575687885284424, "learning_rate": 0.0003021215518412828, "loss": 3.5596, "step": 82200 }, { "epoch": 5.585337681750238, "grad_norm": 1.1783527135849, "learning_rate": 0.00030207908683245005, "loss": 3.3518, "step": 82205 }, { "epoch": 5.5856774018209, "grad_norm": 1.2362430095672607, "learning_rate": 0.00030203662182361733, "loss": 2.9982, "step": 82210 }, { "epoch": 5.586017121891562, "grad_norm": 1.3968141078948975, "learning_rate": 0.00030199415681478466, "loss": 3.4285, "step": 82215 }, { "epoch": 5.586356841962223, "grad_norm": 1.20963454246521, "learning_rate": 0.0003019516918059519, "loss": 3.2073, "step": 82220 }, { "epoch": 5.586696562032885, "grad_norm": 1.6400481462478638, "learning_rate": 0.00030190922679711917, "loss": 3.4317, "step": 82225 }, { "epoch": 5.587036282103547, "grad_norm": 1.3835211992263794, "learning_rate": 0.0003018667617882865, "loss": 3.3474, "step": 82230 }, { "epoch": 5.587376002174208, "grad_norm": 1.2598377466201782, "learning_rate": 0.00030182429677945373, "loss": 3.5041, "step": 82235 }, { "epoch": 5.58771572224487, "grad_norm": 1.4943974018096924, "learning_rate": 0.000301781831770621, "loss": 3.0804, "step": 82240 }, { "epoch": 5.588055442315532, "grad_norm": 1.1149282455444336, "learning_rate": 0.0003017393667617883, "loss": 3.476, "step": 82245 }, { "epoch": 5.588395162386194, "grad_norm": 1.2134684324264526, "learning_rate": 0.00030169690175295557, "loss": 3.2443, "step": 82250 }, { "epoch": 5.588734882456856, "grad_norm": 1.1145930290222168, "learning_rate": 0.00030165443674412285, "loss": 3.4936, "step": 82255 }, { "epoch": 5.589074602527518, "grad_norm": 1.085497260093689, "learning_rate": 0.00030161197173529013, "loss": 3.556, "step": 82260 }, { "epoch": 5.589414322598179, "grad_norm": 1.0991761684417725, "learning_rate": 0.0003015695067264574, "loss": 3.2388, "step": 82265 }, { 
"epoch": 5.589754042668841, "grad_norm": 1.6068496704101562, "learning_rate": 0.0003015270417176247, "loss": 3.4169, "step": 82270 }, { "epoch": 5.590093762739503, "grad_norm": 1.3626370429992676, "learning_rate": 0.00030148457670879197, "loss": 3.1629, "step": 82275 }, { "epoch": 5.590433482810164, "grad_norm": 1.578779935836792, "learning_rate": 0.0003014421116999592, "loss": 3.2105, "step": 82280 }, { "epoch": 5.590773202880826, "grad_norm": 1.2276721000671387, "learning_rate": 0.00030139964669112653, "loss": 3.0171, "step": 82285 }, { "epoch": 5.591112922951488, "grad_norm": 1.3536232709884644, "learning_rate": 0.0003013571816822938, "loss": 3.335, "step": 82290 }, { "epoch": 5.59145264302215, "grad_norm": 1.5043028593063354, "learning_rate": 0.00030131471667346104, "loss": 3.1487, "step": 82295 }, { "epoch": 5.591792363092812, "grad_norm": 1.17481529712677, "learning_rate": 0.00030127225166462837, "loss": 3.3655, "step": 82300 }, { "epoch": 5.592132083163474, "grad_norm": 1.1811535358428955, "learning_rate": 0.00030122978665579565, "loss": 3.2691, "step": 82305 }, { "epoch": 5.592471803234135, "grad_norm": 1.1850241422653198, "learning_rate": 0.0003011873216469629, "loss": 3.2743, "step": 82310 }, { "epoch": 5.592811523304797, "grad_norm": 1.7550324201583862, "learning_rate": 0.00030114485663813016, "loss": 3.3978, "step": 82315 }, { "epoch": 5.593151243375459, "grad_norm": 1.2469446659088135, "learning_rate": 0.0003011023916292975, "loss": 3.2807, "step": 82320 }, { "epoch": 5.59349096344612, "grad_norm": 1.4454033374786377, "learning_rate": 0.0003010599266204647, "loss": 3.2031, "step": 82325 }, { "epoch": 5.593830683516782, "grad_norm": 1.4171258211135864, "learning_rate": 0.000301017461611632, "loss": 3.4916, "step": 82330 }, { "epoch": 5.594170403587444, "grad_norm": 1.3760815858840942, "learning_rate": 0.00030097499660279933, "loss": 3.2975, "step": 82335 }, { "epoch": 5.594510123658106, "grad_norm": 1.86489999294281, "learning_rate": 
0.00030093253159396656, "loss": 3.5135, "step": 82340 }, { "epoch": 5.594849843728768, "grad_norm": 1.5315496921539307, "learning_rate": 0.00030089006658513384, "loss": 3.3791, "step": 82345 }, { "epoch": 5.595189563799429, "grad_norm": 1.1517740488052368, "learning_rate": 0.0003008476015763011, "loss": 2.9719, "step": 82350 }, { "epoch": 5.595529283870091, "grad_norm": 1.392667293548584, "learning_rate": 0.0003008051365674684, "loss": 3.6636, "step": 82355 }, { "epoch": 5.595869003940753, "grad_norm": 1.0760644674301147, "learning_rate": 0.0003007626715586357, "loss": 3.4977, "step": 82360 }, { "epoch": 5.596208724011414, "grad_norm": 1.24616539478302, "learning_rate": 0.00030072020654980296, "loss": 3.6215, "step": 82365 }, { "epoch": 5.596548444082076, "grad_norm": 1.5016275644302368, "learning_rate": 0.0003006777415409703, "loss": 3.3812, "step": 82370 }, { "epoch": 5.596888164152738, "grad_norm": 1.190857172012329, "learning_rate": 0.0003006352765321375, "loss": 3.249, "step": 82375 }, { "epoch": 5.5972278842234, "grad_norm": 1.5618826150894165, "learning_rate": 0.0003005928115233048, "loss": 3.456, "step": 82380 }, { "epoch": 5.597567604294062, "grad_norm": 1.556362271308899, "learning_rate": 0.00030055034651447213, "loss": 3.1828, "step": 82385 }, { "epoch": 5.597907324364724, "grad_norm": 1.2324042320251465, "learning_rate": 0.00030050788150563936, "loss": 3.253, "step": 82390 }, { "epoch": 5.598247044435385, "grad_norm": 1.0699528455734253, "learning_rate": 0.00030046541649680664, "loss": 3.4108, "step": 82395 }, { "epoch": 5.598586764506047, "grad_norm": 1.3951334953308105, "learning_rate": 0.0003004229514879739, "loss": 3.393, "step": 82400 }, { "epoch": 5.598926484576709, "grad_norm": 1.319448709487915, "learning_rate": 0.0003003804864791412, "loss": 3.1651, "step": 82405 }, { "epoch": 5.59926620464737, "grad_norm": 1.0271908044815063, "learning_rate": 0.0003003380214703085, "loss": 3.2467, "step": 82410 }, { "epoch": 5.599605924718032, "grad_norm": 
1.607937216758728, "learning_rate": 0.00030029555646147576, "loss": 3.4964, "step": 82415 }, { "epoch": 5.599945644788694, "grad_norm": 1.3099535703659058, "learning_rate": 0.00030025309145264304, "loss": 3.3604, "step": 82420 }, { "epoch": 5.600285364859356, "grad_norm": 1.2279659509658813, "learning_rate": 0.0003002106264438103, "loss": 3.3409, "step": 82425 }, { "epoch": 5.600625084930018, "grad_norm": 1.2832237482070923, "learning_rate": 0.0003001681614349776, "loss": 3.3706, "step": 82430 }, { "epoch": 5.60096480500068, "grad_norm": 1.278006672859192, "learning_rate": 0.0003001256964261448, "loss": 3.306, "step": 82435 }, { "epoch": 5.601304525071341, "grad_norm": 1.0958905220031738, "learning_rate": 0.00030008323141731216, "loss": 3.1421, "step": 82440 }, { "epoch": 5.601644245142003, "grad_norm": 1.0584303140640259, "learning_rate": 0.00030004076640847944, "loss": 3.4573, "step": 82445 }, { "epoch": 5.601983965212665, "grad_norm": 1.5471128225326538, "learning_rate": 0.00029999830139964666, "loss": 3.3453, "step": 82450 }, { "epoch": 5.602323685283326, "grad_norm": 1.3700765371322632, "learning_rate": 0.000299955836390814, "loss": 3.5088, "step": 82455 }, { "epoch": 5.602663405353988, "grad_norm": 1.458030343055725, "learning_rate": 0.0002999133713819813, "loss": 3.2621, "step": 82460 }, { "epoch": 5.60300312542465, "grad_norm": 1.57577383518219, "learning_rate": 0.0002998709063731485, "loss": 3.322, "step": 82465 }, { "epoch": 5.603342845495312, "grad_norm": 1.4495488405227661, "learning_rate": 0.0002998284413643158, "loss": 3.4662, "step": 82470 }, { "epoch": 5.603682565565974, "grad_norm": 1.242174744606018, "learning_rate": 0.0002997859763554831, "loss": 3.3776, "step": 82475 }, { "epoch": 5.604022285636636, "grad_norm": 1.3263972997665405, "learning_rate": 0.00029974351134665035, "loss": 3.2114, "step": 82480 }, { "epoch": 5.604362005707297, "grad_norm": 1.1031943559646606, "learning_rate": 0.0002997010463378176, "loss": 3.1385, "step": 82485 }, { 
"epoch": 5.604701725777959, "grad_norm": 1.1566969156265259, "learning_rate": 0.00029965858132898496, "loss": 3.2026, "step": 82490 }, { "epoch": 5.605041445848621, "grad_norm": 1.6261649131774902, "learning_rate": 0.0002996161163201522, "loss": 3.2973, "step": 82495 }, { "epoch": 5.605381165919282, "grad_norm": 1.243943452835083, "learning_rate": 0.00029957365131131947, "loss": 3.3136, "step": 82500 }, { "epoch": 5.605720885989944, "grad_norm": 1.5379161834716797, "learning_rate": 0.00029953118630248675, "loss": 3.3944, "step": 82505 }, { "epoch": 5.606060606060606, "grad_norm": 1.3196688890457153, "learning_rate": 0.000299488721293654, "loss": 3.3333, "step": 82510 }, { "epoch": 5.606400326131268, "grad_norm": 1.0440278053283691, "learning_rate": 0.0002994462562848213, "loss": 3.4271, "step": 82515 }, { "epoch": 5.60674004620193, "grad_norm": 1.0833468437194824, "learning_rate": 0.0002994037912759886, "loss": 3.431, "step": 82520 }, { "epoch": 5.607079766272592, "grad_norm": 1.0271936655044556, "learning_rate": 0.00029936132626715587, "loss": 3.2012, "step": 82525 }, { "epoch": 5.607419486343253, "grad_norm": 1.5323373079299927, "learning_rate": 0.00029931886125832315, "loss": 3.5079, "step": 82530 }, { "epoch": 5.607759206413915, "grad_norm": 0.923676609992981, "learning_rate": 0.0002992763962494904, "loss": 3.3217, "step": 82535 }, { "epoch": 5.608098926484577, "grad_norm": 1.7647422552108765, "learning_rate": 0.0002992339312406577, "loss": 3.2823, "step": 82540 }, { "epoch": 5.608438646555238, "grad_norm": 1.1513692140579224, "learning_rate": 0.000299191466231825, "loss": 3.4057, "step": 82545 }, { "epoch": 5.6087783666259, "grad_norm": 1.4026188850402832, "learning_rate": 0.00029914900122299227, "loss": 3.5341, "step": 82550 }, { "epoch": 5.6091180866965615, "grad_norm": 1.5008121728897095, "learning_rate": 0.00029910653621415955, "loss": 3.0209, "step": 82555 }, { "epoch": 5.609457806767224, "grad_norm": 0.9938173294067383, "learning_rate": 
0.0002990640712053268, "loss": 3.2814, "step": 82560 }, { "epoch": 5.609797526837886, "grad_norm": 1.1167340278625488, "learning_rate": 0.0002990216061964941, "loss": 3.3857, "step": 82565 }, { "epoch": 5.610137246908547, "grad_norm": 1.1567120552062988, "learning_rate": 0.0002989791411876614, "loss": 3.3368, "step": 82570 }, { "epoch": 5.610476966979209, "grad_norm": 1.481968641281128, "learning_rate": 0.0002989366761788286, "loss": 3.055, "step": 82575 }, { "epoch": 5.610816687049871, "grad_norm": 1.485141396522522, "learning_rate": 0.00029889421116999595, "loss": 3.3156, "step": 82580 }, { "epoch": 5.611156407120532, "grad_norm": 1.2292927503585815, "learning_rate": 0.0002988517461611632, "loss": 3.4717, "step": 82585 }, { "epoch": 5.611496127191194, "grad_norm": 1.4095081090927124, "learning_rate": 0.00029880928115233045, "loss": 3.255, "step": 82590 }, { "epoch": 5.611835847261856, "grad_norm": 1.1145846843719482, "learning_rate": 0.0002987668161434978, "loss": 3.273, "step": 82595 }, { "epoch": 5.6121755673325175, "grad_norm": 1.527082920074463, "learning_rate": 0.00029872435113466507, "loss": 3.1115, "step": 82600 }, { "epoch": 5.61251528740318, "grad_norm": 1.2270311117172241, "learning_rate": 0.0002986818861258323, "loss": 3.31, "step": 82605 }, { "epoch": 5.612855007473842, "grad_norm": 1.3406890630722046, "learning_rate": 0.0002986394211169996, "loss": 3.1703, "step": 82610 }, { "epoch": 5.613194727544503, "grad_norm": 1.5012462139129639, "learning_rate": 0.0002985969561081669, "loss": 3.256, "step": 82615 }, { "epoch": 5.613534447615165, "grad_norm": 1.6226725578308105, "learning_rate": 0.00029855449109933413, "loss": 3.3039, "step": 82620 }, { "epoch": 5.613874167685827, "grad_norm": 1.4297528266906738, "learning_rate": 0.0002985120260905014, "loss": 3.4946, "step": 82625 }, { "epoch": 5.614213887756488, "grad_norm": 1.3577100038528442, "learning_rate": 0.00029846956108166875, "loss": 3.5072, "step": 82630 }, { "epoch": 5.61455360782715, "grad_norm": 
1.0777363777160645, "learning_rate": 0.000298427096072836, "loss": 3.2034, "step": 82635 }, { "epoch": 5.614893327897812, "grad_norm": 1.1803404092788696, "learning_rate": 0.00029838463106400325, "loss": 3.1065, "step": 82640 }, { "epoch": 5.615233047968474, "grad_norm": 1.2848963737487793, "learning_rate": 0.00029834216605517053, "loss": 3.3142, "step": 82645 }, { "epoch": 5.615572768039136, "grad_norm": 1.1369764804840088, "learning_rate": 0.0002982997010463378, "loss": 3.3404, "step": 82650 }, { "epoch": 5.615912488109798, "grad_norm": 1.3363709449768066, "learning_rate": 0.0002982572360375051, "loss": 3.2866, "step": 82655 }, { "epoch": 5.616252208180459, "grad_norm": 1.0482004880905151, "learning_rate": 0.0002982147710286724, "loss": 3.3969, "step": 82660 }, { "epoch": 5.616591928251121, "grad_norm": 1.4823265075683594, "learning_rate": 0.00029817230601983965, "loss": 3.1883, "step": 82665 }, { "epoch": 5.616931648321783, "grad_norm": 1.3101602792739868, "learning_rate": 0.00029812984101100693, "loss": 3.1109, "step": 82670 }, { "epoch": 5.617271368392444, "grad_norm": 1.2397046089172363, "learning_rate": 0.0002980873760021742, "loss": 3.3399, "step": 82675 }, { "epoch": 5.617611088463106, "grad_norm": 1.2901731729507446, "learning_rate": 0.00029804491099334144, "loss": 3.2503, "step": 82680 }, { "epoch": 5.617950808533768, "grad_norm": 1.709553599357605, "learning_rate": 0.0002980024459845088, "loss": 3.4068, "step": 82685 }, { "epoch": 5.61829052860443, "grad_norm": 1.196617603302002, "learning_rate": 0.00029795998097567605, "loss": 3.2173, "step": 82690 }, { "epoch": 5.618630248675092, "grad_norm": 1.0347033739089966, "learning_rate": 0.0002979175159668433, "loss": 3.3116, "step": 82695 }, { "epoch": 5.618969968745754, "grad_norm": 1.7315304279327393, "learning_rate": 0.0002978750509580106, "loss": 3.6125, "step": 82700 }, { "epoch": 5.619309688816415, "grad_norm": 1.711930513381958, "learning_rate": 0.0002978325859491779, "loss": 3.409, "step": 82705 }, { 
"epoch": 5.619649408887077, "grad_norm": 1.2535178661346436, "learning_rate": 0.0002977901209403452, "loss": 3.3456, "step": 82710 }, { "epoch": 5.619989128957739, "grad_norm": 1.1264163255691528, "learning_rate": 0.00029774765593151245, "loss": 3.1711, "step": 82715 }, { "epoch": 5.6203288490284, "grad_norm": 1.1674057245254517, "learning_rate": 0.00029770519092267973, "loss": 3.0669, "step": 82720 }, { "epoch": 5.620668569099062, "grad_norm": 1.1323386430740356, "learning_rate": 0.000297662725913847, "loss": 3.5177, "step": 82725 }, { "epoch": 5.621008289169724, "grad_norm": 1.7198121547698975, "learning_rate": 0.00029762026090501424, "loss": 3.1259, "step": 82730 }, { "epoch": 5.621348009240386, "grad_norm": 1.040710687637329, "learning_rate": 0.0002975777958961816, "loss": 3.502, "step": 82735 }, { "epoch": 5.621687729311048, "grad_norm": 1.048002004623413, "learning_rate": 0.00029753533088734886, "loss": 3.4191, "step": 82740 }, { "epoch": 5.62202744938171, "grad_norm": 1.3005152940750122, "learning_rate": 0.0002974928658785161, "loss": 3.2312, "step": 82745 }, { "epoch": 5.622367169452371, "grad_norm": 1.058902382850647, "learning_rate": 0.0002974504008696834, "loss": 3.3691, "step": 82750 }, { "epoch": 5.622706889523033, "grad_norm": 1.2218610048294067, "learning_rate": 0.0002974079358608507, "loss": 3.3433, "step": 82755 }, { "epoch": 5.623046609593695, "grad_norm": 1.233353614807129, "learning_rate": 0.0002973654708520179, "loss": 3.4577, "step": 82760 }, { "epoch": 5.623386329664356, "grad_norm": 1.2388323545455933, "learning_rate": 0.0002973230058431852, "loss": 3.2872, "step": 82765 }, { "epoch": 5.623726049735018, "grad_norm": 1.3901528120040894, "learning_rate": 0.00029728054083435254, "loss": 3.3109, "step": 82770 }, { "epoch": 5.62406576980568, "grad_norm": 1.3351260423660278, "learning_rate": 0.00029723807582551976, "loss": 3.2081, "step": 82775 }, { "epoch": 5.624405489876342, "grad_norm": 1.6343884468078613, "learning_rate": 
0.00029719561081668704, "loss": 3.4012, "step": 82780 }, { "epoch": 5.624745209947004, "grad_norm": 1.3003596067428589, "learning_rate": 0.0002971531458078544, "loss": 3.717, "step": 82785 }, { "epoch": 5.625084930017666, "grad_norm": 1.211570143699646, "learning_rate": 0.0002971106807990216, "loss": 3.3044, "step": 82790 }, { "epoch": 5.625424650088327, "grad_norm": 1.424596905708313, "learning_rate": 0.0002970682157901889, "loss": 3.3311, "step": 82795 }, { "epoch": 5.625764370158989, "grad_norm": 1.3616067171096802, "learning_rate": 0.00029702575078135616, "loss": 3.5631, "step": 82800 }, { "epoch": 5.626104090229651, "grad_norm": 1.244400978088379, "learning_rate": 0.00029698328577252344, "loss": 3.3474, "step": 82805 }, { "epoch": 5.626443810300312, "grad_norm": 1.2564525604248047, "learning_rate": 0.0002969408207636907, "loss": 3.1898, "step": 82810 }, { "epoch": 5.626783530370974, "grad_norm": 1.7254060506820679, "learning_rate": 0.000296898355754858, "loss": 3.3383, "step": 82815 }, { "epoch": 5.627123250441636, "grad_norm": 1.4834526777267456, "learning_rate": 0.0002968558907460253, "loss": 3.1685, "step": 82820 }, { "epoch": 5.627462970512298, "grad_norm": 1.3365620374679565, "learning_rate": 0.00029681342573719256, "loss": 3.333, "step": 82825 }, { "epoch": 5.62780269058296, "grad_norm": 1.2752920389175415, "learning_rate": 0.00029677096072835984, "loss": 3.3365, "step": 82830 }, { "epoch": 5.628142410653622, "grad_norm": 1.2571879625320435, "learning_rate": 0.00029672849571952707, "loss": 3.0984, "step": 82835 }, { "epoch": 5.628482130724283, "grad_norm": 1.1060904264450073, "learning_rate": 0.0002966860307106944, "loss": 3.505, "step": 82840 }, { "epoch": 5.628821850794945, "grad_norm": 1.1062434911727905, "learning_rate": 0.0002966435657018617, "loss": 3.3942, "step": 82845 }, { "epoch": 5.629161570865607, "grad_norm": 1.0402122735977173, "learning_rate": 0.0002966011006930289, "loss": 3.2295, "step": 82850 }, { "epoch": 5.629501290936268, 
"grad_norm": 1.4110432863235474, "learning_rate": 0.00029655863568419624, "loss": 3.6781, "step": 82855 }, { "epoch": 5.62984101100693, "grad_norm": 1.495198369026184, "learning_rate": 0.0002965161706753635, "loss": 3.3804, "step": 82860 }, { "epoch": 5.630180731077592, "grad_norm": 1.0218099355697632, "learning_rate": 0.00029647370566653075, "loss": 3.1861, "step": 82865 }, { "epoch": 5.630520451148254, "grad_norm": 1.1788206100463867, "learning_rate": 0.00029643124065769803, "loss": 3.231, "step": 82870 }, { "epoch": 5.630860171218916, "grad_norm": 1.4146885871887207, "learning_rate": 0.00029638877564886536, "loss": 3.1692, "step": 82875 }, { "epoch": 5.631199891289578, "grad_norm": 1.6266839504241943, "learning_rate": 0.00029634631064003264, "loss": 3.4417, "step": 82880 }, { "epoch": 5.631539611360239, "grad_norm": 1.1401245594024658, "learning_rate": 0.00029630384563119987, "loss": 3.2407, "step": 82885 }, { "epoch": 5.631879331430901, "grad_norm": 2.075840950012207, "learning_rate": 0.0002962613806223672, "loss": 3.4342, "step": 82890 }, { "epoch": 5.632219051501563, "grad_norm": 1.748193621635437, "learning_rate": 0.0002962189156135345, "loss": 3.1842, "step": 82895 }, { "epoch": 5.632558771572224, "grad_norm": 1.278035283088684, "learning_rate": 0.0002961764506047017, "loss": 3.2566, "step": 82900 }, { "epoch": 5.632898491642886, "grad_norm": 0.8876035809516907, "learning_rate": 0.000296133985595869, "loss": 3.567, "step": 82905 }, { "epoch": 5.633238211713548, "grad_norm": 1.2858202457427979, "learning_rate": 0.0002960915205870363, "loss": 3.3619, "step": 82910 }, { "epoch": 5.63357793178421, "grad_norm": 1.101723551750183, "learning_rate": 0.00029604905557820355, "loss": 3.5246, "step": 82915 }, { "epoch": 5.633917651854872, "grad_norm": 1.3194868564605713, "learning_rate": 0.00029600659056937083, "loss": 3.3365, "step": 82920 }, { "epoch": 5.634257371925534, "grad_norm": 1.3602514266967773, "learning_rate": 0.00029596412556053816, "loss": 3.1748, "step": 
82925 }, { "epoch": 5.634597091996195, "grad_norm": 1.3017265796661377, "learning_rate": 0.0002959216605517054, "loss": 3.295, "step": 82930 }, { "epoch": 5.634936812066857, "grad_norm": 1.4104254245758057, "learning_rate": 0.00029587919554287267, "loss": 3.2892, "step": 82935 }, { "epoch": 5.635276532137519, "grad_norm": 1.0956993103027344, "learning_rate": 0.00029583673053404, "loss": 2.7828, "step": 82940 }, { "epoch": 5.63561625220818, "grad_norm": 1.5695557594299316, "learning_rate": 0.00029579426552520723, "loss": 3.2555, "step": 82945 }, { "epoch": 5.635955972278842, "grad_norm": 1.474748134613037, "learning_rate": 0.0002957518005163745, "loss": 3.312, "step": 82950 }, { "epoch": 5.6362956923495044, "grad_norm": 1.1230875253677368, "learning_rate": 0.0002957093355075418, "loss": 3.3919, "step": 82955 }, { "epoch": 5.636635412420166, "grad_norm": 1.6526291370391846, "learning_rate": 0.00029566687049870907, "loss": 3.5564, "step": 82960 }, { "epoch": 5.636975132490828, "grad_norm": 1.031165599822998, "learning_rate": 0.00029562440548987635, "loss": 3.1379, "step": 82965 }, { "epoch": 5.63731485256149, "grad_norm": 1.024538278579712, "learning_rate": 0.00029558194048104363, "loss": 3.2554, "step": 82970 }, { "epoch": 5.637654572632151, "grad_norm": 1.3625011444091797, "learning_rate": 0.0002955394754722109, "loss": 3.7332, "step": 82975 }, { "epoch": 5.637994292702813, "grad_norm": 1.3307034969329834, "learning_rate": 0.0002954970104633782, "loss": 3.1995, "step": 82980 }, { "epoch": 5.638334012773475, "grad_norm": 1.6350915431976318, "learning_rate": 0.00029545454545454547, "loss": 3.3635, "step": 82985 }, { "epoch": 5.638673732844136, "grad_norm": 1.2623913288116455, "learning_rate": 0.0002954120804457127, "loss": 3.4005, "step": 82990 }, { "epoch": 5.639013452914798, "grad_norm": 0.9952086806297302, "learning_rate": 0.00029536961543688003, "loss": 3.1127, "step": 82995 }, { "epoch": 5.6393531729854605, "grad_norm": 1.1297533512115479, "learning_rate": 
0.0002953271504280473, "loss": 3.3873, "step": 83000 }, { "epoch": 5.639692893056122, "grad_norm": 1.3426226377487183, "learning_rate": 0.00029528468541921454, "loss": 3.2346, "step": 83005 }, { "epoch": 5.640032613126784, "grad_norm": 1.2692629098892212, "learning_rate": 0.00029524222041038187, "loss": 3.5482, "step": 83010 }, { "epoch": 5.640372333197446, "grad_norm": 1.4262038469314575, "learning_rate": 0.00029519975540154915, "loss": 3.3723, "step": 83015 }, { "epoch": 5.640712053268107, "grad_norm": 1.5466433763504028, "learning_rate": 0.0002951572903927164, "loss": 3.1475, "step": 83020 }, { "epoch": 5.641051773338769, "grad_norm": 1.424906611442566, "learning_rate": 0.00029511482538388366, "loss": 3.4656, "step": 83025 }, { "epoch": 5.64139149340943, "grad_norm": 1.0843021869659424, "learning_rate": 0.000295072360375051, "loss": 3.2741, "step": 83030 }, { "epoch": 5.641731213480092, "grad_norm": 1.2213646173477173, "learning_rate": 0.0002950298953662182, "loss": 3.3298, "step": 83035 }, { "epoch": 5.642070933550754, "grad_norm": 1.256172776222229, "learning_rate": 0.0002949874303573855, "loss": 3.4134, "step": 83040 }, { "epoch": 5.642410653621416, "grad_norm": 1.7062958478927612, "learning_rate": 0.00029494496534855283, "loss": 3.2443, "step": 83045 }, { "epoch": 5.642750373692078, "grad_norm": 1.257333517074585, "learning_rate": 0.0002949025003397201, "loss": 3.4521, "step": 83050 }, { "epoch": 5.64309009376274, "grad_norm": 1.4397404193878174, "learning_rate": 0.00029486003533088734, "loss": 3.3726, "step": 83055 }, { "epoch": 5.643429813833401, "grad_norm": 1.1830624341964722, "learning_rate": 0.0002948175703220546, "loss": 3.3525, "step": 83060 }, { "epoch": 5.643769533904063, "grad_norm": 1.0040833950042725, "learning_rate": 0.00029477510531322195, "loss": 3.3765, "step": 83065 }, { "epoch": 5.644109253974725, "grad_norm": 1.1466096639633179, "learning_rate": 0.0002947326403043892, "loss": 3.205, "step": 83070 }, { "epoch": 5.644448974045386, 
"grad_norm": 1.1090710163116455, "learning_rate": 0.00029469017529555646, "loss": 3.1777, "step": 83075 }, { "epoch": 5.644788694116048, "grad_norm": 1.3064370155334473, "learning_rate": 0.0002946477102867238, "loss": 3.1167, "step": 83080 }, { "epoch": 5.64512841418671, "grad_norm": 1.75274658203125, "learning_rate": 0.000294605245277891, "loss": 3.2667, "step": 83085 }, { "epoch": 5.645468134257372, "grad_norm": 1.3274004459381104, "learning_rate": 0.0002945627802690583, "loss": 3.3414, "step": 83090 }, { "epoch": 5.645807854328034, "grad_norm": 1.2393970489501953, "learning_rate": 0.0002945203152602256, "loss": 3.2556, "step": 83095 }, { "epoch": 5.646147574398696, "grad_norm": 1.3978815078735352, "learning_rate": 0.00029447785025139286, "loss": 3.3593, "step": 83100 }, { "epoch": 5.646487294469357, "grad_norm": 1.3851810693740845, "learning_rate": 0.00029443538524256014, "loss": 3.2146, "step": 83105 }, { "epoch": 5.646827014540019, "grad_norm": 1.0506614446640015, "learning_rate": 0.0002943929202337274, "loss": 3.2402, "step": 83110 }, { "epoch": 5.647166734610681, "grad_norm": 1.4311813116073608, "learning_rate": 0.0002943504552248947, "loss": 3.5453, "step": 83115 }, { "epoch": 5.647506454681342, "grad_norm": 1.2093185186386108, "learning_rate": 0.000294307990216062, "loss": 3.3153, "step": 83120 }, { "epoch": 5.647846174752004, "grad_norm": 1.6099454164505005, "learning_rate": 0.0002942740182089958, "loss": 3.2493, "step": 83125 }, { "epoch": 5.648185894822666, "grad_norm": 1.3894503116607666, "learning_rate": 0.0002942315532001631, "loss": 3.439, "step": 83130 }, { "epoch": 5.648525614893328, "grad_norm": 1.1730670928955078, "learning_rate": 0.0002941890881913303, "loss": 3.3156, "step": 83135 }, { "epoch": 5.64886533496399, "grad_norm": 1.1042180061340332, "learning_rate": 0.00029414662318249764, "loss": 3.3045, "step": 83140 }, { "epoch": 5.649205055034652, "grad_norm": 1.3452318906784058, "learning_rate": 0.0002941041581736649, "loss": 3.0523, "step": 
83145 }, { "epoch": 5.649544775105313, "grad_norm": 1.1574515104293823, "learning_rate": 0.00029406169316483215, "loss": 3.3814, "step": 83150 }, { "epoch": 5.649884495175975, "grad_norm": 1.3067015409469604, "learning_rate": 0.0002940192281559995, "loss": 3.2908, "step": 83155 }, { "epoch": 5.650224215246637, "grad_norm": 1.6485264301300049, "learning_rate": 0.00029397676314716676, "loss": 3.5839, "step": 83160 }, { "epoch": 5.650563935317298, "grad_norm": 1.4357872009277344, "learning_rate": 0.000293934298138334, "loss": 3.4223, "step": 83165 }, { "epoch": 5.65090365538796, "grad_norm": 1.233688235282898, "learning_rate": 0.00029389183312950127, "loss": 3.2381, "step": 83170 }, { "epoch": 5.651243375458622, "grad_norm": 1.5941405296325684, "learning_rate": 0.0002938493681206686, "loss": 3.3858, "step": 83175 }, { "epoch": 5.651583095529284, "grad_norm": 1.4996979236602783, "learning_rate": 0.00029380690311183583, "loss": 3.0555, "step": 83180 }, { "epoch": 5.651922815599946, "grad_norm": 1.730315923690796, "learning_rate": 0.0002937644381030031, "loss": 3.3074, "step": 83185 }, { "epoch": 5.652262535670608, "grad_norm": 1.699953556060791, "learning_rate": 0.00029372197309417044, "loss": 3.1338, "step": 83190 }, { "epoch": 5.652602255741269, "grad_norm": 1.124931812286377, "learning_rate": 0.00029367950808533767, "loss": 3.2485, "step": 83195 }, { "epoch": 5.652941975811931, "grad_norm": 0.9985764622688293, "learning_rate": 0.00029363704307650495, "loss": 3.312, "step": 83200 }, { "epoch": 5.653281695882593, "grad_norm": 1.1648412942886353, "learning_rate": 0.0002935945780676723, "loss": 3.1849, "step": 83205 }, { "epoch": 5.653621415953254, "grad_norm": 1.486018180847168, "learning_rate": 0.0002935521130588395, "loss": 3.5409, "step": 83210 }, { "epoch": 5.653961136023916, "grad_norm": 1.2990663051605225, "learning_rate": 0.0002935096480500068, "loss": 3.2972, "step": 83215 }, { "epoch": 5.654300856094578, "grad_norm": 1.062756061553955, "learning_rate": 
0.00029346718304117407, "loss": 3.4709, "step": 83220 }, { "epoch": 5.65464057616524, "grad_norm": 1.3293182849884033, "learning_rate": 0.00029342471803234135, "loss": 3.3621, "step": 83225 }, { "epoch": 5.654980296235902, "grad_norm": 1.1174044609069824, "learning_rate": 0.00029338225302350863, "loss": 3.1219, "step": 83230 }, { "epoch": 5.655320016306564, "grad_norm": 1.256151556968689, "learning_rate": 0.0002933397880146759, "loss": 3.2795, "step": 83235 }, { "epoch": 5.655659736377225, "grad_norm": 1.529794454574585, "learning_rate": 0.0002932973230058432, "loss": 3.3433, "step": 83240 }, { "epoch": 5.655999456447887, "grad_norm": 1.2966632843017578, "learning_rate": 0.00029325485799701047, "loss": 3.2768, "step": 83245 }, { "epoch": 5.656339176518548, "grad_norm": 1.0373021364212036, "learning_rate": 0.00029321239298817775, "loss": 2.9757, "step": 83250 }, { "epoch": 5.65667889658921, "grad_norm": 1.4392356872558594, "learning_rate": 0.00029316992797934503, "loss": 3.5327, "step": 83255 }, { "epoch": 5.657018616659872, "grad_norm": 1.4495371580123901, "learning_rate": 0.0002931274629705123, "loss": 3.1715, "step": 83260 }, { "epoch": 5.657358336730534, "grad_norm": 1.3383029699325562, "learning_rate": 0.0002930849979616796, "loss": 3.4588, "step": 83265 }, { "epoch": 5.657698056801196, "grad_norm": 1.0424457788467407, "learning_rate": 0.00029304253295284687, "loss": 3.5514, "step": 83270 }, { "epoch": 5.658037776871858, "grad_norm": 0.9585484862327576, "learning_rate": 0.00029300006794401415, "loss": 3.0449, "step": 83275 }, { "epoch": 5.658377496942519, "grad_norm": 1.2810486555099487, "learning_rate": 0.00029295760293518143, "loss": 3.2775, "step": 83280 }, { "epoch": 5.658717217013181, "grad_norm": 1.3746498823165894, "learning_rate": 0.0002929151379263487, "loss": 3.1104, "step": 83285 }, { "epoch": 5.659056937083843, "grad_norm": 1.3403877019882202, "learning_rate": 0.00029287267291751594, "loss": 3.3913, "step": 83290 }, { "epoch": 5.659396657154504, 
"grad_norm": 1.1815263032913208, "learning_rate": 0.00029283020790868327, "loss": 2.8851, "step": 83295 }, { "epoch": 5.659736377225166, "grad_norm": 1.3064978122711182, "learning_rate": 0.00029278774289985055, "loss": 3.1875, "step": 83300 }, { "epoch": 5.660076097295828, "grad_norm": 1.268763542175293, "learning_rate": 0.0002927452778910178, "loss": 3.1554, "step": 83305 }, { "epoch": 5.66041581736649, "grad_norm": 1.5712188482284546, "learning_rate": 0.0002927028128821851, "loss": 3.1221, "step": 83310 }, { "epoch": 5.660755537437152, "grad_norm": 1.1095454692840576, "learning_rate": 0.0002926603478733524, "loss": 3.2329, "step": 83315 }, { "epoch": 5.661095257507814, "grad_norm": 1.593909502029419, "learning_rate": 0.0002926178828645196, "loss": 3.4436, "step": 83320 }, { "epoch": 5.661434977578475, "grad_norm": 1.4267706871032715, "learning_rate": 0.0002925754178556869, "loss": 3.4244, "step": 83325 }, { "epoch": 5.661774697649137, "grad_norm": 1.2538747787475586, "learning_rate": 0.00029253295284685423, "loss": 3.5175, "step": 83330 }, { "epoch": 5.662114417719799, "grad_norm": 1.081832766532898, "learning_rate": 0.00029249048783802146, "loss": 3.1748, "step": 83335 }, { "epoch": 5.66245413779046, "grad_norm": 1.3393313884735107, "learning_rate": 0.00029244802282918874, "loss": 3.4578, "step": 83340 }, { "epoch": 5.662793857861122, "grad_norm": 1.2737663984298706, "learning_rate": 0.00029240555782035607, "loss": 3.3642, "step": 83345 }, { "epoch": 5.663133577931784, "grad_norm": 2.189054012298584, "learning_rate": 0.0002923630928115233, "loss": 3.3224, "step": 83350 }, { "epoch": 5.663473298002446, "grad_norm": 1.2380218505859375, "learning_rate": 0.0002923206278026906, "loss": 3.4393, "step": 83355 }, { "epoch": 5.663813018073108, "grad_norm": 1.2159346342086792, "learning_rate": 0.00029227816279385786, "loss": 3.4498, "step": 83360 }, { "epoch": 5.66415273814377, "grad_norm": 1.5189253091812134, "learning_rate": 0.00029223569778502514, "loss": 3.1984, 
"step": 83365 }, { "epoch": 5.664492458214431, "grad_norm": 1.2110998630523682, "learning_rate": 0.0002921932327761924, "loss": 3.1353, "step": 83370 }, { "epoch": 5.664832178285093, "grad_norm": 1.0761845111846924, "learning_rate": 0.0002921507677673597, "loss": 3.2015, "step": 83375 }, { "epoch": 5.665171898355755, "grad_norm": 1.2259334325790405, "learning_rate": 0.000292108302758527, "loss": 3.1692, "step": 83380 }, { "epoch": 5.665511618426416, "grad_norm": 1.3452650308609009, "learning_rate": 0.00029206583774969426, "loss": 3.1205, "step": 83385 }, { "epoch": 5.665851338497078, "grad_norm": 1.0583935976028442, "learning_rate": 0.00029202337274086154, "loss": 3.5389, "step": 83390 }, { "epoch": 5.66619105856774, "grad_norm": 1.2560337781906128, "learning_rate": 0.00029198090773202876, "loss": 3.3173, "step": 83395 }, { "epoch": 5.666530778638402, "grad_norm": 1.6300424337387085, "learning_rate": 0.0002919384427231961, "loss": 3.0289, "step": 83400 }, { "epoch": 5.666870498709064, "grad_norm": 1.3703199625015259, "learning_rate": 0.0002918959777143634, "loss": 3.1261, "step": 83405 }, { "epoch": 5.667210218779726, "grad_norm": 1.103360891342163, "learning_rate": 0.0002918535127055306, "loss": 3.0911, "step": 83410 }, { "epoch": 5.667549938850387, "grad_norm": 1.1365253925323486, "learning_rate": 0.00029181104769669794, "loss": 3.4663, "step": 83415 }, { "epoch": 5.667889658921049, "grad_norm": 1.3159648180007935, "learning_rate": 0.0002917685826878652, "loss": 3.3591, "step": 83420 }, { "epoch": 5.668229378991711, "grad_norm": 1.3335644006729126, "learning_rate": 0.0002917261176790325, "loss": 3.2139, "step": 83425 }, { "epoch": 5.668569099062372, "grad_norm": 1.449536919593811, "learning_rate": 0.0002916836526701997, "loss": 3.3187, "step": 83430 }, { "epoch": 5.668908819133034, "grad_norm": 1.0954670906066895, "learning_rate": 0.00029164118766136706, "loss": 3.4449, "step": 83435 }, { "epoch": 5.669248539203696, "grad_norm": 1.8725440502166748, 
"learning_rate": 0.00029159872265253434, "loss": 3.2583, "step": 83440 }, { "epoch": 5.669588259274358, "grad_norm": 1.2070338726043701, "learning_rate": 0.00029155625764370156, "loss": 3.4455, "step": 83445 }, { "epoch": 5.66992797934502, "grad_norm": 1.3187631368637085, "learning_rate": 0.0002915137926348689, "loss": 3.0717, "step": 83450 }, { "epoch": 5.670267699415682, "grad_norm": 1.1913502216339111, "learning_rate": 0.0002914713276260362, "loss": 3.326, "step": 83455 }, { "epoch": 5.670607419486343, "grad_norm": 1.8088499307632446, "learning_rate": 0.0002914288626172034, "loss": 3.3817, "step": 83460 }, { "epoch": 5.670947139557005, "grad_norm": 1.1842774152755737, "learning_rate": 0.00029138639760837074, "loss": 3.5534, "step": 83465 }, { "epoch": 5.671286859627667, "grad_norm": 2.211717367172241, "learning_rate": 0.000291343932599538, "loss": 3.311, "step": 83470 }, { "epoch": 5.671626579698328, "grad_norm": 1.2751628160476685, "learning_rate": 0.00029130146759070525, "loss": 2.9966, "step": 83475 }, { "epoch": 5.67196629976899, "grad_norm": 1.4446654319763184, "learning_rate": 0.0002912590025818725, "loss": 3.1477, "step": 83480 }, { "epoch": 5.672306019839652, "grad_norm": 1.314406156539917, "learning_rate": 0.00029121653757303986, "loss": 3.2818, "step": 83485 }, { "epoch": 5.672645739910314, "grad_norm": 1.0966620445251465, "learning_rate": 0.0002911740725642071, "loss": 3.5338, "step": 83490 }, { "epoch": 5.672985459980976, "grad_norm": 1.6964219808578491, "learning_rate": 0.00029113160755537437, "loss": 3.2561, "step": 83495 }, { "epoch": 5.673325180051638, "grad_norm": 1.1417680978775024, "learning_rate": 0.0002910891425465417, "loss": 3.3663, "step": 83500 }, { "epoch": 5.673664900122299, "grad_norm": 1.3429216146469116, "learning_rate": 0.0002910466775377089, "loss": 3.2706, "step": 83505 }, { "epoch": 5.674004620192961, "grad_norm": 1.2203364372253418, "learning_rate": 0.0002910042125288762, "loss": 3.308, "step": 83510 }, { "epoch": 
5.674344340263623, "grad_norm": 1.3050607442855835, "learning_rate": 0.0002909617475200435, "loss": 3.368, "step": 83515 }, { "epoch": 5.674684060334284, "grad_norm": 1.0828183889389038, "learning_rate": 0.00029091928251121077, "loss": 3.3721, "step": 83520 }, { "epoch": 5.675023780404946, "grad_norm": 1.024591088294983, "learning_rate": 0.00029087681750237805, "loss": 3.0826, "step": 83525 }, { "epoch": 5.6753635004756084, "grad_norm": 1.2742246389389038, "learning_rate": 0.0002908343524935453, "loss": 3.5466, "step": 83530 }, { "epoch": 5.67570322054627, "grad_norm": 1.649108648300171, "learning_rate": 0.0002907918874847126, "loss": 3.3236, "step": 83535 }, { "epoch": 5.676042940616932, "grad_norm": 1.3568059206008911, "learning_rate": 0.0002907494224758799, "loss": 3.3595, "step": 83540 }, { "epoch": 5.676382660687594, "grad_norm": 1.4382039308547974, "learning_rate": 0.00029070695746704717, "loss": 3.1656, "step": 83545 }, { "epoch": 5.676722380758255, "grad_norm": 1.3649086952209473, "learning_rate": 0.0002906644924582144, "loss": 3.0507, "step": 83550 }, { "epoch": 5.677062100828917, "grad_norm": 1.0940654277801514, "learning_rate": 0.0002906220274493817, "loss": 3.2614, "step": 83555 }, { "epoch": 5.677401820899579, "grad_norm": 1.1845816373825073, "learning_rate": 0.000290579562440549, "loss": 3.2524, "step": 83560 }, { "epoch": 5.67774154097024, "grad_norm": 1.3088537454605103, "learning_rate": 0.00029053709743171623, "loss": 3.2999, "step": 83565 }, { "epoch": 5.678081261040902, "grad_norm": 1.7349931001663208, "learning_rate": 0.00029049463242288357, "loss": 3.3554, "step": 83570 }, { "epoch": 5.6784209811115645, "grad_norm": 1.3283865451812744, "learning_rate": 0.00029045216741405085, "loss": 3.2787, "step": 83575 }, { "epoch": 5.678760701182226, "grad_norm": 1.1174629926681519, "learning_rate": 0.00029040970240521807, "loss": 3.1898, "step": 83580 }, { "epoch": 5.679100421252888, "grad_norm": 1.263651728630066, "learning_rate": 0.00029036723739638535, 
"loss": 3.2024, "step": 83585 }, { "epoch": 5.67944014132355, "grad_norm": 1.1141246557235718, "learning_rate": 0.0002903247723875527, "loss": 3.3423, "step": 83590 }, { "epoch": 5.679779861394211, "grad_norm": 1.098088026046753, "learning_rate": 0.00029028230737871997, "loss": 3.2238, "step": 83595 }, { "epoch": 5.680119581464873, "grad_norm": 1.210775375366211, "learning_rate": 0.0002902398423698872, "loss": 3.3194, "step": 83600 }, { "epoch": 5.680459301535535, "grad_norm": 1.730535626411438, "learning_rate": 0.00029019737736105453, "loss": 3.3206, "step": 83605 }, { "epoch": 5.680799021606196, "grad_norm": 1.2096023559570312, "learning_rate": 0.0002901549123522218, "loss": 3.4687, "step": 83610 }, { "epoch": 5.681138741676858, "grad_norm": 1.205901861190796, "learning_rate": 0.00029011244734338903, "loss": 3.4687, "step": 83615 }, { "epoch": 5.6814784617475205, "grad_norm": 0.9894187450408936, "learning_rate": 0.0002900699823345563, "loss": 3.2947, "step": 83620 }, { "epoch": 5.681818181818182, "grad_norm": 1.2304843664169312, "learning_rate": 0.00029002751732572365, "loss": 3.4673, "step": 83625 }, { "epoch": 5.682157901888844, "grad_norm": 1.525519847869873, "learning_rate": 0.0002899850523168909, "loss": 3.4127, "step": 83630 }, { "epoch": 5.682497621959506, "grad_norm": 1.117050290107727, "learning_rate": 0.00028994258730805815, "loss": 3.2826, "step": 83635 }, { "epoch": 5.682837342030167, "grad_norm": 1.6313241720199585, "learning_rate": 0.0002899001222992255, "loss": 3.5738, "step": 83640 }, { "epoch": 5.683177062100829, "grad_norm": 1.031960129737854, "learning_rate": 0.0002898576572903927, "loss": 3.4392, "step": 83645 }, { "epoch": 5.683516782171491, "grad_norm": 1.4929946660995483, "learning_rate": 0.00028981519228156, "loss": 2.9717, "step": 83650 }, { "epoch": 5.683856502242152, "grad_norm": 1.5921010971069336, "learning_rate": 0.0002897727272727273, "loss": 3.0437, "step": 83655 }, { "epoch": 5.684196222312814, "grad_norm": 0.8543689846992493, 
"learning_rate": 0.00028973026226389455, "loss": 3.5373, "step": 83660 }, { "epoch": 5.6845359423834765, "grad_norm": 1.327782392501831, "learning_rate": 0.00028968779725506183, "loss": 3.0752, "step": 83665 }, { "epoch": 5.684875662454138, "grad_norm": 1.162937879562378, "learning_rate": 0.0002896453322462291, "loss": 3.2613, "step": 83670 }, { "epoch": 5.6852153825248, "grad_norm": 1.2604252099990845, "learning_rate": 0.0002896028672373964, "loss": 3.229, "step": 83675 }, { "epoch": 5.685555102595462, "grad_norm": 1.8994237184524536, "learning_rate": 0.0002895604022285637, "loss": 3.4279, "step": 83680 }, { "epoch": 5.685894822666123, "grad_norm": 1.075506567955017, "learning_rate": 0.00028951793721973095, "loss": 3.443, "step": 83685 }, { "epoch": 5.686234542736785, "grad_norm": 1.6004302501678467, "learning_rate": 0.0002894754722108982, "loss": 3.3258, "step": 83690 }, { "epoch": 5.686574262807447, "grad_norm": 1.4660784006118774, "learning_rate": 0.0002894330072020655, "loss": 3.3216, "step": 83695 }, { "epoch": 5.686913982878108, "grad_norm": 1.1149319410324097, "learning_rate": 0.0002893905421932328, "loss": 3.441, "step": 83700 }, { "epoch": 5.68725370294877, "grad_norm": 1.3174396753311157, "learning_rate": 0.0002893480771844, "loss": 3.2116, "step": 83705 }, { "epoch": 5.687593423019432, "grad_norm": 1.1852397918701172, "learning_rate": 0.00028930561217556735, "loss": 3.0321, "step": 83710 }, { "epoch": 5.687933143090094, "grad_norm": 1.4845259189605713, "learning_rate": 0.00028926314716673463, "loss": 3.2531, "step": 83715 }, { "epoch": 5.688272863160756, "grad_norm": 1.3289886713027954, "learning_rate": 0.00028922068215790186, "loss": 3.3398, "step": 83720 }, { "epoch": 5.688612583231417, "grad_norm": 1.571210265159607, "learning_rate": 0.00028917821714906914, "loss": 3.4014, "step": 83725 }, { "epoch": 5.688952303302079, "grad_norm": 1.3026875257492065, "learning_rate": 0.0002891357521402365, "loss": 3.5004, "step": 83730 }, { "epoch": 
5.689292023372741, "grad_norm": 1.3143351078033447, "learning_rate": 0.0002890932871314037, "loss": 3.2047, "step": 83735 }, { "epoch": 5.689631743443402, "grad_norm": 1.2955553531646729, "learning_rate": 0.000289050822122571, "loss": 3.2946, "step": 83740 }, { "epoch": 5.689971463514064, "grad_norm": 1.2305556535720825, "learning_rate": 0.0002890083571137383, "loss": 3.3872, "step": 83745 }, { "epoch": 5.690311183584726, "grad_norm": 1.1660250425338745, "learning_rate": 0.00028896589210490554, "loss": 3.3294, "step": 83750 }, { "epoch": 5.690650903655388, "grad_norm": 1.3983960151672363, "learning_rate": 0.0002889234270960728, "loss": 3.4355, "step": 83755 }, { "epoch": 5.69099062372605, "grad_norm": 0.959541380405426, "learning_rate": 0.00028888096208724016, "loss": 3.3591, "step": 83760 }, { "epoch": 5.691330343796712, "grad_norm": 1.4330308437347412, "learning_rate": 0.00028883849707840744, "loss": 3.4029, "step": 83765 }, { "epoch": 5.691670063867373, "grad_norm": 1.3411285877227783, "learning_rate": 0.00028879603206957466, "loss": 3.2866, "step": 83770 }, { "epoch": 5.692009783938035, "grad_norm": 1.1847028732299805, "learning_rate": 0.00028875356706074194, "loss": 3.2626, "step": 83775 }, { "epoch": 5.692349504008697, "grad_norm": 1.1213619709014893, "learning_rate": 0.0002887111020519093, "loss": 3.2706, "step": 83780 }, { "epoch": 5.692689224079358, "grad_norm": 1.3316582441329956, "learning_rate": 0.0002886686370430765, "loss": 3.3663, "step": 83785 }, { "epoch": 5.69302894415002, "grad_norm": 1.5513278245925903, "learning_rate": 0.0002886261720342438, "loss": 3.0366, "step": 83790 }, { "epoch": 5.693368664220682, "grad_norm": 1.2655068635940552, "learning_rate": 0.0002885837070254111, "loss": 3.4931, "step": 83795 }, { "epoch": 5.693708384291344, "grad_norm": 1.40325927734375, "learning_rate": 0.00028854124201657834, "loss": 3.2275, "step": 83800 }, { "epoch": 5.694048104362006, "grad_norm": 1.1005700826644897, "learning_rate": 0.0002884987770077456, 
"loss": 3.1239, "step": 83805 }, { "epoch": 5.694387824432668, "grad_norm": 1.5606948137283325, "learning_rate": 0.0002884563119989129, "loss": 3.3206, "step": 83810 }, { "epoch": 5.694727544503329, "grad_norm": 1.4279072284698486, "learning_rate": 0.0002884138469900802, "loss": 3.4675, "step": 83815 }, { "epoch": 5.695067264573991, "grad_norm": 1.388370156288147, "learning_rate": 0.00028837138198124746, "loss": 3.2842, "step": 83820 }, { "epoch": 5.695406984644653, "grad_norm": 1.2231934070587158, "learning_rate": 0.00028832891697241474, "loss": 3.1526, "step": 83825 }, { "epoch": 5.695746704715314, "grad_norm": 1.4067208766937256, "learning_rate": 0.000288286451963582, "loss": 3.3376, "step": 83830 }, { "epoch": 5.696086424785976, "grad_norm": 1.3213545083999634, "learning_rate": 0.0002882439869547493, "loss": 3.6422, "step": 83835 }, { "epoch": 5.6964261448566385, "grad_norm": 1.4461262226104736, "learning_rate": 0.0002882015219459166, "loss": 3.1928, "step": 83840 }, { "epoch": 5.6967658649273, "grad_norm": 1.4744997024536133, "learning_rate": 0.0002881590569370838, "loss": 3.2983, "step": 83845 }, { "epoch": 5.697105584997962, "grad_norm": 1.2134844064712524, "learning_rate": 0.00028811659192825114, "loss": 3.3399, "step": 83850 }, { "epoch": 5.697445305068624, "grad_norm": 1.5331467390060425, "learning_rate": 0.0002880741269194184, "loss": 3.3142, "step": 83855 }, { "epoch": 5.697785025139285, "grad_norm": 1.3931444883346558, "learning_rate": 0.00028803166191058565, "loss": 3.3207, "step": 83860 }, { "epoch": 5.698124745209947, "grad_norm": 1.572403907775879, "learning_rate": 0.000287989196901753, "loss": 3.268, "step": 83865 }, { "epoch": 5.698464465280609, "grad_norm": 1.3074382543563843, "learning_rate": 0.00028794673189292026, "loss": 3.2769, "step": 83870 }, { "epoch": 5.69880418535127, "grad_norm": 1.1796472072601318, "learning_rate": 0.0002879042668840875, "loss": 3.3681, "step": 83875 }, { "epoch": 5.699143905421932, "grad_norm": 1.469631314277649, 
"learning_rate": 0.00028786180187525477, "loss": 3.3654, "step": 83880 }, { "epoch": 5.6994836254925945, "grad_norm": 1.4695987701416016, "learning_rate": 0.0002878193368664221, "loss": 3.192, "step": 83885 }, { "epoch": 5.699823345563256, "grad_norm": 1.113576889038086, "learning_rate": 0.00028777687185758933, "loss": 3.1893, "step": 83890 }, { "epoch": 5.700163065633918, "grad_norm": 0.9073328971862793, "learning_rate": 0.0002877344068487566, "loss": 3.246, "step": 83895 }, { "epoch": 5.70050278570458, "grad_norm": 1.3359767198562622, "learning_rate": 0.00028769194183992394, "loss": 3.2245, "step": 83900 }, { "epoch": 5.700842505775241, "grad_norm": 1.3405157327651978, "learning_rate": 0.00028764947683109117, "loss": 3.3289, "step": 83905 }, { "epoch": 5.701182225845903, "grad_norm": 1.6808031797409058, "learning_rate": 0.00028760701182225845, "loss": 3.4664, "step": 83910 }, { "epoch": 5.701521945916565, "grad_norm": 1.1212904453277588, "learning_rate": 0.00028756454681342573, "loss": 3.4291, "step": 83915 }, { "epoch": 5.701861665987226, "grad_norm": 1.6050950288772583, "learning_rate": 0.000287522081804593, "loss": 3.2434, "step": 83920 }, { "epoch": 5.702201386057888, "grad_norm": 1.1977280378341675, "learning_rate": 0.0002874796167957603, "loss": 3.1832, "step": 83925 }, { "epoch": 5.70254110612855, "grad_norm": 1.2677950859069824, "learning_rate": 0.00028743715178692757, "loss": 3.2692, "step": 83930 }, { "epoch": 5.702880826199212, "grad_norm": 1.1521778106689453, "learning_rate": 0.0002873946867780949, "loss": 3.1911, "step": 83935 }, { "epoch": 5.703220546269874, "grad_norm": 1.2643065452575684, "learning_rate": 0.00028735222176926213, "loss": 3.0202, "step": 83940 }, { "epoch": 5.703560266340535, "grad_norm": 1.3415381908416748, "learning_rate": 0.0002873097567604294, "loss": 3.5159, "step": 83945 }, { "epoch": 5.703899986411197, "grad_norm": 1.5748339891433716, "learning_rate": 0.0002872672917515967, "loss": 3.0452, "step": 83950 }, { "epoch": 
5.704239706481859, "grad_norm": 1.4791101217269897, "learning_rate": 0.00028722482674276397, "loss": 3.2554, "step": 83955 }, { "epoch": 5.70457942655252, "grad_norm": 1.6069520711898804, "learning_rate": 0.00028718236173393125, "loss": 3.1924, "step": 83960 }, { "epoch": 5.704919146623182, "grad_norm": 1.2859759330749512, "learning_rate": 0.00028713989672509853, "loss": 3.248, "step": 83965 }, { "epoch": 5.705258866693844, "grad_norm": 1.5388761758804321, "learning_rate": 0.0002870974317162658, "loss": 3.2658, "step": 83970 }, { "epoch": 5.705598586764506, "grad_norm": 1.1554808616638184, "learning_rate": 0.0002870549667074331, "loss": 3.4165, "step": 83975 }, { "epoch": 5.705938306835168, "grad_norm": 1.194992184638977, "learning_rate": 0.00028701250169860037, "loss": 3.4504, "step": 83980 }, { "epoch": 5.70627802690583, "grad_norm": 1.0891971588134766, "learning_rate": 0.0002869700366897676, "loss": 3.3002, "step": 83985 }, { "epoch": 5.706617746976491, "grad_norm": 1.1633856296539307, "learning_rate": 0.00028692757168093493, "loss": 3.2337, "step": 83990 }, { "epoch": 5.706957467047153, "grad_norm": 1.237410545349121, "learning_rate": 0.0002868851066721022, "loss": 3.3345, "step": 83995 }, { "epoch": 5.707297187117815, "grad_norm": 1.0760291814804077, "learning_rate": 0.00028684264166326944, "loss": 3.1719, "step": 84000 }, { "epoch": 5.707636907188476, "grad_norm": 1.437358021736145, "learning_rate": 0.00028680017665443677, "loss": 3.3263, "step": 84005 }, { "epoch": 5.707976627259138, "grad_norm": 1.1450082063674927, "learning_rate": 0.00028675771164560405, "loss": 3.3883, "step": 84010 }, { "epoch": 5.7083163473298, "grad_norm": 1.36725652217865, "learning_rate": 0.0002867152466367713, "loss": 3.1741, "step": 84015 }, { "epoch": 5.708656067400462, "grad_norm": 1.2946813106536865, "learning_rate": 0.00028667278162793856, "loss": 3.3981, "step": 84020 }, { "epoch": 5.708995787471124, "grad_norm": 1.1740080118179321, "learning_rate": 0.0002866303166191059, 
"loss": 3.1682, "step": 84025 }, { "epoch": 5.709335507541786, "grad_norm": 1.4903719425201416, "learning_rate": 0.0002865878516102731, "loss": 3.1461, "step": 84030 }, { "epoch": 5.709675227612447, "grad_norm": 1.2575631141662598, "learning_rate": 0.0002865453866014404, "loss": 3.3364, "step": 84035 }, { "epoch": 5.710014947683109, "grad_norm": 1.247846007347107, "learning_rate": 0.00028650292159260773, "loss": 3.436, "step": 84040 }, { "epoch": 5.710354667753771, "grad_norm": 1.2875159978866577, "learning_rate": 0.00028646045658377496, "loss": 3.4522, "step": 84045 }, { "epoch": 5.710694387824432, "grad_norm": 1.0834581851959229, "learning_rate": 0.00028641799157494224, "loss": 3.5924, "step": 84050 }, { "epoch": 5.711034107895094, "grad_norm": 1.579704761505127, "learning_rate": 0.00028637552656610957, "loss": 3.3952, "step": 84055 }, { "epoch": 5.711373827965756, "grad_norm": 1.2190006971359253, "learning_rate": 0.0002863330615572768, "loss": 3.2566, "step": 84060 }, { "epoch": 5.711713548036418, "grad_norm": 1.4950100183486938, "learning_rate": 0.0002862905965484441, "loss": 3.1527, "step": 84065 }, { "epoch": 5.71205326810708, "grad_norm": 1.4644900560379028, "learning_rate": 0.00028624813153961136, "loss": 3.3531, "step": 84070 }, { "epoch": 5.712392988177742, "grad_norm": 1.5372470617294312, "learning_rate": 0.00028620566653077864, "loss": 3.4572, "step": 84075 }, { "epoch": 5.712732708248403, "grad_norm": 1.0158319473266602, "learning_rate": 0.0002861632015219459, "loss": 3.5362, "step": 84080 }, { "epoch": 5.713072428319065, "grad_norm": 1.0296577215194702, "learning_rate": 0.0002861207365131132, "loss": 3.2127, "step": 84085 }, { "epoch": 5.713412148389727, "grad_norm": 1.343626618385315, "learning_rate": 0.0002860782715042805, "loss": 3.1669, "step": 84090 }, { "epoch": 5.713751868460388, "grad_norm": 1.0250276327133179, "learning_rate": 0.00028603580649544776, "loss": 3.3486, "step": 84095 }, { "epoch": 5.71409158853105, "grad_norm": 
1.2114970684051514, "learning_rate": 0.00028599334148661504, "loss": 3.1563, "step": 84100 }, { "epoch": 5.7144313086017124, "grad_norm": 1.327272653579712, "learning_rate": 0.0002859508764777823, "loss": 3.3741, "step": 84105 }, { "epoch": 5.714771028672374, "grad_norm": 1.2528456449508667, "learning_rate": 0.0002859084114689496, "loss": 3.2728, "step": 84110 }, { "epoch": 5.715110748743036, "grad_norm": 1.3103288412094116, "learning_rate": 0.0002858659464601169, "loss": 3.3178, "step": 84115 }, { "epoch": 5.715450468813698, "grad_norm": 1.4192146062850952, "learning_rate": 0.00028582348145128416, "loss": 3.4419, "step": 84120 }, { "epoch": 5.715790188884359, "grad_norm": 1.787152647972107, "learning_rate": 0.00028578101644245144, "loss": 3.3104, "step": 84125 }, { "epoch": 5.716129908955021, "grad_norm": 1.5561449527740479, "learning_rate": 0.0002857385514336187, "loss": 3.2022, "step": 84130 }, { "epoch": 5.716469629025683, "grad_norm": 1.255539894104004, "learning_rate": 0.000285696086424786, "loss": 3.4048, "step": 84135 }, { "epoch": 5.716809349096344, "grad_norm": 1.1553714275360107, "learning_rate": 0.0002856536214159532, "loss": 3.527, "step": 84140 }, { "epoch": 5.717149069167006, "grad_norm": 1.5783594846725464, "learning_rate": 0.00028561115640712056, "loss": 3.493, "step": 84145 }, { "epoch": 5.7174887892376685, "grad_norm": 1.0675417184829712, "learning_rate": 0.00028556869139828784, "loss": 3.3942, "step": 84150 }, { "epoch": 5.71782850930833, "grad_norm": 1.0814639329910278, "learning_rate": 0.00028552622638945506, "loss": 3.6371, "step": 84155 }, { "epoch": 5.718168229378992, "grad_norm": 1.4008392095565796, "learning_rate": 0.0002854837613806224, "loss": 3.2417, "step": 84160 }, { "epoch": 5.718507949449654, "grad_norm": 1.0640583038330078, "learning_rate": 0.0002854412963717897, "loss": 3.4327, "step": 84165 }, { "epoch": 5.718847669520315, "grad_norm": 1.1582568883895874, "learning_rate": 0.0002853988313629569, "loss": 3.5182, "step": 84170 }, { 
"epoch": 5.719187389590977, "grad_norm": 1.7314834594726562, "learning_rate": 0.0002853563663541242, "loss": 3.0553, "step": 84175 }, { "epoch": 5.719527109661639, "grad_norm": 1.3355867862701416, "learning_rate": 0.0002853139013452915, "loss": 3.1758, "step": 84180 }, { "epoch": 5.7198668297323, "grad_norm": 1.1118274927139282, "learning_rate": 0.00028527143633645875, "loss": 3.4319, "step": 84185 }, { "epoch": 5.720206549802962, "grad_norm": 1.0968399047851562, "learning_rate": 0.000285228971327626, "loss": 3.1654, "step": 84190 }, { "epoch": 5.7205462698736245, "grad_norm": 1.2486218214035034, "learning_rate": 0.00028518650631879336, "loss": 3.2656, "step": 84195 }, { "epoch": 5.720885989944286, "grad_norm": 1.678756833076477, "learning_rate": 0.0002851440413099606, "loss": 3.2789, "step": 84200 }, { "epoch": 5.721225710014948, "grad_norm": 1.0732526779174805, "learning_rate": 0.00028510157630112787, "loss": 3.2578, "step": 84205 }, { "epoch": 5.72156543008561, "grad_norm": 1.191724181175232, "learning_rate": 0.00028505911129229515, "loss": 3.3685, "step": 84210 }, { "epoch": 5.721905150156271, "grad_norm": 2.7248892784118652, "learning_rate": 0.0002850166462834624, "loss": 3.4124, "step": 84215 }, { "epoch": 5.722244870226933, "grad_norm": 1.5714296102523804, "learning_rate": 0.0002849741812746297, "loss": 3.4045, "step": 84220 }, { "epoch": 5.722584590297595, "grad_norm": 1.1050894260406494, "learning_rate": 0.000284931716265797, "loss": 3.2973, "step": 84225 }, { "epoch": 5.722924310368256, "grad_norm": 1.0211206674575806, "learning_rate": 0.00028488925125696427, "loss": 3.3019, "step": 84230 }, { "epoch": 5.723264030438918, "grad_norm": 1.065761923789978, "learning_rate": 0.00028484678624813155, "loss": 3.1187, "step": 84235 }, { "epoch": 5.7236037505095805, "grad_norm": 1.2445297241210938, "learning_rate": 0.0002848043212392988, "loss": 3.4703, "step": 84240 }, { "epoch": 5.723943470580242, "grad_norm": 1.358860969543457, "learning_rate": 
0.00028476185623046605, "loss": 3.3242, "step": 84245 }, { "epoch": 5.724283190650904, "grad_norm": 1.2398618459701538, "learning_rate": 0.0002847193912216334, "loss": 3.342, "step": 84250 }, { "epoch": 5.724622910721566, "grad_norm": 1.356731653213501, "learning_rate": 0.00028467692621280067, "loss": 3.0612, "step": 84255 }, { "epoch": 5.724962630792227, "grad_norm": 1.1231939792633057, "learning_rate": 0.0002846344612039679, "loss": 2.9485, "step": 84260 }, { "epoch": 5.725302350862889, "grad_norm": 1.2868402004241943, "learning_rate": 0.0002845919961951352, "loss": 3.3857, "step": 84265 }, { "epoch": 5.725642070933551, "grad_norm": 1.3501789569854736, "learning_rate": 0.0002845495311863025, "loss": 3.3094, "step": 84270 }, { "epoch": 5.725981791004212, "grad_norm": 1.2119076251983643, "learning_rate": 0.0002845070661774698, "loss": 3.3991, "step": 84275 }, { "epoch": 5.726321511074874, "grad_norm": 1.312733769416809, "learning_rate": 0.000284464601168637, "loss": 3.3143, "step": 84280 }, { "epoch": 5.7266612311455365, "grad_norm": 1.1603126525878906, "learning_rate": 0.00028442213615980435, "loss": 3.1231, "step": 84285 }, { "epoch": 5.727000951216198, "grad_norm": 1.0106850862503052, "learning_rate": 0.0002843796711509716, "loss": 3.2352, "step": 84290 }, { "epoch": 5.72734067128686, "grad_norm": 1.1433589458465576, "learning_rate": 0.00028433720614213885, "loss": 3.0187, "step": 84295 }, { "epoch": 5.727680391357522, "grad_norm": 1.0235053300857544, "learning_rate": 0.0002842947411333062, "loss": 3.3263, "step": 84300 }, { "epoch": 5.728020111428183, "grad_norm": 1.137810468673706, "learning_rate": 0.00028425227612447347, "loss": 3.33, "step": 84305 }, { "epoch": 5.728359831498845, "grad_norm": 1.8179856538772583, "learning_rate": 0.0002842098111156407, "loss": 3.3037, "step": 84310 }, { "epoch": 5.728699551569507, "grad_norm": 1.4304217100143433, "learning_rate": 0.00028416734610680803, "loss": 3.2815, "step": 84315 }, { "epoch": 5.729039271640168, 
"grad_norm": 1.6820049285888672, "learning_rate": 0.0002841248810979753, "loss": 3.2329, "step": 84320 }, { "epoch": 5.72937899171083, "grad_norm": 1.3933405876159668, "learning_rate": 0.00028408241608914253, "loss": 3.0459, "step": 84325 }, { "epoch": 5.7297187117814925, "grad_norm": 1.2541087865829468, "learning_rate": 0.0002840399510803098, "loss": 3.2023, "step": 84330 }, { "epoch": 5.730058431852154, "grad_norm": 1.4218573570251465, "learning_rate": 0.00028399748607147715, "loss": 3.095, "step": 84335 }, { "epoch": 5.730398151922816, "grad_norm": 1.0875837802886963, "learning_rate": 0.0002839550210626444, "loss": 3.2187, "step": 84340 }, { "epoch": 5.730737871993478, "grad_norm": 1.93548583984375, "learning_rate": 0.00028391255605381165, "loss": 3.4429, "step": 84345 }, { "epoch": 5.731077592064139, "grad_norm": 1.1451404094696045, "learning_rate": 0.000283870091044979, "loss": 3.1737, "step": 84350 }, { "epoch": 5.731417312134801, "grad_norm": 1.1955403089523315, "learning_rate": 0.0002838276260361462, "loss": 3.4413, "step": 84355 }, { "epoch": 5.731757032205463, "grad_norm": 1.2153795957565308, "learning_rate": 0.0002837851610273135, "loss": 3.0787, "step": 84360 }, { "epoch": 5.732096752276124, "grad_norm": 1.588208556175232, "learning_rate": 0.0002837426960184808, "loss": 3.3062, "step": 84365 }, { "epoch": 5.732436472346786, "grad_norm": 1.291721224784851, "learning_rate": 0.00028370023100964805, "loss": 3.1506, "step": 84370 }, { "epoch": 5.7327761924174485, "grad_norm": 1.1676305532455444, "learning_rate": 0.00028365776600081533, "loss": 3.1515, "step": 84375 }, { "epoch": 5.73311591248811, "grad_norm": 1.333276629447937, "learning_rate": 0.0002836153009919826, "loss": 3.2679, "step": 84380 }, { "epoch": 5.733455632558772, "grad_norm": 1.3009930849075317, "learning_rate": 0.0002835728359831499, "loss": 3.1714, "step": 84385 }, { "epoch": 5.733795352629433, "grad_norm": 1.3268712759017944, "learning_rate": 0.0002835303709743172, "loss": 3.4315, "step": 
84390 }, { "epoch": 5.734135072700095, "grad_norm": 1.6195051670074463, "learning_rate": 0.00028348790596548445, "loss": 3.6788, "step": 84395 }, { "epoch": 5.734474792770757, "grad_norm": 1.1927467584609985, "learning_rate": 0.0002834454409566517, "loss": 3.2405, "step": 84400 }, { "epoch": 5.734814512841418, "grad_norm": 1.5060793161392212, "learning_rate": 0.000283402975947819, "loss": 3.0351, "step": 84405 }, { "epoch": 5.73515423291208, "grad_norm": 1.0902116298675537, "learning_rate": 0.0002833605109389863, "loss": 3.6105, "step": 84410 }, { "epoch": 5.7354939529827424, "grad_norm": 1.2895684242248535, "learning_rate": 0.0002833180459301535, "loss": 3.2706, "step": 84415 }, { "epoch": 5.735833673053404, "grad_norm": 1.2862046957015991, "learning_rate": 0.00028327558092132085, "loss": 3.3683, "step": 84420 }, { "epoch": 5.736173393124066, "grad_norm": 1.2476706504821777, "learning_rate": 0.00028323311591248813, "loss": 3.4521, "step": 84425 }, { "epoch": 5.736513113194728, "grad_norm": 1.1989585161209106, "learning_rate": 0.00028319065090365536, "loss": 3.2562, "step": 84430 }, { "epoch": 5.736852833265389, "grad_norm": 1.577304720878601, "learning_rate": 0.00028314818589482264, "loss": 3.2547, "step": 84435 }, { "epoch": 5.737192553336051, "grad_norm": 0.9955445528030396, "learning_rate": 0.00028310572088599, "loss": 3.4586, "step": 84440 }, { "epoch": 5.737532273406713, "grad_norm": 1.6580134630203247, "learning_rate": 0.00028306325587715725, "loss": 3.0236, "step": 84445 }, { "epoch": 5.737871993477374, "grad_norm": 1.5094265937805176, "learning_rate": 0.0002830207908683245, "loss": 3.5044, "step": 84450 }, { "epoch": 5.738211713548036, "grad_norm": 1.1918669939041138, "learning_rate": 0.0002829783258594918, "loss": 3.2654, "step": 84455 }, { "epoch": 5.7385514336186985, "grad_norm": 1.0970133543014526, "learning_rate": 0.0002829358608506591, "loss": 3.1938, "step": 84460 }, { "epoch": 5.73889115368936, "grad_norm": 1.383070707321167, "learning_rate": 
0.0002828933958418263, "loss": 3.3318, "step": 84465 }, { "epoch": 5.739230873760022, "grad_norm": 1.624664068222046, "learning_rate": 0.0002828509308329936, "loss": 3.5296, "step": 84470 }, { "epoch": 5.739570593830684, "grad_norm": 1.1861224174499512, "learning_rate": 0.00028280846582416094, "loss": 3.4985, "step": 84475 }, { "epoch": 5.739910313901345, "grad_norm": 1.7401050329208374, "learning_rate": 0.00028276600081532816, "loss": 3.2337, "step": 84480 }, { "epoch": 5.740250033972007, "grad_norm": 1.1068778038024902, "learning_rate": 0.00028272353580649544, "loss": 3.2919, "step": 84485 }, { "epoch": 5.740589754042669, "grad_norm": 1.372926950454712, "learning_rate": 0.0002826810707976628, "loss": 3.5083, "step": 84490 }, { "epoch": 5.74092947411333, "grad_norm": 0.9234089851379395, "learning_rate": 0.00028263860578883, "loss": 3.3807, "step": 84495 }, { "epoch": 5.741269194183992, "grad_norm": 1.440155267715454, "learning_rate": 0.0002825961407799973, "loss": 3.3207, "step": 84500 }, { "epoch": 5.7416089142546545, "grad_norm": 1.2360278367996216, "learning_rate": 0.00028255367577116456, "loss": 2.9933, "step": 84505 }, { "epoch": 5.741948634325316, "grad_norm": 1.2758029699325562, "learning_rate": 0.00028251121076233184, "loss": 3.0358, "step": 84510 }, { "epoch": 5.742288354395978, "grad_norm": 1.1770007610321045, "learning_rate": 0.0002824687457534991, "loss": 3.3903, "step": 84515 }, { "epoch": 5.74262807446664, "grad_norm": 1.842834234237671, "learning_rate": 0.0002824262807446664, "loss": 3.374, "step": 84520 }, { "epoch": 5.742967794537301, "grad_norm": 1.1130189895629883, "learning_rate": 0.0002823838157358337, "loss": 3.4012, "step": 84525 }, { "epoch": 5.743307514607963, "grad_norm": 1.052451491355896, "learning_rate": 0.00028234135072700096, "loss": 3.3718, "step": 84530 }, { "epoch": 5.743647234678625, "grad_norm": 1.789033055305481, "learning_rate": 0.00028229888571816824, "loss": 3.2877, "step": 84535 }, { "epoch": 5.743986954749286, "grad_norm": 
1.0958988666534424, "learning_rate": 0.00028225642070933547, "loss": 3.4858, "step": 84540 }, { "epoch": 5.744326674819948, "grad_norm": 1.412888765335083, "learning_rate": 0.0002822139557005028, "loss": 3.4061, "step": 84545 }, { "epoch": 5.7446663948906105, "grad_norm": 1.305027723312378, "learning_rate": 0.0002821714906916701, "loss": 3.273, "step": 84550 }, { "epoch": 5.745006114961272, "grad_norm": 1.4536864757537842, "learning_rate": 0.0002821290256828373, "loss": 3.1893, "step": 84555 }, { "epoch": 5.745345835031934, "grad_norm": 1.0856400728225708, "learning_rate": 0.00028208656067400464, "loss": 3.4312, "step": 84560 }, { "epoch": 5.745685555102596, "grad_norm": 1.6095927953720093, "learning_rate": 0.0002820440956651719, "loss": 3.2882, "step": 84565 }, { "epoch": 5.746025275173257, "grad_norm": 1.1568762063980103, "learning_rate": 0.00028200163065633915, "loss": 3.4157, "step": 84570 }, { "epoch": 5.746364995243919, "grad_norm": 1.3153403997421265, "learning_rate": 0.00028195916564750643, "loss": 3.4538, "step": 84575 }, { "epoch": 5.746704715314581, "grad_norm": 1.212103009223938, "learning_rate": 0.00028191670063867376, "loss": 3.1153, "step": 84580 }, { "epoch": 5.747044435385242, "grad_norm": 1.4356935024261475, "learning_rate": 0.000281874235629841, "loss": 3.4032, "step": 84585 }, { "epoch": 5.747384155455904, "grad_norm": 1.2305059432983398, "learning_rate": 0.00028183177062100827, "loss": 3.5674, "step": 84590 }, { "epoch": 5.7477238755265665, "grad_norm": 1.486448049545288, "learning_rate": 0.0002817893056121756, "loss": 3.2268, "step": 84595 }, { "epoch": 5.748063595597228, "grad_norm": 1.2097058296203613, "learning_rate": 0.00028174684060334283, "loss": 3.0411, "step": 84600 }, { "epoch": 5.74840331566789, "grad_norm": 1.1578798294067383, "learning_rate": 0.0002817043755945101, "loss": 3.3681, "step": 84605 }, { "epoch": 5.748743035738551, "grad_norm": 1.5245410203933716, "learning_rate": 0.00028166191058567744, "loss": 3.3174, "step": 84610 }, 
{ "epoch": 5.749082755809213, "grad_norm": 1.2527676820755005, "learning_rate": 0.0002816194455768447, "loss": 3.2031, "step": 84615 }, { "epoch": 5.749422475879875, "grad_norm": 1.144703984260559, "learning_rate": 0.00028157698056801195, "loss": 3.2918, "step": 84620 }, { "epoch": 5.749762195950536, "grad_norm": 1.083214521408081, "learning_rate": 0.00028153451555917923, "loss": 3.3416, "step": 84625 }, { "epoch": 5.750101916021198, "grad_norm": 1.438201904296875, "learning_rate": 0.00028149205055034656, "loss": 3.2301, "step": 84630 }, { "epoch": 5.75044163609186, "grad_norm": 1.2193162441253662, "learning_rate": 0.0002814495855415138, "loss": 3.1434, "step": 84635 }, { "epoch": 5.750781356162522, "grad_norm": 1.0355169773101807, "learning_rate": 0.00028140712053268107, "loss": 3.2062, "step": 84640 }, { "epoch": 5.751121076233184, "grad_norm": 0.9595321416854858, "learning_rate": 0.0002813646555238484, "loss": 3.2106, "step": 84645 }, { "epoch": 5.751460796303846, "grad_norm": 1.038311243057251, "learning_rate": 0.00028132219051501563, "loss": 3.1233, "step": 84650 }, { "epoch": 5.751800516374507, "grad_norm": 1.3946223258972168, "learning_rate": 0.0002812797255061829, "loss": 3.3088, "step": 84655 }, { "epoch": 5.752140236445169, "grad_norm": 1.1212329864501953, "learning_rate": 0.0002812372604973502, "loss": 3.5412, "step": 84660 }, { "epoch": 5.752479956515831, "grad_norm": 1.2733662128448486, "learning_rate": 0.00028119479548851747, "loss": 3.2019, "step": 84665 }, { "epoch": 5.752819676586492, "grad_norm": 1.3765931129455566, "learning_rate": 0.00028115233047968475, "loss": 3.3425, "step": 84670 }, { "epoch": 5.753159396657154, "grad_norm": 1.4314556121826172, "learning_rate": 0.00028110986547085203, "loss": 3.1435, "step": 84675 }, { "epoch": 5.753499116727816, "grad_norm": 1.493634581565857, "learning_rate": 0.0002810674004620193, "loss": 3.4096, "step": 84680 }, { "epoch": 5.753838836798478, "grad_norm": 1.142976999282837, "learning_rate": 
0.0002810249354531866, "loss": 3.127, "step": 84685 }, { "epoch": 5.75417855686914, "grad_norm": 1.4310213327407837, "learning_rate": 0.00028098247044435387, "loss": 3.3747, "step": 84690 }, { "epoch": 5.754518276939802, "grad_norm": 1.7936238050460815, "learning_rate": 0.0002809400054355211, "loss": 3.4339, "step": 84695 }, { "epoch": 5.754857997010463, "grad_norm": 1.5802721977233887, "learning_rate": 0.00028089754042668843, "loss": 2.9988, "step": 84700 }, { "epoch": 5.755197717081125, "grad_norm": 1.4637600183486938, "learning_rate": 0.0002808550754178557, "loss": 3.1987, "step": 84705 }, { "epoch": 5.755537437151787, "grad_norm": 1.5816657543182373, "learning_rate": 0.00028081261040902294, "loss": 3.2226, "step": 84710 }, { "epoch": 5.755877157222448, "grad_norm": 1.4149227142333984, "learning_rate": 0.00028077014540019027, "loss": 2.8627, "step": 84715 }, { "epoch": 5.75621687729311, "grad_norm": 1.4538347721099854, "learning_rate": 0.00028072768039135755, "loss": 3.2667, "step": 84720 }, { "epoch": 5.7565565973637725, "grad_norm": 1.363510012626648, "learning_rate": 0.0002806852153825248, "loss": 3.149, "step": 84725 }, { "epoch": 5.756896317434434, "grad_norm": 1.2621515989303589, "learning_rate": 0.00028064275037369206, "loss": 3.0963, "step": 84730 }, { "epoch": 5.757236037505096, "grad_norm": 1.1072231531143188, "learning_rate": 0.0002806002853648594, "loss": 3.1605, "step": 84735 }, { "epoch": 5.757575757575758, "grad_norm": 1.3483829498291016, "learning_rate": 0.0002805578203560266, "loss": 3.3207, "step": 84740 }, { "epoch": 5.757915477646419, "grad_norm": 1.680391788482666, "learning_rate": 0.0002805153553471939, "loss": 3.0775, "step": 84745 }, { "epoch": 5.758255197717081, "grad_norm": 0.9878749847412109, "learning_rate": 0.00028047289033836123, "loss": 3.3405, "step": 84750 }, { "epoch": 5.758594917787743, "grad_norm": 1.2288458347320557, "learning_rate": 0.00028043042532952846, "loss": 3.5703, "step": 84755 }, { "epoch": 5.758934637858404, 
"grad_norm": 1.416613221168518, "learning_rate": 0.00028038796032069574, "loss": 3.2163, "step": 84760 }, { "epoch": 5.759274357929066, "grad_norm": 1.3491005897521973, "learning_rate": 0.000280345495311863, "loss": 3.506, "step": 84765 }, { "epoch": 5.7596140779997285, "grad_norm": 1.1640654802322388, "learning_rate": 0.0002803030303030303, "loss": 3.205, "step": 84770 }, { "epoch": 5.75995379807039, "grad_norm": 1.49130380153656, "learning_rate": 0.0002802605652941976, "loss": 3.2464, "step": 84775 }, { "epoch": 5.760293518141052, "grad_norm": 1.399477243423462, "learning_rate": 0.00028021810028536486, "loss": 3.4784, "step": 84780 }, { "epoch": 5.760633238211714, "grad_norm": 1.3632524013519287, "learning_rate": 0.0002801756352765322, "loss": 3.3452, "step": 84785 }, { "epoch": 5.760972958282375, "grad_norm": 1.201154351234436, "learning_rate": 0.0002801331702676994, "loss": 3.2224, "step": 84790 }, { "epoch": 5.761312678353037, "grad_norm": 1.399091362953186, "learning_rate": 0.0002800907052588667, "loss": 3.3485, "step": 84795 }, { "epoch": 5.761652398423699, "grad_norm": 1.2704631090164185, "learning_rate": 0.000280048240250034, "loss": 3.1578, "step": 84800 }, { "epoch": 5.76199211849436, "grad_norm": 1.4070289134979248, "learning_rate": 0.00028000577524120126, "loss": 2.8388, "step": 84805 }, { "epoch": 5.762331838565022, "grad_norm": 1.232312798500061, "learning_rate": 0.00027996331023236854, "loss": 3.1561, "step": 84810 }, { "epoch": 5.7626715586356845, "grad_norm": 1.3863377571105957, "learning_rate": 0.0002799208452235358, "loss": 3.4964, "step": 84815 }, { "epoch": 5.763011278706346, "grad_norm": 1.3534613847732544, "learning_rate": 0.0002798783802147031, "loss": 3.5377, "step": 84820 }, { "epoch": 5.763350998777008, "grad_norm": 0.9934512376785278, "learning_rate": 0.0002798359152058704, "loss": 3.2449, "step": 84825 }, { "epoch": 5.76369071884767, "grad_norm": 1.5168591737747192, "learning_rate": 0.00027979345019703766, "loss": 3.4437, "step": 84830 
}, { "epoch": 5.764030438918331, "grad_norm": 1.1827921867370605, "learning_rate": 0.0002797509851882049, "loss": 3.2606, "step": 84835 }, { "epoch": 5.764370158988993, "grad_norm": 1.1574031114578247, "learning_rate": 0.0002797085201793722, "loss": 3.1234, "step": 84840 }, { "epoch": 5.764709879059655, "grad_norm": 1.52364182472229, "learning_rate": 0.0002796660551705395, "loss": 3.3289, "step": 84845 }, { "epoch": 5.765049599130316, "grad_norm": 1.263849139213562, "learning_rate": 0.0002796235901617067, "loss": 3.1145, "step": 84850 }, { "epoch": 5.765389319200978, "grad_norm": 1.3936432600021362, "learning_rate": 0.00027958112515287406, "loss": 3.4117, "step": 84855 }, { "epoch": 5.7657290392716405, "grad_norm": 1.1014965772628784, "learning_rate": 0.00027953866014404134, "loss": 3.3145, "step": 84860 }, { "epoch": 5.766068759342302, "grad_norm": 0.9904446005821228, "learning_rate": 0.00027949619513520856, "loss": 3.3412, "step": 84865 }, { "epoch": 5.766408479412964, "grad_norm": 1.0077418088912964, "learning_rate": 0.00027945373012637584, "loss": 3.457, "step": 84870 }, { "epoch": 5.766748199483626, "grad_norm": 1.3061989545822144, "learning_rate": 0.0002794112651175432, "loss": 3.311, "step": 84875 }, { "epoch": 5.767087919554287, "grad_norm": 1.461753010749817, "learning_rate": 0.0002793688001087104, "loss": 3.2643, "step": 84880 }, { "epoch": 5.767427639624949, "grad_norm": 1.0910134315490723, "learning_rate": 0.0002793263350998777, "loss": 3.2008, "step": 84885 }, { "epoch": 5.767767359695611, "grad_norm": 1.396347165107727, "learning_rate": 0.000279283870091045, "loss": 3.406, "step": 84890 }, { "epoch": 5.768107079766272, "grad_norm": 1.1468162536621094, "learning_rate": 0.00027924140508221225, "loss": 3.2648, "step": 84895 }, { "epoch": 5.768446799836934, "grad_norm": 1.1671777963638306, "learning_rate": 0.0002791989400733795, "loss": 3.3827, "step": 84900 }, { "epoch": 5.7687865199075965, "grad_norm": 1.1562379598617554, "learning_rate": 
0.00027915647506454686, "loss": 3.4368, "step": 84905 }, { "epoch": 5.769126239978258, "grad_norm": 1.1854888200759888, "learning_rate": 0.0002791140100557141, "loss": 3.4067, "step": 84910 }, { "epoch": 5.76946596004892, "grad_norm": 1.9941588640213013, "learning_rate": 0.00027907154504688137, "loss": 3.2623, "step": 84915 }, { "epoch": 5.769805680119582, "grad_norm": 1.2023651599884033, "learning_rate": 0.00027902908003804865, "loss": 3.1923, "step": 84920 }, { "epoch": 5.770145400190243, "grad_norm": 1.5117759704589844, "learning_rate": 0.0002789866150292159, "loss": 3.4612, "step": 84925 }, { "epoch": 5.770485120260905, "grad_norm": 1.1863356828689575, "learning_rate": 0.0002789441500203832, "loss": 3.4586, "step": 84930 }, { "epoch": 5.770824840331567, "grad_norm": 1.3057239055633545, "learning_rate": 0.0002789016850115505, "loss": 3.5171, "step": 84935 }, { "epoch": 5.771164560402228, "grad_norm": 1.3346000909805298, "learning_rate": 0.00027885922000271777, "loss": 3.381, "step": 84940 }, { "epoch": 5.77150428047289, "grad_norm": 1.0978384017944336, "learning_rate": 0.00027881675499388505, "loss": 3.2684, "step": 84945 }, { "epoch": 5.7718440005435525, "grad_norm": 1.4985541105270386, "learning_rate": 0.0002787742899850523, "loss": 3.386, "step": 84950 }, { "epoch": 5.772183720614214, "grad_norm": 0.969405472278595, "learning_rate": 0.0002787318249762196, "loss": 3.3586, "step": 84955 }, { "epoch": 5.772523440684876, "grad_norm": 1.1638365983963013, "learning_rate": 0.0002786893599673869, "loss": 3.312, "step": 84960 }, { "epoch": 5.772863160755538, "grad_norm": 1.268212080001831, "learning_rate": 0.00027864689495855417, "loss": 3.1454, "step": 84965 }, { "epoch": 5.773202880826199, "grad_norm": 1.0635533332824707, "learning_rate": 0.00027860442994972145, "loss": 3.2753, "step": 84970 }, { "epoch": 5.773542600896861, "grad_norm": 1.3782284259796143, "learning_rate": 0.0002785619649408887, "loss": 3.2949, "step": 84975 }, { "epoch": 5.773882320967523, 
"grad_norm": 1.3727492094039917, "learning_rate": 0.000278519499932056, "loss": 3.2003, "step": 84980 }, { "epoch": 5.774222041038184, "grad_norm": 1.2051855325698853, "learning_rate": 0.0002784770349232233, "loss": 3.1303, "step": 84985 }, { "epoch": 5.7745617611088464, "grad_norm": 1.358978509902954, "learning_rate": 0.0002784345699143905, "loss": 3.3944, "step": 84990 }, { "epoch": 5.7749014811795085, "grad_norm": 1.3389266729354858, "learning_rate": 0.00027839210490555785, "loss": 3.3584, "step": 84995 }, { "epoch": 5.77524120125017, "grad_norm": 1.0993645191192627, "learning_rate": 0.0002783496398967251, "loss": 3.3123, "step": 85000 }, { "epoch": 5.775580921320832, "grad_norm": 1.1914427280426025, "learning_rate": 0.00027830717488789235, "loss": 3.1305, "step": 85005 }, { "epoch": 5.775920641391494, "grad_norm": 1.4804418087005615, "learning_rate": 0.0002782647098790597, "loss": 3.3008, "step": 85010 }, { "epoch": 5.776260361462155, "grad_norm": 1.127252221107483, "learning_rate": 0.00027822224487022697, "loss": 3.4893, "step": 85015 }, { "epoch": 5.776600081532817, "grad_norm": 1.1954293251037598, "learning_rate": 0.0002781797798613942, "loss": 3.303, "step": 85020 }, { "epoch": 5.776939801603479, "grad_norm": 1.2077618837356567, "learning_rate": 0.00027813731485256147, "loss": 3.1957, "step": 85025 }, { "epoch": 5.77727952167414, "grad_norm": 1.1783565282821655, "learning_rate": 0.0002780948498437288, "loss": 3.2626, "step": 85030 }, { "epoch": 5.7776192417448025, "grad_norm": 1.1992696523666382, "learning_rate": 0.00027805238483489603, "loss": 3.4535, "step": 85035 }, { "epoch": 5.7779589618154645, "grad_norm": 1.4663033485412598, "learning_rate": 0.0002780099198260633, "loss": 3.1857, "step": 85040 }, { "epoch": 5.778298681886126, "grad_norm": 1.1198762655258179, "learning_rate": 0.00027796745481723065, "loss": 3.6761, "step": 85045 }, { "epoch": 5.778638401956788, "grad_norm": 1.6122483015060425, "learning_rate": 0.0002779249898083979, "loss": 3.1531, 
"step": 85050 }, { "epoch": 5.77897812202745, "grad_norm": 1.1420122385025024, "learning_rate": 0.00027788252479956515, "loss": 3.3758, "step": 85055 }, { "epoch": 5.779317842098111, "grad_norm": 1.95602285861969, "learning_rate": 0.00027784005979073243, "loss": 3.192, "step": 85060 }, { "epoch": 5.779657562168773, "grad_norm": 1.261427402496338, "learning_rate": 0.0002777975947818997, "loss": 3.4234, "step": 85065 }, { "epoch": 5.779997282239434, "grad_norm": 1.2029606103897095, "learning_rate": 0.000277755129773067, "loss": 3.1689, "step": 85070 }, { "epoch": 5.780337002310096, "grad_norm": 1.7619720697402954, "learning_rate": 0.0002777126647642343, "loss": 3.1471, "step": 85075 }, { "epoch": 5.7806767223807585, "grad_norm": 1.323412299156189, "learning_rate": 0.00027767019975540155, "loss": 3.3148, "step": 85080 }, { "epoch": 5.78101644245142, "grad_norm": 1.3285961151123047, "learning_rate": 0.00027762773474656883, "loss": 3.2811, "step": 85085 }, { "epoch": 5.781356162522082, "grad_norm": 1.2660036087036133, "learning_rate": 0.0002775852697377361, "loss": 3.3305, "step": 85090 }, { "epoch": 5.781695882592744, "grad_norm": 1.2617130279541016, "learning_rate": 0.00027754280472890334, "loss": 3.4774, "step": 85095 }, { "epoch": 5.782035602663405, "grad_norm": 1.323702096939087, "learning_rate": 0.0002775003397200707, "loss": 3.1867, "step": 85100 }, { "epoch": 5.782375322734067, "grad_norm": 1.4476711750030518, "learning_rate": 0.00027745787471123795, "loss": 3.0198, "step": 85105 }, { "epoch": 5.782715042804729, "grad_norm": 1.028996229171753, "learning_rate": 0.0002774154097024052, "loss": 3.2995, "step": 85110 }, { "epoch": 5.78305476287539, "grad_norm": 1.2113778591156006, "learning_rate": 0.0002773729446935725, "loss": 3.3849, "step": 85115 }, { "epoch": 5.783394482946052, "grad_norm": 1.3223539590835571, "learning_rate": 0.0002773304796847398, "loss": 3.077, "step": 85120 }, { "epoch": 5.7837342030167145, "grad_norm": 1.201650619506836, "learning_rate": 
0.0002772880146759071, "loss": 3.3697, "step": 85125 }, { "epoch": 5.784073923087376, "grad_norm": 1.3800824880599976, "learning_rate": 0.0002772455496670743, "loss": 3.1875, "step": 85130 }, { "epoch": 5.784413643158038, "grad_norm": 1.332576036453247, "learning_rate": 0.00027720308465824163, "loss": 3.5785, "step": 85135 }, { "epoch": 5.7847533632287, "grad_norm": 1.0415276288986206, "learning_rate": 0.0002771606196494089, "loss": 3.1944, "step": 85140 }, { "epoch": 5.785093083299361, "grad_norm": 1.238810420036316, "learning_rate": 0.00027711815464057614, "loss": 3.3862, "step": 85145 }, { "epoch": 5.785432803370023, "grad_norm": 1.268378734588623, "learning_rate": 0.0002770756896317435, "loss": 3.2472, "step": 85150 }, { "epoch": 5.785772523440685, "grad_norm": 1.4332443475723267, "learning_rate": 0.00027703322462291075, "loss": 3.2678, "step": 85155 }, { "epoch": 5.786112243511346, "grad_norm": 1.4160457849502563, "learning_rate": 0.000276990759614078, "loss": 3.3057, "step": 85160 }, { "epoch": 5.786451963582008, "grad_norm": 1.4214847087860107, "learning_rate": 0.00027694829460524526, "loss": 3.3073, "step": 85165 }, { "epoch": 5.7867916836526705, "grad_norm": 1.1827274560928345, "learning_rate": 0.0002769058295964126, "loss": 3.3687, "step": 85170 }, { "epoch": 5.787131403723332, "grad_norm": 1.1697245836257935, "learning_rate": 0.0002768633645875798, "loss": 3.2944, "step": 85175 }, { "epoch": 5.787471123793994, "grad_norm": 1.2032160758972168, "learning_rate": 0.0002768208995787471, "loss": 3.6066, "step": 85180 }, { "epoch": 5.787810843864656, "grad_norm": 1.1150177717208862, "learning_rate": 0.00027677843456991444, "loss": 3.3131, "step": 85185 }, { "epoch": 5.788150563935317, "grad_norm": 1.1267001628875732, "learning_rate": 0.00027673596956108166, "loss": 3.5023, "step": 85190 }, { "epoch": 5.788490284005979, "grad_norm": 1.2242541313171387, "learning_rate": 0.00027669350455224894, "loss": 3.3552, "step": 85195 }, { "epoch": 5.788830004076641, 
"grad_norm": 1.6000311374664307, "learning_rate": 0.0002766510395434163, "loss": 3.1673, "step": 85200 }, { "epoch": 5.789169724147302, "grad_norm": 1.3017220497131348, "learning_rate": 0.0002766085745345835, "loss": 3.4193, "step": 85205 }, { "epoch": 5.789509444217964, "grad_norm": 1.3485718965530396, "learning_rate": 0.0002765661095257508, "loss": 3.4895, "step": 85210 }, { "epoch": 5.7898491642886265, "grad_norm": 1.481505036354065, "learning_rate": 0.00027652364451691806, "loss": 3.1136, "step": 85215 }, { "epoch": 5.790188884359288, "grad_norm": 1.2864323854446411, "learning_rate": 0.00027648117950808534, "loss": 3.3708, "step": 85220 }, { "epoch": 5.79052860442995, "grad_norm": 1.4527884721755981, "learning_rate": 0.0002764387144992526, "loss": 3.1486, "step": 85225 }, { "epoch": 5.790868324500612, "grad_norm": 1.2511087656021118, "learning_rate": 0.0002763962494904199, "loss": 3.4025, "step": 85230 }, { "epoch": 5.791208044571273, "grad_norm": 1.3316190242767334, "learning_rate": 0.0002763537844815872, "loss": 3.2318, "step": 85235 }, { "epoch": 5.791547764641935, "grad_norm": 1.3150681257247925, "learning_rate": 0.00027631131947275446, "loss": 3.2183, "step": 85240 }, { "epoch": 5.791887484712597, "grad_norm": 1.335488200187683, "learning_rate": 0.00027626885446392174, "loss": 3.4366, "step": 85245 }, { "epoch": 5.792227204783258, "grad_norm": 1.3609294891357422, "learning_rate": 0.00027622638945508897, "loss": 3.3631, "step": 85250 }, { "epoch": 5.79256692485392, "grad_norm": 1.3052845001220703, "learning_rate": 0.0002761839244462563, "loss": 3.1836, "step": 85255 }, { "epoch": 5.7929066449245825, "grad_norm": 1.4123517274856567, "learning_rate": 0.0002761414594374236, "loss": 3.4962, "step": 85260 }, { "epoch": 5.793246364995244, "grad_norm": 1.2263911962509155, "learning_rate": 0.0002760989944285908, "loss": 3.1595, "step": 85265 }, { "epoch": 5.793586085065906, "grad_norm": 1.779314637184143, "learning_rate": 0.00027605652941975814, "loss": 3.1152, 
"step": 85270 }, { "epoch": 5.793925805136568, "grad_norm": 0.9780406355857849, "learning_rate": 0.0002760140644109254, "loss": 3.3513, "step": 85275 }, { "epoch": 5.794265525207229, "grad_norm": 1.2546619176864624, "learning_rate": 0.00027597159940209265, "loss": 3.5719, "step": 85280 }, { "epoch": 5.794605245277891, "grad_norm": 1.106147050857544, "learning_rate": 0.00027592913439325993, "loss": 3.4802, "step": 85285 }, { "epoch": 5.794944965348552, "grad_norm": 1.0219818353652954, "learning_rate": 0.00027588666938442726, "loss": 3.2741, "step": 85290 }, { "epoch": 5.795284685419214, "grad_norm": 1.3442045450210571, "learning_rate": 0.00027584420437559454, "loss": 3.3476, "step": 85295 }, { "epoch": 5.7956244054898765, "grad_norm": 1.2660572528839111, "learning_rate": 0.00027580173936676177, "loss": 3.3307, "step": 85300 }, { "epoch": 5.795964125560538, "grad_norm": 1.427358627319336, "learning_rate": 0.0002757592743579291, "loss": 3.4207, "step": 85305 }, { "epoch": 5.7963038456312, "grad_norm": 1.3310468196868896, "learning_rate": 0.0002757168093490964, "loss": 3.499, "step": 85310 }, { "epoch": 5.796643565701862, "grad_norm": 1.0889900922775269, "learning_rate": 0.0002756743443402636, "loss": 3.2145, "step": 85315 }, { "epoch": 5.796983285772523, "grad_norm": 1.2952861785888672, "learning_rate": 0.0002756318793314309, "loss": 3.0746, "step": 85320 }, { "epoch": 5.797323005843185, "grad_norm": 0.9555054903030396, "learning_rate": 0.0002755894143225982, "loss": 3.1382, "step": 85325 }, { "epoch": 5.797662725913847, "grad_norm": 1.188948631286621, "learning_rate": 0.00027554694931376545, "loss": 3.3764, "step": 85330 }, { "epoch": 5.798002445984508, "grad_norm": 0.9291104674339294, "learning_rate": 0.00027550448430493273, "loss": 3.3971, "step": 85335 }, { "epoch": 5.79834216605517, "grad_norm": 1.513279914855957, "learning_rate": 0.00027546201929610006, "loss": 3.2111, "step": 85340 }, { "epoch": 5.7986818861258325, "grad_norm": 1.6831773519515991, 
"learning_rate": 0.0002754195542872673, "loss": 3.233, "step": 85345 }, { "epoch": 5.799021606196494, "grad_norm": 1.6546205282211304, "learning_rate": 0.00027537708927843457, "loss": 3.3451, "step": 85350 }, { "epoch": 5.799361326267156, "grad_norm": 1.1440725326538086, "learning_rate": 0.00027533462426960185, "loss": 3.2686, "step": 85355 }, { "epoch": 5.799701046337818, "grad_norm": 1.3568276166915894, "learning_rate": 0.00027529215926076913, "loss": 3.2988, "step": 85360 }, { "epoch": 5.800040766408479, "grad_norm": 1.2465108633041382, "learning_rate": 0.0002752496942519364, "loss": 3.2364, "step": 85365 }, { "epoch": 5.800380486479141, "grad_norm": 1.3226251602172852, "learning_rate": 0.0002752072292431037, "loss": 2.8038, "step": 85370 }, { "epoch": 5.800720206549803, "grad_norm": 1.3975633382797241, "learning_rate": 0.00027516476423427097, "loss": 3.2705, "step": 85375 }, { "epoch": 5.801059926620464, "grad_norm": 1.1629301309585571, "learning_rate": 0.00027512229922543825, "loss": 3.1704, "step": 85380 }, { "epoch": 5.801399646691126, "grad_norm": 1.2006558179855347, "learning_rate": 0.00027507983421660553, "loss": 3.3264, "step": 85385 }, { "epoch": 5.8017393667617885, "grad_norm": 1.189578890800476, "learning_rate": 0.00027503736920777276, "loss": 3.2016, "step": 85390 }, { "epoch": 5.80207908683245, "grad_norm": 1.1274433135986328, "learning_rate": 0.0002749949041989401, "loss": 3.5234, "step": 85395 }, { "epoch": 5.802418806903112, "grad_norm": 1.4365142583847046, "learning_rate": 0.00027495243919010737, "loss": 3.2683, "step": 85400 }, { "epoch": 5.802758526973774, "grad_norm": 1.3482987880706787, "learning_rate": 0.0002749099741812746, "loss": 3.3981, "step": 85405 }, { "epoch": 5.803098247044435, "grad_norm": 1.3586150407791138, "learning_rate": 0.00027486750917244193, "loss": 3.31, "step": 85410 }, { "epoch": 5.803437967115097, "grad_norm": 1.330902338027954, "learning_rate": 0.0002748250441636092, "loss": 3.0941, "step": 85415 }, { "epoch": 
5.803777687185759, "grad_norm": 1.1312602758407593, "learning_rate": 0.00027478257915477644, "loss": 3.1559, "step": 85420 }, { "epoch": 5.80411740725642, "grad_norm": 1.499912142753601, "learning_rate": 0.0002747401141459437, "loss": 3.0384, "step": 85425 }, { "epoch": 5.804457127327082, "grad_norm": 1.2388596534729004, "learning_rate": 0.00027469764913711105, "loss": 3.05, "step": 85430 }, { "epoch": 5.8047968473977445, "grad_norm": 1.6223987340927124, "learning_rate": 0.0002746551841282783, "loss": 3.5963, "step": 85435 }, { "epoch": 5.805136567468406, "grad_norm": 1.2730101346969604, "learning_rate": 0.00027461271911944556, "loss": 3.1708, "step": 85440 }, { "epoch": 5.805476287539068, "grad_norm": 1.577818512916565, "learning_rate": 0.0002745702541106129, "loss": 3.3804, "step": 85445 }, { "epoch": 5.80581600760973, "grad_norm": 1.6154651641845703, "learning_rate": 0.0002745277891017801, "loss": 3.2939, "step": 85450 }, { "epoch": 5.806155727680391, "grad_norm": 1.5749222040176392, "learning_rate": 0.0002744853240929474, "loss": 3.3757, "step": 85455 }, { "epoch": 5.806495447751053, "grad_norm": 1.1591311693191528, "learning_rate": 0.00027444285908411473, "loss": 3.268, "step": 85460 }, { "epoch": 5.806835167821715, "grad_norm": 1.1451342105865479, "learning_rate": 0.000274400394075282, "loss": 3.3938, "step": 85465 }, { "epoch": 5.807174887892376, "grad_norm": 1.4201918840408325, "learning_rate": 0.00027435792906644924, "loss": 3.4015, "step": 85470 }, { "epoch": 5.807514607963038, "grad_norm": 1.2537362575531006, "learning_rate": 0.0002743154640576165, "loss": 3.2704, "step": 85475 }, { "epoch": 5.8078543280337005, "grad_norm": 1.0710997581481934, "learning_rate": 0.00027427299904878385, "loss": 3.3698, "step": 85480 }, { "epoch": 5.808194048104362, "grad_norm": 1.2737088203430176, "learning_rate": 0.0002742305340399511, "loss": 3.4659, "step": 85485 }, { "epoch": 5.808533768175024, "grad_norm": 1.098345398902893, "learning_rate": 0.00027418806903111836, 
"loss": 3.3645, "step": 85490 }, { "epoch": 5.808873488245686, "grad_norm": 1.576033115386963, "learning_rate": 0.0002741456040222857, "loss": 3.1993, "step": 85495 }, { "epoch": 5.809213208316347, "grad_norm": 1.3407540321350098, "learning_rate": 0.0002741031390134529, "loss": 3.3191, "step": 85500 }, { "epoch": 5.809552928387009, "grad_norm": 1.3133260011672974, "learning_rate": 0.0002740606740046202, "loss": 3.239, "step": 85505 }, { "epoch": 5.809892648457671, "grad_norm": 1.1521143913269043, "learning_rate": 0.0002740182089957875, "loss": 3.2104, "step": 85510 }, { "epoch": 5.810232368528332, "grad_norm": 1.5840480327606201, "learning_rate": 0.00027397574398695476, "loss": 3.3541, "step": 85515 }, { "epoch": 5.810572088598994, "grad_norm": 1.2854266166687012, "learning_rate": 0.00027393327897812204, "loss": 3.3066, "step": 85520 }, { "epoch": 5.8109118086696565, "grad_norm": 1.2355668544769287, "learning_rate": 0.0002738908139692893, "loss": 3.5682, "step": 85525 }, { "epoch": 5.811251528740318, "grad_norm": 1.195278286933899, "learning_rate": 0.0002738483489604566, "loss": 3.4066, "step": 85530 }, { "epoch": 5.81159124881098, "grad_norm": 1.2258871793746948, "learning_rate": 0.0002738058839516239, "loss": 3.4411, "step": 85535 }, { "epoch": 5.811930968881642, "grad_norm": 1.9804116487503052, "learning_rate": 0.00027376341894279116, "loss": 3.1184, "step": 85540 }, { "epoch": 5.812270688952303, "grad_norm": 1.1383957862854004, "learning_rate": 0.0002737209539339584, "loss": 3.294, "step": 85545 }, { "epoch": 5.812610409022965, "grad_norm": 1.1343777179718018, "learning_rate": 0.0002736784889251257, "loss": 3.1639, "step": 85550 }, { "epoch": 5.812950129093627, "grad_norm": 1.3826037645339966, "learning_rate": 0.000273636023916293, "loss": 3.3641, "step": 85555 }, { "epoch": 5.813289849164288, "grad_norm": 1.0263694524765015, "learning_rate": 0.0002735935589074602, "loss": 3.3263, "step": 85560 }, { "epoch": 5.8136295692349504, "grad_norm": 0.8664525151252747, 
"learning_rate": 0.00027355109389862756, "loss": 3.3747, "step": 85565 }, { "epoch": 5.8139692893056125, "grad_norm": 1.2996591329574585, "learning_rate": 0.00027350862888979484, "loss": 3.2539, "step": 85570 }, { "epoch": 5.814309009376274, "grad_norm": 1.2506146430969238, "learning_rate": 0.00027346616388096206, "loss": 3.357, "step": 85575 }, { "epoch": 5.814648729446936, "grad_norm": 1.3964588642120361, "learning_rate": 0.00027342369887212934, "loss": 3.417, "step": 85580 }, { "epoch": 5.814988449517598, "grad_norm": 1.0884265899658203, "learning_rate": 0.0002733812338632967, "loss": 3.4498, "step": 85585 }, { "epoch": 5.815328169588259, "grad_norm": 1.4446206092834473, "learning_rate": 0.0002733387688544639, "loss": 3.0732, "step": 85590 }, { "epoch": 5.815667889658921, "grad_norm": 1.609817385673523, "learning_rate": 0.0002732963038456312, "loss": 3.3523, "step": 85595 }, { "epoch": 5.816007609729583, "grad_norm": 1.2876925468444824, "learning_rate": 0.0002732538388367985, "loss": 3.4017, "step": 85600 }, { "epoch": 5.816347329800244, "grad_norm": 1.2603533267974854, "learning_rate": 0.00027321137382796574, "loss": 3.281, "step": 85605 }, { "epoch": 5.8166870498709065, "grad_norm": 1.500557541847229, "learning_rate": 0.000273168908819133, "loss": 3.178, "step": 85610 }, { "epoch": 5.8170267699415685, "grad_norm": 1.9426188468933105, "learning_rate": 0.0002731264438103003, "loss": 3.2263, "step": 85615 }, { "epoch": 5.81736649001223, "grad_norm": 1.2825933694839478, "learning_rate": 0.0002730839788014676, "loss": 3.1477, "step": 85620 }, { "epoch": 5.817706210082892, "grad_norm": 1.5449198484420776, "learning_rate": 0.00027304151379263487, "loss": 3.2583, "step": 85625 }, { "epoch": 5.818045930153554, "grad_norm": 1.2599279880523682, "learning_rate": 0.00027299904878380215, "loss": 3.3651, "step": 85630 }, { "epoch": 5.818385650224215, "grad_norm": 1.3582955598831177, "learning_rate": 0.0002729565837749695, "loss": 3.3289, "step": 85635 }, { "epoch": 
5.818725370294877, "grad_norm": 1.0959771871566772, "learning_rate": 0.0002729141187661367, "loss": 3.3484, "step": 85640 }, { "epoch": 5.819065090365539, "grad_norm": 1.439493179321289, "learning_rate": 0.000272871653757304, "loss": 3.3354, "step": 85645 }, { "epoch": 5.8194048104362, "grad_norm": 1.5075480937957764, "learning_rate": 0.00027282918874847127, "loss": 3.366, "step": 85650 }, { "epoch": 5.8197445305068625, "grad_norm": 1.0318853855133057, "learning_rate": 0.00027278672373963855, "loss": 3.3702, "step": 85655 }, { "epoch": 5.8200842505775245, "grad_norm": 1.2532958984375, "learning_rate": 0.0002727442587308058, "loss": 3.3898, "step": 85660 }, { "epoch": 5.820423970648186, "grad_norm": 1.1714366674423218, "learning_rate": 0.0002727017937219731, "loss": 3.3635, "step": 85665 }, { "epoch": 5.820763690718848, "grad_norm": 1.165490984916687, "learning_rate": 0.0002726593287131404, "loss": 3.3952, "step": 85670 }, { "epoch": 5.82110341078951, "grad_norm": 1.2524491548538208, "learning_rate": 0.00027261686370430767, "loss": 3.2563, "step": 85675 }, { "epoch": 5.821443130860171, "grad_norm": 1.3225467205047607, "learning_rate": 0.00027257439869547495, "loss": 3.3266, "step": 85680 }, { "epoch": 5.821782850930833, "grad_norm": 1.1339020729064941, "learning_rate": 0.00027253193368664217, "loss": 3.1623, "step": 85685 }, { "epoch": 5.822122571001495, "grad_norm": 1.285220980644226, "learning_rate": 0.0002724894686778095, "loss": 3.2033, "step": 85690 }, { "epoch": 5.822462291072156, "grad_norm": 1.4186530113220215, "learning_rate": 0.0002724470036689768, "loss": 3.4485, "step": 85695 }, { "epoch": 5.8228020111428185, "grad_norm": 1.450653314590454, "learning_rate": 0.000272404538660144, "loss": 3.2848, "step": 85700 }, { "epoch": 5.8231417312134806, "grad_norm": 1.401695728302002, "learning_rate": 0.00027236207365131135, "loss": 3.2565, "step": 85705 }, { "epoch": 5.823481451284142, "grad_norm": 1.215205192565918, "learning_rate": 0.0002723196086424786, "loss": 
3.2935, "step": 85710 }, { "epoch": 5.823821171354804, "grad_norm": 1.1354222297668457, "learning_rate": 0.00027227714363364585, "loss": 3.0547, "step": 85715 }, { "epoch": 5.824160891425466, "grad_norm": 1.5236142873764038, "learning_rate": 0.00027223467862481313, "loss": 3.4158, "step": 85720 }, { "epoch": 5.824500611496127, "grad_norm": 1.4730199575424194, "learning_rate": 0.00027219221361598047, "loss": 3.2805, "step": 85725 }, { "epoch": 5.824840331566789, "grad_norm": 1.588536262512207, "learning_rate": 0.0002721497486071477, "loss": 3.4179, "step": 85730 }, { "epoch": 5.825180051637451, "grad_norm": 1.5890979766845703, "learning_rate": 0.00027210728359831497, "loss": 3.1369, "step": 85735 }, { "epoch": 5.825519771708112, "grad_norm": 1.1993203163146973, "learning_rate": 0.0002720648185894823, "loss": 3.2388, "step": 85740 }, { "epoch": 5.8258594917787745, "grad_norm": 1.4227778911590576, "learning_rate": 0.00027202235358064953, "loss": 3.3221, "step": 85745 }, { "epoch": 5.826199211849436, "grad_norm": 1.2588692903518677, "learning_rate": 0.0002719798885718168, "loss": 3.3572, "step": 85750 }, { "epoch": 5.826538931920098, "grad_norm": 1.5560704469680786, "learning_rate": 0.00027193742356298415, "loss": 3.1614, "step": 85755 }, { "epoch": 5.82687865199076, "grad_norm": 1.4322900772094727, "learning_rate": 0.0002718949585541514, "loss": 3.0093, "step": 85760 }, { "epoch": 5.827218372061421, "grad_norm": 1.206263780593872, "learning_rate": 0.00027185249354531865, "loss": 3.5761, "step": 85765 }, { "epoch": 5.827558092132083, "grad_norm": 1.5728665590286255, "learning_rate": 0.00027181002853648593, "loss": 3.2486, "step": 85770 }, { "epoch": 5.827897812202745, "grad_norm": 1.3521313667297363, "learning_rate": 0.0002717675635276532, "loss": 3.0196, "step": 85775 }, { "epoch": 5.828237532273406, "grad_norm": 1.2034752368927002, "learning_rate": 0.0002717250985188205, "loss": 3.357, "step": 85780 }, { "epoch": 5.828577252344068, "grad_norm": 1.361412763595581, 
"learning_rate": 0.0002716826335099878, "loss": 3.4155, "step": 85785 }, { "epoch": 5.8289169724147305, "grad_norm": 1.2562251091003418, "learning_rate": 0.00027164016850115505, "loss": 3.4627, "step": 85790 }, { "epoch": 5.829256692485392, "grad_norm": 0.990273118019104, "learning_rate": 0.00027159770349232233, "loss": 3.3768, "step": 85795 }, { "epoch": 5.829596412556054, "grad_norm": 1.2820311784744263, "learning_rate": 0.0002715552384834896, "loss": 3.1534, "step": 85800 }, { "epoch": 5.829936132626716, "grad_norm": 1.4152657985687256, "learning_rate": 0.0002715127734746569, "loss": 3.1742, "step": 85805 }, { "epoch": 5.830275852697377, "grad_norm": 1.206924319267273, "learning_rate": 0.0002714703084658242, "loss": 3.441, "step": 85810 }, { "epoch": 5.830615572768039, "grad_norm": 1.3845887184143066, "learning_rate": 0.00027142784345699145, "loss": 3.1672, "step": 85815 }, { "epoch": 5.830955292838701, "grad_norm": 0.9515519142150879, "learning_rate": 0.00027138537844815873, "loss": 3.2759, "step": 85820 }, { "epoch": 5.831295012909362, "grad_norm": 1.1755646467208862, "learning_rate": 0.000271342913439326, "loss": 3.4842, "step": 85825 }, { "epoch": 5.831634732980024, "grad_norm": 1.3653995990753174, "learning_rate": 0.0002713004484304933, "loss": 3.2973, "step": 85830 }, { "epoch": 5.8319744530506865, "grad_norm": 1.066773772239685, "learning_rate": 0.0002712579834216606, "loss": 3.5821, "step": 85835 }, { "epoch": 5.832314173121348, "grad_norm": 1.005752682685852, "learning_rate": 0.0002712155184128278, "loss": 3.515, "step": 85840 }, { "epoch": 5.83265389319201, "grad_norm": 1.330147624015808, "learning_rate": 0.00027117305340399513, "loss": 3.2753, "step": 85845 }, { "epoch": 5.832993613262672, "grad_norm": 1.2627677917480469, "learning_rate": 0.0002711305883951624, "loss": 3.2969, "step": 85850 }, { "epoch": 5.833333333333333, "grad_norm": 1.2781283855438232, "learning_rate": 0.00027108812338632964, "loss": 3.5504, "step": 85855 }, { "epoch": 
5.833673053403995, "grad_norm": 1.2194565534591675, "learning_rate": 0.000271045658377497, "loss": 3.419, "step": 85860 }, { "epoch": 5.834012773474657, "grad_norm": 1.2802863121032715, "learning_rate": 0.00027100319336866425, "loss": 3.2027, "step": 85865 }, { "epoch": 5.834352493545318, "grad_norm": 1.185354471206665, "learning_rate": 0.0002709607283598315, "loss": 3.4256, "step": 85870 }, { "epoch": 5.8346922136159804, "grad_norm": 1.335098385810852, "learning_rate": 0.00027091826335099876, "loss": 3.1124, "step": 85875 }, { "epoch": 5.8350319336866425, "grad_norm": 1.110873818397522, "learning_rate": 0.0002708757983421661, "loss": 3.3746, "step": 85880 }, { "epoch": 5.835371653757304, "grad_norm": 1.1026893854141235, "learning_rate": 0.0002708333333333333, "loss": 3.195, "step": 85885 }, { "epoch": 5.835711373827966, "grad_norm": 1.531451940536499, "learning_rate": 0.0002707908683245006, "loss": 3.4736, "step": 85890 }, { "epoch": 5.836051093898628, "grad_norm": 1.4302136898040771, "learning_rate": 0.00027074840331566794, "loss": 3.2623, "step": 85895 }, { "epoch": 5.836390813969289, "grad_norm": 1.2049081325531006, "learning_rate": 0.00027070593830683516, "loss": 3.1524, "step": 85900 }, { "epoch": 5.836730534039951, "grad_norm": 1.4240689277648926, "learning_rate": 0.00027066347329800244, "loss": 3.2735, "step": 85905 }, { "epoch": 5.837070254110613, "grad_norm": 1.3052165508270264, "learning_rate": 0.0002706210082891697, "loss": 3.352, "step": 85910 }, { "epoch": 5.837409974181274, "grad_norm": 1.082636833190918, "learning_rate": 0.000270578543280337, "loss": 3.2217, "step": 85915 }, { "epoch": 5.8377496942519365, "grad_norm": 1.339432954788208, "learning_rate": 0.0002705360782715043, "loss": 3.4498, "step": 85920 }, { "epoch": 5.8380894143225985, "grad_norm": 1.131876826286316, "learning_rate": 0.00027049361326267156, "loss": 3.2759, "step": 85925 }, { "epoch": 5.83842913439326, "grad_norm": 1.1734099388122559, "learning_rate": 0.00027045114825383884, 
"loss": 3.5209, "step": 85930 }, { "epoch": 5.838768854463922, "grad_norm": 1.5527393817901611, "learning_rate": 0.0002704086832450061, "loss": 3.1064, "step": 85935 }, { "epoch": 5.839108574534584, "grad_norm": 1.1044085025787354, "learning_rate": 0.0002703662182361734, "loss": 3.0343, "step": 85940 }, { "epoch": 5.839448294605245, "grad_norm": 1.321743369102478, "learning_rate": 0.00027032375322734063, "loss": 3.3528, "step": 85945 }, { "epoch": 5.839788014675907, "grad_norm": 1.134660005569458, "learning_rate": 0.00027028128821850796, "loss": 3.5353, "step": 85950 }, { "epoch": 5.840127734746569, "grad_norm": 1.1958216428756714, "learning_rate": 0.00027023882320967524, "loss": 3.3801, "step": 85955 }, { "epoch": 5.84046745481723, "grad_norm": 1.3577933311462402, "learning_rate": 0.00027019635820084247, "loss": 3.2556, "step": 85960 }, { "epoch": 5.8408071748878925, "grad_norm": 1.3497726917266846, "learning_rate": 0.0002701538931920098, "loss": 3.1233, "step": 85965 }, { "epoch": 5.841146894958554, "grad_norm": 1.6195279359817505, "learning_rate": 0.0002701114281831771, "loss": 3.0349, "step": 85970 }, { "epoch": 5.841486615029216, "grad_norm": 1.1244257688522339, "learning_rate": 0.00027006896317434436, "loss": 3.4757, "step": 85975 }, { "epoch": 5.841826335099878, "grad_norm": 1.3264631032943726, "learning_rate": 0.0002700264981655116, "loss": 3.2374, "step": 85980 }, { "epoch": 5.842166055170539, "grad_norm": 1.4591782093048096, "learning_rate": 0.0002699840331566789, "loss": 3.6727, "step": 85985 }, { "epoch": 5.842505775241201, "grad_norm": 1.8295342922210693, "learning_rate": 0.0002699415681478462, "loss": 3.2902, "step": 85990 }, { "epoch": 5.842845495311863, "grad_norm": 1.0080461502075195, "learning_rate": 0.00026989910313901343, "loss": 3.3088, "step": 85995 }, { "epoch": 5.843185215382524, "grad_norm": 1.4616550207138062, "learning_rate": 0.00026985663813018076, "loss": 3.0636, "step": 86000 }, { "epoch": 5.843524935453186, "grad_norm": 
1.6676989793777466, "learning_rate": 0.00026981417312134804, "loss": 3.2261, "step": 86005 }, { "epoch": 5.8438646555238485, "grad_norm": 1.4481626749038696, "learning_rate": 0.00026977170811251527, "loss": 3.4344, "step": 86010 }, { "epoch": 5.84420437559451, "grad_norm": 1.3704136610031128, "learning_rate": 0.00026972924310368255, "loss": 3.2618, "step": 86015 }, { "epoch": 5.844544095665172, "grad_norm": 1.5417001247406006, "learning_rate": 0.0002696867780948499, "loss": 3.2467, "step": 86020 }, { "epoch": 5.844883815735834, "grad_norm": 1.394530177116394, "learning_rate": 0.0002696443130860171, "loss": 3.3071, "step": 86025 }, { "epoch": 5.845223535806495, "grad_norm": 1.5212455987930298, "learning_rate": 0.0002696018480771844, "loss": 3.5105, "step": 86030 }, { "epoch": 5.845563255877157, "grad_norm": 1.293558120727539, "learning_rate": 0.0002695593830683517, "loss": 3.2893, "step": 86035 }, { "epoch": 5.845902975947819, "grad_norm": 1.3481501340866089, "learning_rate": 0.00026951691805951895, "loss": 3.3031, "step": 86040 }, { "epoch": 5.84624269601848, "grad_norm": 1.4333127737045288, "learning_rate": 0.00026947445305068623, "loss": 3.2822, "step": 86045 }, { "epoch": 5.846582416089142, "grad_norm": 1.3235191106796265, "learning_rate": 0.00026943198804185356, "loss": 3.141, "step": 86050 }, { "epoch": 5.8469221361598045, "grad_norm": 1.4649304151535034, "learning_rate": 0.0002693895230330208, "loss": 3.061, "step": 86055 }, { "epoch": 5.847261856230466, "grad_norm": 1.206311821937561, "learning_rate": 0.00026934705802418807, "loss": 3.2946, "step": 86060 }, { "epoch": 5.847601576301128, "grad_norm": 1.1563048362731934, "learning_rate": 0.00026930459301535535, "loss": 3.0847, "step": 86065 }, { "epoch": 5.84794129637179, "grad_norm": 1.353760838508606, "learning_rate": 0.00026926212800652263, "loss": 3.0969, "step": 86070 }, { "epoch": 5.848281016442451, "grad_norm": 1.2363712787628174, "learning_rate": 0.0002692196629976899, "loss": 3.3496, "step": 86075 }, 
{ "epoch": 5.848620736513113, "grad_norm": 1.3570770025253296, "learning_rate": 0.0002691771979888572, "loss": 3.1983, "step": 86080 }, { "epoch": 5.848960456583775, "grad_norm": 1.2526774406433105, "learning_rate": 0.00026913473298002447, "loss": 3.2112, "step": 86085 }, { "epoch": 5.849300176654436, "grad_norm": 1.16535222530365, "learning_rate": 0.00026909226797119175, "loss": 3.3906, "step": 86090 }, { "epoch": 5.849639896725098, "grad_norm": 1.2533457279205322, "learning_rate": 0.00026904980296235903, "loss": 3.2714, "step": 86095 }, { "epoch": 5.8499796167957605, "grad_norm": 1.4217703342437744, "learning_rate": 0.00026900733795352626, "loss": 3.1678, "step": 86100 }, { "epoch": 5.850319336866422, "grad_norm": 1.0255634784698486, "learning_rate": 0.0002689648729446936, "loss": 3.2787, "step": 86105 }, { "epoch": 5.850659056937084, "grad_norm": 1.0884329080581665, "learning_rate": 0.00026892240793586087, "loss": 3.1877, "step": 86110 }, { "epoch": 5.850998777007746, "grad_norm": 1.1766589879989624, "learning_rate": 0.0002688799429270281, "loss": 3.153, "step": 86115 }, { "epoch": 5.851338497078407, "grad_norm": 1.446290373802185, "learning_rate": 0.00026883747791819543, "loss": 3.2367, "step": 86120 }, { "epoch": 5.851678217149069, "grad_norm": 1.1217025518417358, "learning_rate": 0.0002687950129093627, "loss": 3.0582, "step": 86125 }, { "epoch": 5.852017937219731, "grad_norm": 1.025986909866333, "learning_rate": 0.00026875254790052994, "loss": 3.2961, "step": 86130 }, { "epoch": 5.852357657290392, "grad_norm": 1.4996711015701294, "learning_rate": 0.0002687100828916972, "loss": 3.2754, "step": 86135 }, { "epoch": 5.852697377361054, "grad_norm": 1.2464197874069214, "learning_rate": 0.00026866761788286455, "loss": 3.3558, "step": 86140 }, { "epoch": 5.8530370974317165, "grad_norm": 1.0649781227111816, "learning_rate": 0.00026862515287403183, "loss": 3.3928, "step": 86145 }, { "epoch": 5.853376817502378, "grad_norm": 1.0926326513290405, "learning_rate": 
0.00026858268786519906, "loss": 3.3181, "step": 86150 }, { "epoch": 5.85371653757304, "grad_norm": 1.3224740028381348, "learning_rate": 0.0002685402228563664, "loss": 3.2673, "step": 86155 }, { "epoch": 5.854056257643702, "grad_norm": 1.0495491027832031, "learning_rate": 0.00026849775784753367, "loss": 3.0696, "step": 86160 }, { "epoch": 5.854395977714363, "grad_norm": 1.249300241470337, "learning_rate": 0.0002684552928387009, "loss": 3.5036, "step": 86165 }, { "epoch": 5.854735697785025, "grad_norm": 1.776715636253357, "learning_rate": 0.0002684128278298682, "loss": 3.4251, "step": 86170 }, { "epoch": 5.855075417855687, "grad_norm": 1.3277919292449951, "learning_rate": 0.0002683703628210355, "loss": 3.2748, "step": 86175 }, { "epoch": 5.855415137926348, "grad_norm": 1.2745782136917114, "learning_rate": 0.00026832789781220274, "loss": 3.4185, "step": 86180 }, { "epoch": 5.8557548579970105, "grad_norm": 1.2023942470550537, "learning_rate": 0.00026828543280337, "loss": 3.4431, "step": 86185 }, { "epoch": 5.8560945780676725, "grad_norm": 1.229965090751648, "learning_rate": 0.00026824296779453735, "loss": 3.282, "step": 86190 }, { "epoch": 5.856434298138334, "grad_norm": 0.9056265950202942, "learning_rate": 0.0002682005027857046, "loss": 3.3693, "step": 86195 }, { "epoch": 5.856774018208996, "grad_norm": 1.2011858224868774, "learning_rate": 0.00026815803777687186, "loss": 3.2539, "step": 86200 }, { "epoch": 5.857113738279658, "grad_norm": 1.5264662504196167, "learning_rate": 0.00026811557276803914, "loss": 3.3476, "step": 86205 }, { "epoch": 5.857453458350319, "grad_norm": 1.5515345335006714, "learning_rate": 0.0002680731077592064, "loss": 3.2567, "step": 86210 }, { "epoch": 5.857793178420981, "grad_norm": 1.2550910711288452, "learning_rate": 0.0002680306427503737, "loss": 3.2582, "step": 86215 }, { "epoch": 5.858132898491643, "grad_norm": 1.0903642177581787, "learning_rate": 0.000267988177741541, "loss": 3.3277, "step": 86220 }, { "epoch": 5.858472618562304, 
"grad_norm": 1.0988248586654663, "learning_rate": 0.00026794571273270826, "loss": 3.1214, "step": 86225 }, { "epoch": 5.8588123386329665, "grad_norm": 1.3564889430999756, "learning_rate": 0.00026790324772387554, "loss": 3.4825, "step": 86230 }, { "epoch": 5.8591520587036285, "grad_norm": 1.265040397644043, "learning_rate": 0.0002678607827150428, "loss": 3.1837, "step": 86235 }, { "epoch": 5.85949177877429, "grad_norm": 1.18654465675354, "learning_rate": 0.00026781831770621004, "loss": 3.1677, "step": 86240 }, { "epoch": 5.859831498844952, "grad_norm": 1.0764540433883667, "learning_rate": 0.0002677758526973774, "loss": 3.4222, "step": 86245 }, { "epoch": 5.860171218915614, "grad_norm": 1.7314739227294922, "learning_rate": 0.00026773338768854466, "loss": 3.2127, "step": 86250 }, { "epoch": 5.860510938986275, "grad_norm": 1.0459486246109009, "learning_rate": 0.0002676909226797119, "loss": 3.0888, "step": 86255 }, { "epoch": 5.860850659056937, "grad_norm": 1.4711365699768066, "learning_rate": 0.0002676484576708792, "loss": 3.2437, "step": 86260 }, { "epoch": 5.861190379127599, "grad_norm": 1.2508383989334106, "learning_rate": 0.0002676059926620465, "loss": 3.302, "step": 86265 }, { "epoch": 5.86153009919826, "grad_norm": 1.3531886339187622, "learning_rate": 0.0002675635276532137, "loss": 3.452, "step": 86270 }, { "epoch": 5.8618698192689225, "grad_norm": 1.3308967351913452, "learning_rate": 0.000267521062644381, "loss": 3.392, "step": 86275 }, { "epoch": 5.8622095393395846, "grad_norm": 1.0380779504776, "learning_rate": 0.00026747859763554834, "loss": 3.2838, "step": 86280 }, { "epoch": 5.862549259410246, "grad_norm": 1.1952705383300781, "learning_rate": 0.00026743613262671556, "loss": 2.9582, "step": 86285 }, { "epoch": 5.862888979480908, "grad_norm": 1.0680598020553589, "learning_rate": 0.00026739366761788284, "loss": 3.0546, "step": 86290 }, { "epoch": 5.86322869955157, "grad_norm": 1.1339707374572754, "learning_rate": 0.0002673512026090502, "loss": 3.4936, "step": 
86295 }, { "epoch": 5.863568419622231, "grad_norm": 1.0603429079055786, "learning_rate": 0.0002673087376002174, "loss": 3.1081, "step": 86300 }, { "epoch": 5.863908139692893, "grad_norm": 1.2388501167297363, "learning_rate": 0.0002672662725913847, "loss": 3.5375, "step": 86305 }, { "epoch": 5.864247859763555, "grad_norm": 1.4299389123916626, "learning_rate": 0.000267223807582552, "loss": 3.6014, "step": 86310 }, { "epoch": 5.864587579834216, "grad_norm": 1.07322096824646, "learning_rate": 0.0002671813425737193, "loss": 3.589, "step": 86315 }, { "epoch": 5.8649272999048785, "grad_norm": 1.0235443115234375, "learning_rate": 0.0002671388775648865, "loss": 3.4971, "step": 86320 }, { "epoch": 5.865267019975541, "grad_norm": 1.1809617280960083, "learning_rate": 0.0002670964125560538, "loss": 3.1465, "step": 86325 }, { "epoch": 5.865606740046202, "grad_norm": 1.1322832107543945, "learning_rate": 0.00026705394754722114, "loss": 3.4013, "step": 86330 }, { "epoch": 5.865946460116864, "grad_norm": 1.3501977920532227, "learning_rate": 0.00026701148253838837, "loss": 3.3143, "step": 86335 }, { "epoch": 5.866286180187526, "grad_norm": 0.9844681024551392, "learning_rate": 0.00026696901752955565, "loss": 3.4461, "step": 86340 }, { "epoch": 5.866625900258187, "grad_norm": 1.4739491939544678, "learning_rate": 0.000266926552520723, "loss": 2.9737, "step": 86345 }, { "epoch": 5.866965620328849, "grad_norm": 1.0554622411727905, "learning_rate": 0.0002668840875118902, "loss": 3.2889, "step": 86350 }, { "epoch": 5.867305340399511, "grad_norm": 1.258237600326538, "learning_rate": 0.0002668416225030575, "loss": 3.221, "step": 86355 }, { "epoch": 5.867645060470172, "grad_norm": 1.2957226037979126, "learning_rate": 0.00026679915749422477, "loss": 3.1305, "step": 86360 }, { "epoch": 5.8679847805408345, "grad_norm": 1.258837103843689, "learning_rate": 0.00026675669248539205, "loss": 3.4325, "step": 86365 }, { "epoch": 5.868324500611497, "grad_norm": 1.0286636352539062, "learning_rate": 
0.0002667142274765593, "loss": 3.2939, "step": 86370 }, { "epoch": 5.868664220682158, "grad_norm": 1.1357322931289673, "learning_rate": 0.0002666717624677266, "loss": 3.6465, "step": 86375 }, { "epoch": 5.86900394075282, "grad_norm": 1.5036101341247559, "learning_rate": 0.0002666292974588939, "loss": 3.2619, "step": 86380 }, { "epoch": 5.869343660823482, "grad_norm": 1.5515928268432617, "learning_rate": 0.00026658683245006117, "loss": 3.3001, "step": 86385 }, { "epoch": 5.869683380894143, "grad_norm": 1.3513405323028564, "learning_rate": 0.00026654436744122845, "loss": 3.4777, "step": 86390 }, { "epoch": 5.870023100964805, "grad_norm": 1.4033169746398926, "learning_rate": 0.00026650190243239567, "loss": 3.2386, "step": 86395 }, { "epoch": 5.870362821035467, "grad_norm": 1.1639429330825806, "learning_rate": 0.000266459437423563, "loss": 3.4726, "step": 86400 }, { "epoch": 5.870702541106128, "grad_norm": 1.241754412651062, "learning_rate": 0.0002664169724147303, "loss": 3.3006, "step": 86405 }, { "epoch": 5.8710422611767905, "grad_norm": 1.1435846090316772, "learning_rate": 0.0002663745074058975, "loss": 3.2326, "step": 86410 }, { "epoch": 5.871381981247453, "grad_norm": 1.0158233642578125, "learning_rate": 0.00026633204239706485, "loss": 3.4178, "step": 86415 }, { "epoch": 5.871721701318114, "grad_norm": 1.1715058088302612, "learning_rate": 0.0002662895773882321, "loss": 3.3679, "step": 86420 }, { "epoch": 5.872061421388776, "grad_norm": 1.4639180898666382, "learning_rate": 0.00026624711237939935, "loss": 3.171, "step": 86425 }, { "epoch": 5.872401141459437, "grad_norm": 1.2381092309951782, "learning_rate": 0.00026620464737056663, "loss": 3.2754, "step": 86430 }, { "epoch": 5.872740861530099, "grad_norm": 1.210792064666748, "learning_rate": 0.00026616218236173397, "loss": 3.392, "step": 86435 }, { "epoch": 5.873080581600761, "grad_norm": 1.291016697883606, "learning_rate": 0.0002661197173529012, "loss": 3.0393, "step": 86440 }, { "epoch": 5.873420301671422, 
"grad_norm": 1.2493807077407837, "learning_rate": 0.00026607725234406847, "loss": 3.3209, "step": 86445 }, { "epoch": 5.8737600217420844, "grad_norm": 1.2266778945922852, "learning_rate": 0.0002660347873352358, "loss": 3.5887, "step": 86450 }, { "epoch": 5.8740997418127465, "grad_norm": 1.57622492313385, "learning_rate": 0.00026599232232640303, "loss": 3.3596, "step": 86455 }, { "epoch": 5.874439461883408, "grad_norm": 1.235785961151123, "learning_rate": 0.0002659498573175703, "loss": 3.3693, "step": 86460 }, { "epoch": 5.87477918195407, "grad_norm": 1.364332675933838, "learning_rate": 0.0002659073923087376, "loss": 3.156, "step": 86465 }, { "epoch": 5.875118902024732, "grad_norm": 1.3194361925125122, "learning_rate": 0.0002658649272999049, "loss": 3.3227, "step": 86470 }, { "epoch": 5.875458622095393, "grad_norm": 0.9896379113197327, "learning_rate": 0.00026582246229107215, "loss": 3.5226, "step": 86475 }, { "epoch": 5.875798342166055, "grad_norm": 0.9342869520187378, "learning_rate": 0.00026577999728223943, "loss": 3.347, "step": 86480 }, { "epoch": 5.876138062236717, "grad_norm": 1.1207565069198608, "learning_rate": 0.00026573753227340677, "loss": 3.1953, "step": 86485 }, { "epoch": 5.876477782307378, "grad_norm": 1.3909380435943604, "learning_rate": 0.000265695067264574, "loss": 3.2526, "step": 86490 }, { "epoch": 5.8768175023780405, "grad_norm": 1.1899936199188232, "learning_rate": 0.0002656526022557413, "loss": 3.2454, "step": 86495 }, { "epoch": 5.8771572224487025, "grad_norm": 1.6814953088760376, "learning_rate": 0.00026561013724690855, "loss": 3.2869, "step": 86500 }, { "epoch": 5.877496942519364, "grad_norm": 1.827903151512146, "learning_rate": 0.00026556767223807583, "loss": 3.0561, "step": 86505 }, { "epoch": 5.877836662590026, "grad_norm": 1.310976266860962, "learning_rate": 0.0002655252072292431, "loss": 3.2375, "step": 86510 }, { "epoch": 5.878176382660688, "grad_norm": 1.0242664813995361, "learning_rate": 0.0002654827422204104, "loss": 3.3599, 
"step": 86515 }, { "epoch": 5.878516102731349, "grad_norm": 1.577177882194519, "learning_rate": 0.0002654402772115777, "loss": 3.3966, "step": 86520 }, { "epoch": 5.878855822802011, "grad_norm": 1.4653152227401733, "learning_rate": 0.00026539781220274495, "loss": 3.43, "step": 86525 }, { "epoch": 5.879195542872673, "grad_norm": 1.1568686962127686, "learning_rate": 0.00026535534719391223, "loss": 2.8526, "step": 86530 }, { "epoch": 5.879535262943334, "grad_norm": 1.2356352806091309, "learning_rate": 0.00026531288218507946, "loss": 3.2937, "step": 86535 }, { "epoch": 5.8798749830139965, "grad_norm": 1.4548258781433105, "learning_rate": 0.0002652704171762468, "loss": 3.3847, "step": 86540 }, { "epoch": 5.8802147030846585, "grad_norm": 1.046812891960144, "learning_rate": 0.0002652279521674141, "loss": 3.1434, "step": 86545 }, { "epoch": 5.88055442315532, "grad_norm": 1.2653069496154785, "learning_rate": 0.0002651854871585813, "loss": 3.4219, "step": 86550 }, { "epoch": 5.880894143225982, "grad_norm": 1.7358810901641846, "learning_rate": 0.00026514302214974863, "loss": 3.3194, "step": 86555 }, { "epoch": 5.881233863296644, "grad_norm": 1.5562424659729004, "learning_rate": 0.0002651005571409159, "loss": 3.4021, "step": 86560 }, { "epoch": 5.881573583367305, "grad_norm": 1.52919340133667, "learning_rate": 0.00026505809213208314, "loss": 3.3241, "step": 86565 }, { "epoch": 5.881913303437967, "grad_norm": 1.129278302192688, "learning_rate": 0.0002650156271232504, "loss": 3.3764, "step": 86570 }, { "epoch": 5.882253023508629, "grad_norm": 1.2946017980575562, "learning_rate": 0.00026497316211441775, "loss": 3.2602, "step": 86575 }, { "epoch": 5.88259274357929, "grad_norm": 1.1511385440826416, "learning_rate": 0.000264930697105585, "loss": 3.3717, "step": 86580 }, { "epoch": 5.8829324636499525, "grad_norm": 1.3148689270019531, "learning_rate": 0.00026488823209675226, "loss": 3.4806, "step": 86585 }, { "epoch": 5.883272183720615, "grad_norm": 1.3509619235992432, 
"learning_rate": 0.0002648457670879196, "loss": 3.4243, "step": 86590 }, { "epoch": 5.883611903791276, "grad_norm": 1.5157663822174072, "learning_rate": 0.0002648033020790868, "loss": 2.945, "step": 86595 }, { "epoch": 5.883951623861938, "grad_norm": 1.3093489408493042, "learning_rate": 0.0002647608370702541, "loss": 3.3187, "step": 86600 }, { "epoch": 5.8842913439326, "grad_norm": 1.456992745399475, "learning_rate": 0.00026471837206142144, "loss": 3.2351, "step": 86605 }, { "epoch": 5.884631064003261, "grad_norm": 1.8318973779678345, "learning_rate": 0.00026467590705258866, "loss": 3.0517, "step": 86610 }, { "epoch": 5.884970784073923, "grad_norm": 1.086827039718628, "learning_rate": 0.00026463344204375594, "loss": 3.3517, "step": 86615 }, { "epoch": 5.885310504144585, "grad_norm": 1.0550955533981323, "learning_rate": 0.0002645909770349232, "loss": 3.327, "step": 86620 }, { "epoch": 5.885650224215246, "grad_norm": 1.264649510383606, "learning_rate": 0.0002645485120260905, "loss": 3.1305, "step": 86625 }, { "epoch": 5.8859899442859085, "grad_norm": 1.358655333518982, "learning_rate": 0.0002645060470172578, "loss": 3.3655, "step": 86630 }, { "epoch": 5.886329664356571, "grad_norm": 1.1250923871994019, "learning_rate": 0.00026446358200842506, "loss": 3.179, "step": 86635 }, { "epoch": 5.886669384427232, "grad_norm": 1.5747811794281006, "learning_rate": 0.00026442111699959234, "loss": 3.2894, "step": 86640 }, { "epoch": 5.887009104497894, "grad_norm": 1.3804943561553955, "learning_rate": 0.0002643786519907596, "loss": 3.1281, "step": 86645 }, { "epoch": 5.887348824568555, "grad_norm": 1.3391201496124268, "learning_rate": 0.0002643361869819269, "loss": 3.396, "step": 86650 }, { "epoch": 5.887688544639217, "grad_norm": 1.168204665184021, "learning_rate": 0.0002642937219730942, "loss": 3.3838, "step": 86655 }, { "epoch": 5.888028264709879, "grad_norm": 1.3554890155792236, "learning_rate": 0.00026425125696426146, "loss": 3.3601, "step": 86660 }, { "epoch": 
5.88836798478054, "grad_norm": 1.1352484226226807, "learning_rate": 0.00026420879195542874, "loss": 3.5084, "step": 86665 }, { "epoch": 5.888707704851202, "grad_norm": 1.5540258884429932, "learning_rate": 0.000264166326946596, "loss": 3.2326, "step": 86670 }, { "epoch": 5.8890474249218645, "grad_norm": 1.4380193948745728, "learning_rate": 0.0002641238619377633, "loss": 3.1017, "step": 86675 }, { "epoch": 5.889387144992526, "grad_norm": 1.3643871545791626, "learning_rate": 0.0002640813969289306, "loss": 3.0657, "step": 86680 }, { "epoch": 5.889726865063188, "grad_norm": 1.2123744487762451, "learning_rate": 0.00026403893192009786, "loss": 3.4288, "step": 86685 }, { "epoch": 5.89006658513385, "grad_norm": 1.0634549856185913, "learning_rate": 0.0002639964669112651, "loss": 3.1538, "step": 86690 }, { "epoch": 5.890406305204511, "grad_norm": 1.2091193199157715, "learning_rate": 0.0002639540019024324, "loss": 3.3439, "step": 86695 }, { "epoch": 5.890746025275173, "grad_norm": 1.061211109161377, "learning_rate": 0.0002639115368935997, "loss": 3.0909, "step": 86700 }, { "epoch": 5.891085745345835, "grad_norm": 1.4697952270507812, "learning_rate": 0.00026386907188476693, "loss": 3.1459, "step": 86705 }, { "epoch": 5.891425465416496, "grad_norm": 1.721948266029358, "learning_rate": 0.00026382660687593426, "loss": 3.4604, "step": 86710 }, { "epoch": 5.891765185487158, "grad_norm": 1.66097891330719, "learning_rate": 0.00026378414186710154, "loss": 3.2325, "step": 86715 }, { "epoch": 5.8921049055578205, "grad_norm": 1.239790916442871, "learning_rate": 0.00026374167685826877, "loss": 3.1896, "step": 86720 }, { "epoch": 5.892444625628482, "grad_norm": 1.1885040998458862, "learning_rate": 0.00026369921184943605, "loss": 3.6509, "step": 86725 }, { "epoch": 5.892784345699144, "grad_norm": 1.1464409828186035, "learning_rate": 0.0002636567468406034, "loss": 3.3724, "step": 86730 }, { "epoch": 5.893124065769806, "grad_norm": 1.323368787765503, "learning_rate": 0.0002636142818317706, 
"loss": 3.2082, "step": 86735 }, { "epoch": 5.893463785840467, "grad_norm": 1.2419170141220093, "learning_rate": 0.0002635718168229379, "loss": 3.4387, "step": 86740 }, { "epoch": 5.893803505911129, "grad_norm": 1.9342005252838135, "learning_rate": 0.0002635293518141052, "loss": 3.0844, "step": 86745 }, { "epoch": 5.894143225981791, "grad_norm": 1.2704333066940308, "learning_rate": 0.00026348688680527245, "loss": 3.3197, "step": 86750 }, { "epoch": 5.894482946052452, "grad_norm": 1.2960509061813354, "learning_rate": 0.00026344442179643973, "loss": 3.3177, "step": 86755 }, { "epoch": 5.8948226661231145, "grad_norm": 1.2382394075393677, "learning_rate": 0.000263401956787607, "loss": 3.3154, "step": 86760 }, { "epoch": 5.8951623861937765, "grad_norm": 1.4011260271072388, "learning_rate": 0.0002633594917787743, "loss": 3.0872, "step": 86765 }, { "epoch": 5.895502106264438, "grad_norm": 1.0645776987075806, "learning_rate": 0.00026331702676994157, "loss": 3.4005, "step": 86770 }, { "epoch": 5.8958418263351, "grad_norm": 1.2623873949050903, "learning_rate": 0.00026327456176110885, "loss": 3.3835, "step": 86775 }, { "epoch": 5.896181546405762, "grad_norm": 1.5483896732330322, "learning_rate": 0.00026323209675227613, "loss": 3.2354, "step": 86780 }, { "epoch": 5.896521266476423, "grad_norm": 1.3795976638793945, "learning_rate": 0.0002631896317434434, "loss": 3.5365, "step": 86785 }, { "epoch": 5.896860986547085, "grad_norm": 1.4977247714996338, "learning_rate": 0.0002631471667346107, "loss": 3.2908, "step": 86790 }, { "epoch": 5.897200706617747, "grad_norm": 1.3762634992599487, "learning_rate": 0.0002631047017257779, "loss": 3.2066, "step": 86795 }, { "epoch": 5.897540426688408, "grad_norm": 1.3056581020355225, "learning_rate": 0.00026306223671694525, "loss": 3.249, "step": 86800 }, { "epoch": 5.8978801467590705, "grad_norm": 1.7059649229049683, "learning_rate": 0.00026301977170811253, "loss": 3.3213, "step": 86805 }, { "epoch": 5.8982198668297325, "grad_norm": 
1.2096443176269531, "learning_rate": 0.00026297730669927976, "loss": 3.2983, "step": 86810 }, { "epoch": 5.898559586900394, "grad_norm": 1.061876654624939, "learning_rate": 0.0002629348416904471, "loss": 3.1418, "step": 86815 }, { "epoch": 5.898899306971056, "grad_norm": 1.2003254890441895, "learning_rate": 0.00026289237668161437, "loss": 3.4457, "step": 86820 }, { "epoch": 5.899239027041718, "grad_norm": 1.0513614416122437, "learning_rate": 0.00026284991167278165, "loss": 3.4052, "step": 86825 }, { "epoch": 5.899578747112379, "grad_norm": 1.3775146007537842, "learning_rate": 0.0002628074466639489, "loss": 3.0518, "step": 86830 }, { "epoch": 5.899918467183041, "grad_norm": 1.2349095344543457, "learning_rate": 0.0002627649816551162, "loss": 3.2316, "step": 86835 }, { "epoch": 5.900258187253703, "grad_norm": 1.255927562713623, "learning_rate": 0.0002627225166462835, "loss": 3.3428, "step": 86840 }, { "epoch": 5.900597907324364, "grad_norm": 1.5038319826126099, "learning_rate": 0.0002626800516374507, "loss": 3.4468, "step": 86845 }, { "epoch": 5.9009376273950265, "grad_norm": 1.1325665712356567, "learning_rate": 0.00026263758662861805, "loss": 3.2285, "step": 86850 }, { "epoch": 5.9012773474656885, "grad_norm": 1.6841901540756226, "learning_rate": 0.00026259512161978533, "loss": 3.0938, "step": 86855 }, { "epoch": 5.90161706753635, "grad_norm": 1.0647337436676025, "learning_rate": 0.00026255265661095256, "loss": 3.4667, "step": 86860 }, { "epoch": 5.901956787607012, "grad_norm": 1.4098910093307495, "learning_rate": 0.00026251019160211984, "loss": 3.2404, "step": 86865 }, { "epoch": 5.902296507677674, "grad_norm": 1.5162030458450317, "learning_rate": 0.00026246772659328717, "loss": 3.4142, "step": 86870 }, { "epoch": 5.902636227748335, "grad_norm": 1.1725997924804688, "learning_rate": 0.0002624252615844544, "loss": 3.3113, "step": 86875 }, { "epoch": 5.902975947818997, "grad_norm": 1.293521761894226, "learning_rate": 0.0002623827965756217, "loss": 3.3365, "step": 86880 
}, { "epoch": 5.903315667889659, "grad_norm": 1.4085348844528198, "learning_rate": 0.000262340331566789, "loss": 3.3053, "step": 86885 }, { "epoch": 5.90365538796032, "grad_norm": 1.2465466260910034, "learning_rate": 0.00026229786655795624, "loss": 3.4212, "step": 86890 }, { "epoch": 5.9039951080309825, "grad_norm": 1.1827059984207153, "learning_rate": 0.0002622554015491235, "loss": 3.3634, "step": 86895 }, { "epoch": 5.904334828101645, "grad_norm": 1.3097352981567383, "learning_rate": 0.00026221293654029085, "loss": 3.3171, "step": 86900 }, { "epoch": 5.904674548172306, "grad_norm": 1.219638705253601, "learning_rate": 0.0002621704715314581, "loss": 3.2938, "step": 86905 }, { "epoch": 5.905014268242968, "grad_norm": 1.218231201171875, "learning_rate": 0.00026212800652262536, "loss": 3.0644, "step": 86910 }, { "epoch": 5.90535398831363, "grad_norm": 1.335861325263977, "learning_rate": 0.00026208554151379264, "loss": 3.1923, "step": 86915 }, { "epoch": 5.905693708384291, "grad_norm": 1.2502321004867554, "learning_rate": 0.0002620430765049599, "loss": 3.1338, "step": 86920 }, { "epoch": 5.906033428454953, "grad_norm": 1.3874318599700928, "learning_rate": 0.0002620006114961272, "loss": 3.2742, "step": 86925 }, { "epoch": 5.906373148525615, "grad_norm": 1.2580124139785767, "learning_rate": 0.0002619581464872945, "loss": 3.1713, "step": 86930 }, { "epoch": 5.906712868596276, "grad_norm": 0.9846178293228149, "learning_rate": 0.00026191568147846176, "loss": 3.0354, "step": 86935 }, { "epoch": 5.9070525886669385, "grad_norm": 1.4014813899993896, "learning_rate": 0.00026187321646962904, "loss": 2.9774, "step": 86940 }, { "epoch": 5.907392308737601, "grad_norm": 1.1678966283798218, "learning_rate": 0.0002618307514607963, "loss": 3.2249, "step": 86945 }, { "epoch": 5.907732028808262, "grad_norm": 1.3477332592010498, "learning_rate": 0.00026178828645196354, "loss": 3.5429, "step": 86950 }, { "epoch": 5.908071748878924, "grad_norm": 1.1128721237182617, "learning_rate": 
0.0002617458214431309, "loss": 3.2813, "step": 86955 }, { "epoch": 5.908411468949586, "grad_norm": 1.2024054527282715, "learning_rate": 0.00026170335643429816, "loss": 3.1819, "step": 86960 }, { "epoch": 5.908751189020247, "grad_norm": 1.5451276302337646, "learning_rate": 0.0002616608914254654, "loss": 3.4227, "step": 86965 }, { "epoch": 5.909090909090909, "grad_norm": 1.2905813455581665, "learning_rate": 0.0002616184264166327, "loss": 3.303, "step": 86970 }, { "epoch": 5.909430629161571, "grad_norm": 1.1892658472061157, "learning_rate": 0.0002615759614078, "loss": 3.5356, "step": 86975 }, { "epoch": 5.909770349232232, "grad_norm": 1.2541048526763916, "learning_rate": 0.0002615334963989672, "loss": 3.2872, "step": 86980 }, { "epoch": 5.9101100693028945, "grad_norm": 1.1276415586471558, "learning_rate": 0.0002614910313901345, "loss": 3.2019, "step": 86985 }, { "epoch": 5.910449789373557, "grad_norm": 1.1462180614471436, "learning_rate": 0.00026144856638130184, "loss": 3.2572, "step": 86990 }, { "epoch": 5.910789509444218, "grad_norm": 1.0653473138809204, "learning_rate": 0.0002614061013724691, "loss": 3.426, "step": 86995 }, { "epoch": 5.91112922951488, "grad_norm": 1.517945408821106, "learning_rate": 0.00026136363636363634, "loss": 3.327, "step": 87000 }, { "epoch": 5.911468949585542, "grad_norm": 1.2778875827789307, "learning_rate": 0.0002613211713548037, "loss": 3.4778, "step": 87005 }, { "epoch": 5.911808669656203, "grad_norm": 1.4470546245574951, "learning_rate": 0.00026127870634597096, "loss": 3.0623, "step": 87010 }, { "epoch": 5.912148389726865, "grad_norm": 1.1922969818115234, "learning_rate": 0.0002612362413371382, "loss": 3.3209, "step": 87015 }, { "epoch": 5.912488109797527, "grad_norm": 1.2670071125030518, "learning_rate": 0.00026119377632830546, "loss": 3.3692, "step": 87020 }, { "epoch": 5.9128278298681884, "grad_norm": 1.3150445222854614, "learning_rate": 0.0002611513113194728, "loss": 3.5743, "step": 87025 }, { "epoch": 5.9131675499388505, 
"grad_norm": 1.2631433010101318, "learning_rate": 0.00026110884631064, "loss": 3.5929, "step": 87030 }, { "epoch": 5.913507270009513, "grad_norm": 1.2057350873947144, "learning_rate": 0.0002610663813018073, "loss": 3.493, "step": 87035 }, { "epoch": 5.913846990080174, "grad_norm": 1.1933391094207764, "learning_rate": 0.00026102391629297464, "loss": 3.275, "step": 87040 }, { "epoch": 5.914186710150836, "grad_norm": 1.121944785118103, "learning_rate": 0.00026098145128414187, "loss": 3.0383, "step": 87045 }, { "epoch": 5.914526430221498, "grad_norm": 1.0924395322799683, "learning_rate": 0.00026093898627530915, "loss": 3.3214, "step": 87050 }, { "epoch": 5.914866150292159, "grad_norm": 1.7949424982070923, "learning_rate": 0.0002608965212664764, "loss": 3.0334, "step": 87055 }, { "epoch": 5.915205870362821, "grad_norm": 1.2260674238204956, "learning_rate": 0.0002608540562576437, "loss": 3.2108, "step": 87060 }, { "epoch": 5.915545590433483, "grad_norm": 1.109118103981018, "learning_rate": 0.000260811591248811, "loss": 3.3256, "step": 87065 }, { "epoch": 5.9158853105041445, "grad_norm": 1.2296360731124878, "learning_rate": 0.00026076912623997827, "loss": 3.2263, "step": 87070 }, { "epoch": 5.9162250305748065, "grad_norm": 1.029876947402954, "learning_rate": 0.00026072666123114555, "loss": 3.3408, "step": 87075 }, { "epoch": 5.916564750645469, "grad_norm": 1.4869427680969238, "learning_rate": 0.0002606841962223128, "loss": 3.4, "step": 87080 }, { "epoch": 5.91690447071613, "grad_norm": 1.5195579528808594, "learning_rate": 0.0002606417312134801, "loss": 3.1583, "step": 87085 }, { "epoch": 5.917244190786792, "grad_norm": 1.0172208547592163, "learning_rate": 0.00026059926620464733, "loss": 3.6085, "step": 87090 }, { "epoch": 5.917583910857454, "grad_norm": 1.3815932273864746, "learning_rate": 0.00026055680119581467, "loss": 3.1946, "step": 87095 }, { "epoch": 5.917923630928115, "grad_norm": 1.3245985507965088, "learning_rate": 0.00026051433618698195, "loss": 3.4248, "step": 
87100 }, { "epoch": 5.918263350998777, "grad_norm": 1.2773135900497437, "learning_rate": 0.00026047187117814917, "loss": 3.4157, "step": 87105 }, { "epoch": 5.918603071069439, "grad_norm": 1.3136504888534546, "learning_rate": 0.0002604294061693165, "loss": 3.2116, "step": 87110 }, { "epoch": 5.9189427911401005, "grad_norm": 1.20729398727417, "learning_rate": 0.0002603869411604838, "loss": 3.3653, "step": 87115 }, { "epoch": 5.9192825112107625, "grad_norm": 1.2129778861999512, "learning_rate": 0.000260344476151651, "loss": 3.1571, "step": 87120 }, { "epoch": 5.919622231281424, "grad_norm": 1.1555192470550537, "learning_rate": 0.0002603020111428183, "loss": 3.3984, "step": 87125 }, { "epoch": 5.919961951352086, "grad_norm": 1.2887200117111206, "learning_rate": 0.0002602595461339856, "loss": 2.9577, "step": 87130 }, { "epoch": 5.920301671422748, "grad_norm": 1.2938166856765747, "learning_rate": 0.00026021708112515285, "loss": 3.479, "step": 87135 }, { "epoch": 5.920641391493409, "grad_norm": 1.1341288089752197, "learning_rate": 0.00026017461611632013, "loss": 3.328, "step": 87140 }, { "epoch": 5.920981111564071, "grad_norm": 1.333570957183838, "learning_rate": 0.00026013215110748747, "loss": 3.4208, "step": 87145 }, { "epoch": 5.921320831634733, "grad_norm": 1.2425905466079712, "learning_rate": 0.0002600896860986547, "loss": 3.3875, "step": 87150 }, { "epoch": 5.921660551705394, "grad_norm": 1.1862545013427734, "learning_rate": 0.00026004722108982197, "loss": 3.1751, "step": 87155 }, { "epoch": 5.9220002717760565, "grad_norm": 1.2613565921783447, "learning_rate": 0.00026000475608098925, "loss": 3.2413, "step": 87160 }, { "epoch": 5.9223399918467186, "grad_norm": 1.5057376623153687, "learning_rate": 0.0002599622910721566, "loss": 3.6211, "step": 87165 }, { "epoch": 5.92267971191738, "grad_norm": 1.4110039472579956, "learning_rate": 0.0002599198260633238, "loss": 3.4545, "step": 87170 }, { "epoch": 5.923019431988042, "grad_norm": 1.1898540258407593, "learning_rate": 
0.0002598773610544911, "loss": 3.2674, "step": 87175 }, { "epoch": 5.923359152058704, "grad_norm": 1.2529921531677246, "learning_rate": 0.00025983489604565843, "loss": 3.4165, "step": 87180 }, { "epoch": 5.923698872129365, "grad_norm": 1.0438036918640137, "learning_rate": 0.00025979243103682565, "loss": 3.3819, "step": 87185 }, { "epoch": 5.924038592200027, "grad_norm": 1.1073429584503174, "learning_rate": 0.00025974996602799293, "loss": 3.3521, "step": 87190 }, { "epoch": 5.924378312270689, "grad_norm": 1.2115877866744995, "learning_rate": 0.00025970750101916027, "loss": 3.5613, "step": 87195 }, { "epoch": 5.92471803234135, "grad_norm": 0.966134250164032, "learning_rate": 0.0002596650360103275, "loss": 3.6083, "step": 87200 }, { "epoch": 5.9250577524120125, "grad_norm": 1.1099838018417358, "learning_rate": 0.0002596225710014948, "loss": 3.2302, "step": 87205 }, { "epoch": 5.925397472482675, "grad_norm": 1.4939569234848022, "learning_rate": 0.00025958010599266205, "loss": 3.188, "step": 87210 }, { "epoch": 5.925737192553336, "grad_norm": 1.3254591226577759, "learning_rate": 0.00025953764098382933, "loss": 3.4075, "step": 87215 }, { "epoch": 5.926076912623998, "grad_norm": 1.2506036758422852, "learning_rate": 0.0002594951759749966, "loss": 3.2775, "step": 87220 }, { "epoch": 5.92641663269466, "grad_norm": 1.3500961065292358, "learning_rate": 0.0002594527109661639, "loss": 3.2361, "step": 87225 }, { "epoch": 5.926756352765321, "grad_norm": 1.5048292875289917, "learning_rate": 0.0002594102459573312, "loss": 3.2968, "step": 87230 }, { "epoch": 5.927096072835983, "grad_norm": 1.0130616426467896, "learning_rate": 0.00025936778094849845, "loss": 3.5749, "step": 87235 }, { "epoch": 5.927435792906645, "grad_norm": 1.0852121114730835, "learning_rate": 0.00025932531593966573, "loss": 3.2499, "step": 87240 }, { "epoch": 5.927775512977306, "grad_norm": 1.1243044137954712, "learning_rate": 0.00025928285093083296, "loss": 2.9516, "step": 87245 }, { "epoch": 5.9281152330479685, 
"grad_norm": 1.2874841690063477, "learning_rate": 0.0002592403859220003, "loss": 3.3974, "step": 87250 }, { "epoch": 5.928454953118631, "grad_norm": 1.4279688596725464, "learning_rate": 0.0002591979209131676, "loss": 3.1904, "step": 87255 }, { "epoch": 5.928794673189292, "grad_norm": 1.58095383644104, "learning_rate": 0.0002591554559043348, "loss": 3.3969, "step": 87260 }, { "epoch": 5.929134393259954, "grad_norm": 1.6702555418014526, "learning_rate": 0.00025911299089550213, "loss": 3.17, "step": 87265 }, { "epoch": 5.929474113330616, "grad_norm": 1.2608489990234375, "learning_rate": 0.0002590705258866694, "loss": 3.5223, "step": 87270 }, { "epoch": 5.929813833401277, "grad_norm": 1.319720983505249, "learning_rate": 0.00025902806087783664, "loss": 3.1966, "step": 87275 }, { "epoch": 5.930153553471939, "grad_norm": 1.2010036706924438, "learning_rate": 0.0002589855958690039, "loss": 3.1614, "step": 87280 }, { "epoch": 5.930493273542601, "grad_norm": 1.4036861658096313, "learning_rate": 0.00025894313086017125, "loss": 3.2903, "step": 87285 }, { "epoch": 5.930832993613262, "grad_norm": 1.1478193998336792, "learning_rate": 0.0002589006658513385, "loss": 3.3241, "step": 87290 }, { "epoch": 5.9311727136839245, "grad_norm": 1.1665178537368774, "learning_rate": 0.00025885820084250576, "loss": 3.3572, "step": 87295 }, { "epoch": 5.931512433754587, "grad_norm": 1.3824046850204468, "learning_rate": 0.0002588157358336731, "loss": 3.2939, "step": 87300 }, { "epoch": 5.931852153825248, "grad_norm": 1.3703444004058838, "learning_rate": 0.0002587732708248403, "loss": 3.3677, "step": 87305 }, { "epoch": 5.93219187389591, "grad_norm": 1.3543815612792969, "learning_rate": 0.0002587308058160076, "loss": 3.3138, "step": 87310 }, { "epoch": 5.932531593966572, "grad_norm": 1.2381829023361206, "learning_rate": 0.0002586883408071749, "loss": 3.3792, "step": 87315 }, { "epoch": 5.932871314037233, "grad_norm": 1.0028468370437622, "learning_rate": 0.00025864587579834216, "loss": 3.3192, 
"step": 87320 }, { "epoch": 5.933211034107895, "grad_norm": 1.1359076499938965, "learning_rate": 0.00025860341078950944, "loss": 3.3337, "step": 87325 }, { "epoch": 5.933550754178556, "grad_norm": 1.414832592010498, "learning_rate": 0.0002585609457806767, "loss": 3.2993, "step": 87330 }, { "epoch": 5.9338904742492184, "grad_norm": 1.45749831199646, "learning_rate": 0.00025851848077184406, "loss": 3.402, "step": 87335 }, { "epoch": 5.9342301943198805, "grad_norm": 1.1544420719146729, "learning_rate": 0.0002584760157630113, "loss": 3.6193, "step": 87340 }, { "epoch": 5.934569914390542, "grad_norm": 1.1605236530303955, "learning_rate": 0.00025843355075417856, "loss": 3.2057, "step": 87345 }, { "epoch": 5.934909634461204, "grad_norm": 1.211429238319397, "learning_rate": 0.00025839108574534584, "loss": 3.3002, "step": 87350 }, { "epoch": 5.935249354531866, "grad_norm": 1.2099745273590088, "learning_rate": 0.0002583486207365131, "loss": 3.324, "step": 87355 }, { "epoch": 5.935589074602527, "grad_norm": 1.2893470525741577, "learning_rate": 0.0002583061557276804, "loss": 3.3166, "step": 87360 }, { "epoch": 5.935928794673189, "grad_norm": 1.1420894861221313, "learning_rate": 0.0002582636907188477, "loss": 3.1771, "step": 87365 }, { "epoch": 5.936268514743851, "grad_norm": 1.0278074741363525, "learning_rate": 0.00025822122571001496, "loss": 3.3017, "step": 87370 }, { "epoch": 5.936608234814512, "grad_norm": 1.3235005140304565, "learning_rate": 0.00025817876070118224, "loss": 3.0233, "step": 87375 }, { "epoch": 5.9369479548851745, "grad_norm": 1.2281615734100342, "learning_rate": 0.0002581362956923495, "loss": 3.2548, "step": 87380 }, { "epoch": 5.9372876749558365, "grad_norm": 1.42672598361969, "learning_rate": 0.00025809383068351675, "loss": 3.3597, "step": 87385 }, { "epoch": 5.937627395026498, "grad_norm": 1.219306230545044, "learning_rate": 0.0002580513656746841, "loss": 3.1836, "step": 87390 }, { "epoch": 5.93796711509716, "grad_norm": 1.5306596755981445, 
"learning_rate": 0.00025800890066585136, "loss": 2.9788, "step": 87395 }, { "epoch": 5.938306835167822, "grad_norm": 1.2525427341461182, "learning_rate": 0.0002579664356570186, "loss": 2.9353, "step": 87400 }, { "epoch": 5.938646555238483, "grad_norm": 1.6332683563232422, "learning_rate": 0.0002579239706481859, "loss": 3.2733, "step": 87405 }, { "epoch": 5.938986275309145, "grad_norm": 1.4505181312561035, "learning_rate": 0.0002578815056393532, "loss": 3.4113, "step": 87410 }, { "epoch": 5.939325995379807, "grad_norm": 1.2609089612960815, "learning_rate": 0.00025783904063052043, "loss": 3.1061, "step": 87415 }, { "epoch": 5.939665715450468, "grad_norm": 1.1065466403961182, "learning_rate": 0.0002577965756216877, "loss": 3.1812, "step": 87420 }, { "epoch": 5.9400054355211305, "grad_norm": 1.1220563650131226, "learning_rate": 0.00025775411061285504, "loss": 3.0974, "step": 87425 }, { "epoch": 5.9403451555917925, "grad_norm": 0.9070210456848145, "learning_rate": 0.00025771164560402227, "loss": 3.1304, "step": 87430 }, { "epoch": 5.940684875662454, "grad_norm": 1.1875494718551636, "learning_rate": 0.00025766918059518955, "loss": 3.3957, "step": 87435 }, { "epoch": 5.941024595733116, "grad_norm": 1.3101788759231567, "learning_rate": 0.0002576267155863569, "loss": 3.2697, "step": 87440 }, { "epoch": 5.941364315803778, "grad_norm": 1.2286944389343262, "learning_rate": 0.0002575842505775241, "loss": 3.1657, "step": 87445 }, { "epoch": 5.941704035874439, "grad_norm": 1.254576563835144, "learning_rate": 0.0002575417855686914, "loss": 3.3665, "step": 87450 }, { "epoch": 5.942043755945101, "grad_norm": 1.3030364513397217, "learning_rate": 0.0002574993205598587, "loss": 3.3125, "step": 87455 }, { "epoch": 5.942383476015763, "grad_norm": 1.291094422340393, "learning_rate": 0.00025745685555102595, "loss": 3.3816, "step": 87460 }, { "epoch": 5.942723196086424, "grad_norm": 1.5668666362762451, "learning_rate": 0.00025741439054219323, "loss": 3.1708, "step": 87465 }, { "epoch": 
5.9430629161570865, "grad_norm": 1.0253931283950806, "learning_rate": 0.0002573719255333605, "loss": 3.1525, "step": 87470 }, { "epoch": 5.943402636227749, "grad_norm": 1.3481851816177368, "learning_rate": 0.0002573294605245278, "loss": 3.4461, "step": 87475 }, { "epoch": 5.94374235629841, "grad_norm": 1.2013590335845947, "learning_rate": 0.00025728699551569507, "loss": 3.4328, "step": 87480 }, { "epoch": 5.944082076369072, "grad_norm": 1.0534913539886475, "learning_rate": 0.00025724453050686235, "loss": 3.4003, "step": 87485 }, { "epoch": 5.944421796439734, "grad_norm": 1.2600501775741577, "learning_rate": 0.00025720206549802963, "loss": 3.293, "step": 87490 }, { "epoch": 5.944761516510395, "grad_norm": 1.2534154653549194, "learning_rate": 0.0002571596004891969, "loss": 3.1095, "step": 87495 }, { "epoch": 5.945101236581057, "grad_norm": 1.3571975231170654, "learning_rate": 0.0002571171354803642, "loss": 3.3693, "step": 87500 }, { "epoch": 5.945440956651719, "grad_norm": 1.1229366064071655, "learning_rate": 0.00025707467047153147, "loss": 3.3724, "step": 87505 }, { "epoch": 5.94578067672238, "grad_norm": 1.1698120832443237, "learning_rate": 0.00025703220546269875, "loss": 3.1976, "step": 87510 }, { "epoch": 5.9461203967930425, "grad_norm": 1.1455942392349243, "learning_rate": 0.00025698974045386603, "loss": 3.4817, "step": 87515 }, { "epoch": 5.946460116863705, "grad_norm": 1.4985570907592773, "learning_rate": 0.0002569472754450333, "loss": 3.4484, "step": 87520 }, { "epoch": 5.946799836934366, "grad_norm": 1.155990719795227, "learning_rate": 0.0002569048104362006, "loss": 3.4452, "step": 87525 }, { "epoch": 5.947139557005028, "grad_norm": 1.3638114929199219, "learning_rate": 0.00025686234542736787, "loss": 2.9843, "step": 87530 }, { "epoch": 5.94747927707569, "grad_norm": 1.2832309007644653, "learning_rate": 0.00025681988041853515, "loss": 3.4567, "step": 87535 }, { "epoch": 5.947818997146351, "grad_norm": 1.1345969438552856, "learning_rate": 
0.0002567774154097024, "loss": 3.2605, "step": 87540 }, { "epoch": 5.948158717217013, "grad_norm": 1.357528567314148, "learning_rate": 0.0002567349504008697, "loss": 3.2308, "step": 87545 }, { "epoch": 5.948498437287675, "grad_norm": 1.2109618186950684, "learning_rate": 0.000256692485392037, "loss": 3.1457, "step": 87550 }, { "epoch": 5.948838157358336, "grad_norm": 1.5060609579086304, "learning_rate": 0.0002566500203832042, "loss": 3.6033, "step": 87555 }, { "epoch": 5.9491778774289985, "grad_norm": 0.9987205862998962, "learning_rate": 0.00025660755537437155, "loss": 3.0788, "step": 87560 }, { "epoch": 5.949517597499661, "grad_norm": 1.2095396518707275, "learning_rate": 0.00025656509036553883, "loss": 3.2074, "step": 87565 }, { "epoch": 5.949857317570322, "grad_norm": 1.2040565013885498, "learning_rate": 0.00025652262535670606, "loss": 3.2766, "step": 87570 }, { "epoch": 5.950197037640984, "grad_norm": 1.1890230178833008, "learning_rate": 0.00025648016034787334, "loss": 3.1468, "step": 87575 }, { "epoch": 5.950536757711646, "grad_norm": 1.538985252380371, "learning_rate": 0.00025643769533904067, "loss": 3.3526, "step": 87580 }, { "epoch": 5.950876477782307, "grad_norm": 1.1176785230636597, "learning_rate": 0.0002563952303302079, "loss": 3.4043, "step": 87585 }, { "epoch": 5.951216197852969, "grad_norm": 1.4357047080993652, "learning_rate": 0.0002563527653213752, "loss": 3.162, "step": 87590 }, { "epoch": 5.951555917923631, "grad_norm": 1.437928557395935, "learning_rate": 0.0002563103003125425, "loss": 3.4298, "step": 87595 }, { "epoch": 5.951895637994292, "grad_norm": 1.1016428470611572, "learning_rate": 0.00025626783530370974, "loss": 3.2654, "step": 87600 }, { "epoch": 5.9522353580649545, "grad_norm": 1.5447638034820557, "learning_rate": 0.000256225370294877, "loss": 3.3876, "step": 87605 }, { "epoch": 5.952575078135617, "grad_norm": 1.2548848390579224, "learning_rate": 0.0002561829052860443, "loss": 3.607, "step": 87610 }, { "epoch": 5.952914798206278, 
"grad_norm": 1.196578860282898, "learning_rate": 0.0002561404402772116, "loss": 3.5129, "step": 87615 }, { "epoch": 5.95325451827694, "grad_norm": 1.297819972038269, "learning_rate": 0.00025609797526837886, "loss": 3.2671, "step": 87620 }, { "epoch": 5.953594238347602, "grad_norm": 1.5047502517700195, "learning_rate": 0.00025605551025954614, "loss": 3.1413, "step": 87625 }, { "epoch": 5.953933958418263, "grad_norm": 1.2113592624664307, "learning_rate": 0.0002560130452507134, "loss": 3.4928, "step": 87630 }, { "epoch": 5.954273678488925, "grad_norm": 1.2708890438079834, "learning_rate": 0.0002559705802418807, "loss": 3.2429, "step": 87635 }, { "epoch": 5.954613398559587, "grad_norm": 1.1128246784210205, "learning_rate": 0.000255928115233048, "loss": 3.1576, "step": 87640 }, { "epoch": 5.9549531186302485, "grad_norm": 1.1458922624588013, "learning_rate": 0.0002558856502242152, "loss": 3.2363, "step": 87645 }, { "epoch": 5.9552928387009105, "grad_norm": 1.122806429862976, "learning_rate": 0.00025584318521538254, "loss": 3.4025, "step": 87650 }, { "epoch": 5.955632558771573, "grad_norm": 1.4947333335876465, "learning_rate": 0.0002558007202065498, "loss": 3.3394, "step": 87655 }, { "epoch": 5.955972278842234, "grad_norm": 1.1190940141677856, "learning_rate": 0.00025575825519771704, "loss": 3.287, "step": 87660 }, { "epoch": 5.956311998912896, "grad_norm": 1.7010973691940308, "learning_rate": 0.0002557157901888844, "loss": 3.3552, "step": 87665 }, { "epoch": 5.956651718983558, "grad_norm": 1.3227362632751465, "learning_rate": 0.00025567332518005166, "loss": 3.2801, "step": 87670 }, { "epoch": 5.956991439054219, "grad_norm": 1.3716832399368286, "learning_rate": 0.00025563086017121894, "loss": 3.2947, "step": 87675 }, { "epoch": 5.957331159124881, "grad_norm": 1.255183219909668, "learning_rate": 0.00025558839516238616, "loss": 3.4935, "step": 87680 }, { "epoch": 5.957670879195543, "grad_norm": 1.3097172975540161, "learning_rate": 0.0002555459301535535, "loss": 3.3677, 
"step": 87685 }, { "epoch": 5.9580105992662045, "grad_norm": 1.0498254299163818, "learning_rate": 0.0002555034651447208, "loss": 3.2819, "step": 87690 }, { "epoch": 5.9583503193368665, "grad_norm": 1.213236689567566, "learning_rate": 0.000255461000135888, "loss": 3.2203, "step": 87695 }, { "epoch": 5.958690039407529, "grad_norm": 1.310819387435913, "learning_rate": 0.00025541853512705534, "loss": 3.5831, "step": 87700 }, { "epoch": 5.95902975947819, "grad_norm": 1.0681781768798828, "learning_rate": 0.0002553760701182226, "loss": 3.4024, "step": 87705 }, { "epoch": 5.959369479548852, "grad_norm": 1.3590668439865112, "learning_rate": 0.00025533360510938984, "loss": 3.3081, "step": 87710 }, { "epoch": 5.959709199619514, "grad_norm": 1.1658504009246826, "learning_rate": 0.0002552911401005571, "loss": 3.2878, "step": 87715 }, { "epoch": 5.960048919690175, "grad_norm": 1.125414490699768, "learning_rate": 0.00025524867509172446, "loss": 3.3478, "step": 87720 }, { "epoch": 5.960388639760837, "grad_norm": 1.1389353275299072, "learning_rate": 0.0002552062100828917, "loss": 3.3823, "step": 87725 }, { "epoch": 5.960728359831499, "grad_norm": 1.2282081842422485, "learning_rate": 0.00025516374507405896, "loss": 3.6804, "step": 87730 }, { "epoch": 5.9610680799021605, "grad_norm": 1.1574673652648926, "learning_rate": 0.0002551212800652263, "loss": 3.3951, "step": 87735 }, { "epoch": 5.9614077999728226, "grad_norm": 1.3629621267318726, "learning_rate": 0.0002550788150563935, "loss": 3.2151, "step": 87740 }, { "epoch": 5.961747520043485, "grad_norm": 1.6217278242111206, "learning_rate": 0.0002550363500475608, "loss": 2.9661, "step": 87745 }, { "epoch": 5.962087240114146, "grad_norm": 1.2991799116134644, "learning_rate": 0.00025499388503872814, "loss": 3.7444, "step": 87750 }, { "epoch": 5.962426960184808, "grad_norm": 1.1217458248138428, "learning_rate": 0.00025495142002989537, "loss": 3.2955, "step": 87755 }, { "epoch": 5.96276668025547, "grad_norm": 1.1822322607040405, 
"learning_rate": 0.00025490895502106265, "loss": 3.3547, "step": 87760 }, { "epoch": 5.963106400326131, "grad_norm": 1.1940722465515137, "learning_rate": 0.0002548664900122299, "loss": 3.6774, "step": 87765 }, { "epoch": 5.963446120396793, "grad_norm": 1.44443678855896, "learning_rate": 0.0002548240250033972, "loss": 3.4928, "step": 87770 }, { "epoch": 5.963785840467455, "grad_norm": 1.3533661365509033, "learning_rate": 0.0002547815599945645, "loss": 3.2227, "step": 87775 }, { "epoch": 5.9641255605381165, "grad_norm": 1.1213637590408325, "learning_rate": 0.00025473909498573177, "loss": 3.2857, "step": 87780 }, { "epoch": 5.964465280608779, "grad_norm": 1.3715170621871948, "learning_rate": 0.00025469662997689905, "loss": 3.4947, "step": 87785 }, { "epoch": 5.964805000679441, "grad_norm": 1.4657257795333862, "learning_rate": 0.0002546541649680663, "loss": 3.4122, "step": 87790 }, { "epoch": 5.965144720750102, "grad_norm": 1.097353458404541, "learning_rate": 0.0002546116999592336, "loss": 2.9363, "step": 87795 }, { "epoch": 5.965484440820764, "grad_norm": 1.3649622201919556, "learning_rate": 0.00025456923495040083, "loss": 3.4233, "step": 87800 }, { "epoch": 5.965824160891425, "grad_norm": 1.7865722179412842, "learning_rate": 0.00025452676994156817, "loss": 3.1113, "step": 87805 }, { "epoch": 5.966163880962087, "grad_norm": 1.1936763525009155, "learning_rate": 0.00025448430493273545, "loss": 3.5352, "step": 87810 }, { "epoch": 5.966503601032749, "grad_norm": 1.4508026838302612, "learning_rate": 0.00025444183992390267, "loss": 3.657, "step": 87815 }, { "epoch": 5.96684332110341, "grad_norm": 1.0918923616409302, "learning_rate": 0.00025439937491507, "loss": 3.2064, "step": 87820 }, { "epoch": 5.9671830411740725, "grad_norm": 1.1064289808273315, "learning_rate": 0.0002543569099062373, "loss": 3.3492, "step": 87825 }, { "epoch": 5.967522761244735, "grad_norm": 1.2992010116577148, "learning_rate": 0.0002543144448974045, "loss": 3.5567, "step": 87830 }, { "epoch": 
5.967862481315396, "grad_norm": 1.3670169115066528, "learning_rate": 0.0002542719798885718, "loss": 3.3109, "step": 87835 }, { "epoch": 5.968202201386058, "grad_norm": 1.3395788669586182, "learning_rate": 0.0002542295148797391, "loss": 3.2972, "step": 87840 }, { "epoch": 5.96854192145672, "grad_norm": 1.110848307609558, "learning_rate": 0.0002541870498709064, "loss": 3.3899, "step": 87845 }, { "epoch": 5.968881641527381, "grad_norm": 1.3180155754089355, "learning_rate": 0.00025414458486207363, "loss": 3.2738, "step": 87850 }, { "epoch": 5.969221361598043, "grad_norm": 1.3395564556121826, "learning_rate": 0.00025410211985324097, "loss": 3.4645, "step": 87855 }, { "epoch": 5.969561081668705, "grad_norm": 1.3388001918792725, "learning_rate": 0.00025405965484440825, "loss": 3.3072, "step": 87860 }, { "epoch": 5.969900801739366, "grad_norm": 1.1119173765182495, "learning_rate": 0.00025401718983557547, "loss": 3.3885, "step": 87865 }, { "epoch": 5.9702405218100285, "grad_norm": 1.0980387926101685, "learning_rate": 0.00025397472482674275, "loss": 3.3316, "step": 87870 }, { "epoch": 5.970580241880691, "grad_norm": 1.3702783584594727, "learning_rate": 0.0002539322598179101, "loss": 3.2125, "step": 87875 }, { "epoch": 5.970919961951352, "grad_norm": 1.297816276550293, "learning_rate": 0.0002538897948090773, "loss": 3.2605, "step": 87880 }, { "epoch": 5.971259682022014, "grad_norm": 1.1437597274780273, "learning_rate": 0.0002538473298002446, "loss": 3.4688, "step": 87885 }, { "epoch": 5.971599402092676, "grad_norm": 1.54869544506073, "learning_rate": 0.00025380486479141193, "loss": 3.1078, "step": 87890 }, { "epoch": 5.971939122163337, "grad_norm": 1.173815131187439, "learning_rate": 0.00025376239978257915, "loss": 3.399, "step": 87895 }, { "epoch": 5.972278842233999, "grad_norm": 1.224195122718811, "learning_rate": 0.00025371993477374643, "loss": 3.1551, "step": 87900 }, { "epoch": 5.972618562304661, "grad_norm": 1.0073109865188599, "learning_rate": 0.0002536774697649137, 
"loss": 3.1201, "step": 87905 }, { "epoch": 5.9729582823753224, "grad_norm": 1.2901055812835693, "learning_rate": 0.000253635004756081, "loss": 3.4577, "step": 87910 }, { "epoch": 5.9732980024459845, "grad_norm": 1.0612558126449585, "learning_rate": 0.0002535925397472483, "loss": 3.3362, "step": 87915 }, { "epoch": 5.973637722516647, "grad_norm": 1.317739725112915, "learning_rate": 0.00025355007473841555, "loss": 3.3054, "step": 87920 }, { "epoch": 5.973977442587308, "grad_norm": 1.3014165163040161, "learning_rate": 0.00025350760972958283, "loss": 3.2265, "step": 87925 }, { "epoch": 5.97431716265797, "grad_norm": 1.1760262250900269, "learning_rate": 0.0002534651447207501, "loss": 3.418, "step": 87930 }, { "epoch": 5.974656882728632, "grad_norm": 1.3110287189483643, "learning_rate": 0.0002534226797119174, "loss": 3.3148, "step": 87935 }, { "epoch": 5.974996602799293, "grad_norm": 1.150091528892517, "learning_rate": 0.0002533802147030846, "loss": 3.0971, "step": 87940 }, { "epoch": 5.975336322869955, "grad_norm": 1.051282525062561, "learning_rate": 0.00025333774969425195, "loss": 3.3705, "step": 87945 }, { "epoch": 5.975676042940617, "grad_norm": 1.1315213441848755, "learning_rate": 0.00025329528468541923, "loss": 3.1474, "step": 87950 }, { "epoch": 5.9760157630112785, "grad_norm": 1.1658357381820679, "learning_rate": 0.00025325281967658646, "loss": 3.1298, "step": 87955 }, { "epoch": 5.9763554830819405, "grad_norm": 1.4127230644226074, "learning_rate": 0.0002532103546677538, "loss": 3.4431, "step": 87960 }, { "epoch": 5.976695203152603, "grad_norm": 1.1704074144363403, "learning_rate": 0.0002531678896589211, "loss": 3.176, "step": 87965 }, { "epoch": 5.977034923223264, "grad_norm": 1.1115272045135498, "learning_rate": 0.0002531254246500883, "loss": 3.3993, "step": 87970 }, { "epoch": 5.977374643293926, "grad_norm": 1.6588718891143799, "learning_rate": 0.0002530829596412556, "loss": 3.5397, "step": 87975 }, { "epoch": 5.977714363364588, "grad_norm": 
1.283518671989441, "learning_rate": 0.0002530404946324229, "loss": 3.3494, "step": 87980 }, { "epoch": 5.978054083435249, "grad_norm": 0.9633604884147644, "learning_rate": 0.00025299802962359014, "loss": 3.5017, "step": 87985 }, { "epoch": 5.978393803505911, "grad_norm": 1.2759660482406616, "learning_rate": 0.0002529555646147574, "loss": 3.1438, "step": 87990 }, { "epoch": 5.978733523576573, "grad_norm": 1.253831148147583, "learning_rate": 0.00025291309960592475, "loss": 3.4965, "step": 87995 }, { "epoch": 5.9790732436472345, "grad_norm": 1.3738956451416016, "learning_rate": 0.000252870634597092, "loss": 3.2776, "step": 88000 }, { "epoch": 5.9794129637178965, "grad_norm": 1.270563006401062, "learning_rate": 0.00025282816958825926, "loss": 3.1838, "step": 88005 }, { "epoch": 5.979752683788558, "grad_norm": 1.1031417846679688, "learning_rate": 0.00025278570457942654, "loss": 3.0748, "step": 88010 }, { "epoch": 5.98009240385922, "grad_norm": 1.4998890161514282, "learning_rate": 0.0002527432395705939, "loss": 3.3124, "step": 88015 }, { "epoch": 5.980432123929882, "grad_norm": 1.1683610677719116, "learning_rate": 0.0002527007745617611, "loss": 3.4482, "step": 88020 }, { "epoch": 5.980771844000543, "grad_norm": 1.1524875164031982, "learning_rate": 0.0002526583095529284, "loss": 3.2092, "step": 88025 }, { "epoch": 5.981111564071205, "grad_norm": 1.3895951509475708, "learning_rate": 0.0002526158445440957, "loss": 3.4515, "step": 88030 }, { "epoch": 5.981451284141867, "grad_norm": 1.3338336944580078, "learning_rate": 0.00025257337953526294, "loss": 3.4874, "step": 88035 }, { "epoch": 5.981791004212528, "grad_norm": 1.6464558839797974, "learning_rate": 0.0002525309145264302, "loss": 3.4175, "step": 88040 }, { "epoch": 5.9821307242831905, "grad_norm": 1.162819743156433, "learning_rate": 0.00025248844951759756, "loss": 3.2583, "step": 88045 }, { "epoch": 5.982470444353853, "grad_norm": 1.2227791547775269, "learning_rate": 0.0002524459845087648, "loss": 3.3226, "step": 88050 }, 
{ "epoch": 5.982810164424514, "grad_norm": 1.205835223197937, "learning_rate": 0.00025240351949993206, "loss": 3.2569, "step": 88055 }, { "epoch": 5.983149884495176, "grad_norm": 1.1183290481567383, "learning_rate": 0.00025236105449109934, "loss": 3.4138, "step": 88060 }, { "epoch": 5.983489604565838, "grad_norm": 1.1897763013839722, "learning_rate": 0.0002523185894822666, "loss": 3.4022, "step": 88065 }, { "epoch": 5.983829324636499, "grad_norm": 1.3194082975387573, "learning_rate": 0.0002522761244734339, "loss": 3.5485, "step": 88070 }, { "epoch": 5.984169044707161, "grad_norm": 1.1175968647003174, "learning_rate": 0.0002522336594646012, "loss": 3.0376, "step": 88075 }, { "epoch": 5.984508764777823, "grad_norm": 1.7377127408981323, "learning_rate": 0.00025219119445576846, "loss": 3.4578, "step": 88080 }, { "epoch": 5.984848484848484, "grad_norm": 1.4891215562820435, "learning_rate": 0.00025214872944693574, "loss": 2.9854, "step": 88085 }, { "epoch": 5.9851882049191465, "grad_norm": 1.1546529531478882, "learning_rate": 0.000252106264438103, "loss": 3.4129, "step": 88090 }, { "epoch": 5.985527924989809, "grad_norm": 1.2380222082138062, "learning_rate": 0.00025206379942927025, "loss": 3.0336, "step": 88095 }, { "epoch": 5.98586764506047, "grad_norm": 1.3897780179977417, "learning_rate": 0.0002520213344204376, "loss": 3.4233, "step": 88100 }, { "epoch": 5.986207365131132, "grad_norm": 0.9697067141532898, "learning_rate": 0.00025197886941160486, "loss": 3.2872, "step": 88105 }, { "epoch": 5.986547085201794, "grad_norm": 1.2481434345245361, "learning_rate": 0.0002519364044027721, "loss": 3.4994, "step": 88110 }, { "epoch": 5.986886805272455, "grad_norm": 1.0484619140625, "learning_rate": 0.0002518939393939394, "loss": 3.5197, "step": 88115 }, { "epoch": 5.987226525343117, "grad_norm": 1.0611562728881836, "learning_rate": 0.0002518514743851067, "loss": 3.2354, "step": 88120 }, { "epoch": 5.987566245413779, "grad_norm": 1.2248317003250122, "learning_rate": 
0.00025180900937627393, "loss": 3.1747, "step": 88125 }, { "epoch": 5.98790596548444, "grad_norm": 1.3465690612792969, "learning_rate": 0.0002517665443674412, "loss": 3.0172, "step": 88130 }, { "epoch": 5.9882456855551025, "grad_norm": 1.2119574546813965, "learning_rate": 0.00025172407935860854, "loss": 3.0763, "step": 88135 }, { "epoch": 5.988585405625765, "grad_norm": 1.3603894710540771, "learning_rate": 0.00025168161434977577, "loss": 3.2541, "step": 88140 }, { "epoch": 5.988925125696426, "grad_norm": 1.1568117141723633, "learning_rate": 0.00025163914934094305, "loss": 3.1127, "step": 88145 }, { "epoch": 5.989264845767088, "grad_norm": 1.3301845788955688, "learning_rate": 0.0002515966843321104, "loss": 3.1901, "step": 88150 }, { "epoch": 5.98960456583775, "grad_norm": 1.2590808868408203, "learning_rate": 0.0002515542193232776, "loss": 3.3717, "step": 88155 }, { "epoch": 5.989944285908411, "grad_norm": 1.3459135293960571, "learning_rate": 0.0002515117543144449, "loss": 3.2785, "step": 88160 }, { "epoch": 5.990284005979073, "grad_norm": 1.3422529697418213, "learning_rate": 0.00025146928930561217, "loss": 3.0831, "step": 88165 }, { "epoch": 5.990623726049735, "grad_norm": 1.4582725763320923, "learning_rate": 0.00025142682429677945, "loss": 3.6213, "step": 88170 }, { "epoch": 5.990963446120396, "grad_norm": 1.5478520393371582, "learning_rate": 0.00025138435928794673, "loss": 3.2549, "step": 88175 }, { "epoch": 5.9913031661910585, "grad_norm": 1.7274540662765503, "learning_rate": 0.000251341894279114, "loss": 3.5517, "step": 88180 }, { "epoch": 5.991642886261721, "grad_norm": 1.3320192098617554, "learning_rate": 0.00025129942927028134, "loss": 3.2928, "step": 88185 }, { "epoch": 5.991982606332382, "grad_norm": 1.2586026191711426, "learning_rate": 0.00025125696426144857, "loss": 3.2276, "step": 88190 }, { "epoch": 5.992322326403044, "grad_norm": 1.412428855895996, "learning_rate": 0.00025121449925261585, "loss": 3.0318, "step": 88195 }, { "epoch": 5.992662046473706, 
"grad_norm": 1.1652624607086182, "learning_rate": 0.00025117203424378313, "loss": 3.4688, "step": 88200 }, { "epoch": 5.993001766544367, "grad_norm": 1.110079050064087, "learning_rate": 0.0002511295692349504, "loss": 3.1807, "step": 88205 }, { "epoch": 5.993341486615029, "grad_norm": 1.1577515602111816, "learning_rate": 0.0002510871042261177, "loss": 3.3266, "step": 88210 }, { "epoch": 5.993681206685691, "grad_norm": 1.156934142112732, "learning_rate": 0.00025104463921728497, "loss": 3.094, "step": 88215 }, { "epoch": 5.9940209267563525, "grad_norm": 1.0921847820281982, "learning_rate": 0.00025100217420845225, "loss": 3.2148, "step": 88220 }, { "epoch": 5.9943606468270145, "grad_norm": 1.166025996208191, "learning_rate": 0.00025095970919961953, "loss": 3.4332, "step": 88225 }, { "epoch": 5.994700366897677, "grad_norm": 1.4755433797836304, "learning_rate": 0.0002509172441907868, "loss": 3.5018, "step": 88230 }, { "epoch": 5.995040086968338, "grad_norm": 1.5851027965545654, "learning_rate": 0.00025087477918195404, "loss": 3.5652, "step": 88235 }, { "epoch": 5.995379807039, "grad_norm": 1.2723841667175293, "learning_rate": 0.00025083231417312137, "loss": 3.438, "step": 88240 }, { "epoch": 5.995719527109662, "grad_norm": 1.4610234498977661, "learning_rate": 0.00025078984916428865, "loss": 3.2105, "step": 88245 }, { "epoch": 5.996059247180323, "grad_norm": 1.3721877336502075, "learning_rate": 0.0002507473841554559, "loss": 3.4542, "step": 88250 }, { "epoch": 5.996398967250985, "grad_norm": 1.171910047531128, "learning_rate": 0.0002507049191466232, "loss": 3.1159, "step": 88255 }, { "epoch": 5.996738687321647, "grad_norm": 1.152916431427002, "learning_rate": 0.0002506624541377905, "loss": 3.2335, "step": 88260 }, { "epoch": 5.9970784073923085, "grad_norm": 1.4506855010986328, "learning_rate": 0.0002506199891289577, "loss": 3.4387, "step": 88265 }, { "epoch": 5.9974181274629705, "grad_norm": 1.3970074653625488, "learning_rate": 0.000250577524120125, "loss": 3.1647, 
"step": 88270 }, { "epoch": 5.997757847533633, "grad_norm": 1.0895073413848877, "learning_rate": 0.00025053505911129233, "loss": 3.1345, "step": 88275 }, { "epoch": 5.998097567604294, "grad_norm": 1.1730953454971313, "learning_rate": 0.00025049259410245956, "loss": 3.4425, "step": 88280 }, { "epoch": 5.998437287674956, "grad_norm": 1.2059727907180786, "learning_rate": 0.00025045012909362684, "loss": 3.3876, "step": 88285 }, { "epoch": 5.998777007745618, "grad_norm": 1.2242447137832642, "learning_rate": 0.00025040766408479417, "loss": 3.3906, "step": 88290 }, { "epoch": 5.999116727816279, "grad_norm": 1.1768255233764648, "learning_rate": 0.0002503651990759614, "loss": 3.2627, "step": 88295 }, { "epoch": 5.999456447886941, "grad_norm": 1.6515408754348755, "learning_rate": 0.0002503227340671287, "loss": 3.4263, "step": 88300 }, { "epoch": 5.999796167957603, "grad_norm": 1.5158902406692505, "learning_rate": 0.000250280269058296, "loss": 3.3502, "step": 88305 }, { "epoch": 6.0, "eval_bertscore": { "f1": 0.8422207021992825, "precision": 0.8439389901609889, "recall": 0.8413375254309978 }, "eval_bleu_4": 0.02093923396756236, "eval_exact_match": 0.00019381723035177828, "eval_loss": 3.356447219848633, "eval_meteor": 0.09137562279064966, "eval_rouge": { "rouge1": 0.13099331864246871, "rouge2": 0.02043444665315017, "rougeL": 0.11323675007816342, "rougeLsum": 0.11335679781908245 }, "eval_runtime": 1751.8602, "eval_samples_per_second": 5.89, "eval_steps_per_second": 0.736, "step": 88308 } ], "logging_steps": 5, "max_steps": 117744, "num_input_tokens_seen": 0, "num_train_epochs": 8, "save_steps": 500, "stateful_callbacks": { "TrainerControl": { "args": { "should_epoch_stop": false, "should_evaluate": false, "should_log": false, "should_save": true, "should_training_stop": false }, "attributes": {} } }, "total_flos": 4.442687294794629e+17, "train_batch_size": 8, "trial_name": null, "trial_params": null }