| { |
| "best_global_step": null, |
| "best_metric": null, |
| "best_model_checkpoint": null, |
| "epoch": 3.0, |
| "eval_steps": 500, |
| "global_step": 3702, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.008103727714748784, |
| "grad_norm": 3.9317815161633276, |
| "learning_rate": 4.851752021563343e-07, |
| "loss": 0.6293, |
| "step": 10 |
| }, |
| { |
| "epoch": 0.01620745542949757, |
| "grad_norm": 2.0418755784879634, |
| "learning_rate": 1.0242587601078167e-06, |
| "loss": 0.6102, |
| "step": 20 |
| }, |
| { |
| "epoch": 0.024311183144246355, |
| "grad_norm": 1.3573676592550898, |
| "learning_rate": 1.5633423180592994e-06, |
| "loss": 0.5537, |
| "step": 30 |
| }, |
| { |
| "epoch": 0.03241491085899514, |
| "grad_norm": 1.0559835418958263, |
| "learning_rate": 2.1024258760107817e-06, |
| "loss": 0.4928, |
| "step": 40 |
| }, |
| { |
| "epoch": 0.04051863857374392, |
| "grad_norm": 1.1231946952797747, |
| "learning_rate": 2.6415094339622644e-06, |
| "loss": 0.4757, |
| "step": 50 |
| }, |
| { |
| "epoch": 0.04862236628849271, |
| "grad_norm": 0.9206334418541732, |
| "learning_rate": 3.1805929919137467e-06, |
| "loss": 0.4503, |
| "step": 60 |
| }, |
| { |
| "epoch": 0.05672609400324149, |
| "grad_norm": 0.7306789030238736, |
| "learning_rate": 3.7196765498652294e-06, |
| "loss": 0.4462, |
| "step": 70 |
| }, |
| { |
| "epoch": 0.06482982171799027, |
| "grad_norm": 0.6319281525271221, |
| "learning_rate": 4.258760107816712e-06, |
| "loss": 0.4371, |
| "step": 80 |
| }, |
| { |
| "epoch": 0.07293354943273905, |
| "grad_norm": 0.8818018224803541, |
| "learning_rate": 4.797843665768194e-06, |
| "loss": 0.4203, |
| "step": 90 |
| }, |
| { |
| "epoch": 0.08103727714748785, |
| "grad_norm": 2.3155217136018322, |
| "learning_rate": 5.336927223719677e-06, |
| "loss": 0.4309, |
| "step": 100 |
| }, |
| { |
| "epoch": 0.08914100486223663, |
| "grad_norm": 0.7366962662256316, |
| "learning_rate": 5.8760107816711595e-06, |
| "loss": 0.4134, |
| "step": 110 |
| }, |
| { |
| "epoch": 0.09724473257698542, |
| "grad_norm": 1.3014445246025408, |
| "learning_rate": 6.415094339622642e-06, |
| "loss": 0.4055, |
| "step": 120 |
| }, |
| { |
| "epoch": 0.1053484602917342, |
| "grad_norm": 0.6356998529279267, |
| "learning_rate": 6.954177897574125e-06, |
| "loss": 0.4113, |
| "step": 130 |
| }, |
| { |
| "epoch": 0.11345218800648298, |
| "grad_norm": 0.6612274605066358, |
| "learning_rate": 7.493261455525606e-06, |
| "loss": 0.4064, |
| "step": 140 |
| }, |
| { |
| "epoch": 0.12155591572123177, |
| "grad_norm": 0.7689051780142585, |
| "learning_rate": 8.03234501347709e-06, |
| "loss": 0.4033, |
| "step": 150 |
| }, |
| { |
| "epoch": 0.12965964343598055, |
| "grad_norm": 1.0607666629354113, |
| "learning_rate": 8.571428571428571e-06, |
| "loss": 0.4061, |
| "step": 160 |
| }, |
| { |
| "epoch": 0.13776337115072934, |
| "grad_norm": 0.7783185643172053, |
| "learning_rate": 9.110512129380054e-06, |
| "loss": 0.4089, |
| "step": 170 |
| }, |
| { |
| "epoch": 0.1458670988654781, |
| "grad_norm": 0.7588540625222094, |
| "learning_rate": 9.649595687331537e-06, |
| "loss": 0.3975, |
| "step": 180 |
| }, |
| { |
| "epoch": 0.1539708265802269, |
| "grad_norm": 1.1303368297907246, |
| "learning_rate": 1.018867924528302e-05, |
| "loss": 0.4035, |
| "step": 190 |
| }, |
| { |
| "epoch": 0.1620745542949757, |
| "grad_norm": 0.6741951880065019, |
| "learning_rate": 1.0727762803234503e-05, |
| "loss": 0.3999, |
| "step": 200 |
| }, |
| { |
| "epoch": 0.17017828200972449, |
| "grad_norm": 0.9112923080490107, |
| "learning_rate": 1.1266846361185985e-05, |
| "loss": 0.3895, |
| "step": 210 |
| }, |
| { |
| "epoch": 0.17828200972447325, |
| "grad_norm": 0.6125528001380088, |
| "learning_rate": 1.1805929919137466e-05, |
| "loss": 0.3983, |
| "step": 220 |
| }, |
| { |
| "epoch": 0.18638573743922204, |
| "grad_norm": 0.714919551884383, |
| "learning_rate": 1.234501347708895e-05, |
| "loss": 0.3881, |
| "step": 230 |
| }, |
| { |
| "epoch": 0.19448946515397084, |
| "grad_norm": 0.6079293525218256, |
| "learning_rate": 1.2884097035040433e-05, |
| "loss": 0.3874, |
| "step": 240 |
| }, |
| { |
| "epoch": 0.2025931928687196, |
| "grad_norm": 0.664658131122071, |
| "learning_rate": 1.3423180592991916e-05, |
| "loss": 0.4012, |
| "step": 250 |
| }, |
| { |
| "epoch": 0.2106969205834684, |
| "grad_norm": 0.5125134214430775, |
| "learning_rate": 1.3962264150943397e-05, |
| "loss": 0.3846, |
| "step": 260 |
| }, |
| { |
| "epoch": 0.2188006482982172, |
| "grad_norm": 0.7547282139400072, |
| "learning_rate": 1.4501347708894879e-05, |
| "loss": 0.3871, |
| "step": 270 |
| }, |
| { |
| "epoch": 0.22690437601296595, |
| "grad_norm": 0.7512746325692048, |
| "learning_rate": 1.5040431266846362e-05, |
| "loss": 0.3935, |
| "step": 280 |
| }, |
| { |
| "epoch": 0.23500810372771475, |
| "grad_norm": 0.6266337459533123, |
| "learning_rate": 1.5579514824797845e-05, |
| "loss": 0.4045, |
| "step": 290 |
| }, |
| { |
| "epoch": 0.24311183144246354, |
| "grad_norm": 0.6114987217161486, |
| "learning_rate": 1.6118598382749326e-05, |
| "loss": 0.3957, |
| "step": 300 |
| }, |
| { |
| "epoch": 0.25121555915721233, |
| "grad_norm": 0.7397528168001368, |
| "learning_rate": 1.6657681940700808e-05, |
| "loss": 0.3956, |
| "step": 310 |
| }, |
| { |
| "epoch": 0.2593192868719611, |
| "grad_norm": 0.5583712896018308, |
| "learning_rate": 1.7196765498652293e-05, |
| "loss": 0.3858, |
| "step": 320 |
| }, |
| { |
| "epoch": 0.26742301458670986, |
| "grad_norm": 2.880818229874767, |
| "learning_rate": 1.7735849056603774e-05, |
| "loss": 0.3968, |
| "step": 330 |
| }, |
| { |
| "epoch": 0.2755267423014587, |
| "grad_norm": 0.5662243201059273, |
| "learning_rate": 1.827493261455526e-05, |
| "loss": 0.388, |
| "step": 340 |
| }, |
| { |
| "epoch": 0.28363047001620745, |
| "grad_norm": 0.5243749947639336, |
| "learning_rate": 1.881401617250674e-05, |
| "loss": 0.3882, |
| "step": 350 |
| }, |
| { |
| "epoch": 0.2917341977309562, |
| "grad_norm": 0.617467124575289, |
| "learning_rate": 1.9353099730458222e-05, |
| "loss": 0.3881, |
| "step": 360 |
| }, |
| { |
| "epoch": 0.29983792544570503, |
| "grad_norm": 0.5048328310575497, |
| "learning_rate": 1.9892183288409707e-05, |
| "loss": 0.3959, |
| "step": 370 |
| }, |
| { |
| "epoch": 0.3079416531604538, |
| "grad_norm": 0.5738969287135204, |
| "learning_rate": 1.999971535838293e-05, |
| "loss": 0.3967, |
| "step": 380 |
| }, |
| { |
| "epoch": 0.3160453808752026, |
| "grad_norm": 0.5818193729866968, |
| "learning_rate": 1.999855902958531e-05, |
| "loss": 0.3919, |
| "step": 390 |
| }, |
| { |
| "epoch": 0.3241491085899514, |
| "grad_norm": 0.5820678537371953, |
| "learning_rate": 1.999651332628271e-05, |
| "loss": 0.393, |
| "step": 400 |
| }, |
| { |
| "epoch": 0.33225283630470015, |
| "grad_norm": 0.5813573966227891, |
| "learning_rate": 1.9993578430440986e-05, |
| "loss": 0.3992, |
| "step": 410 |
| }, |
| { |
| "epoch": 0.34035656401944897, |
| "grad_norm": 0.5559605824732404, |
| "learning_rate": 1.9989754603119914e-05, |
| "loss": 0.3917, |
| "step": 420 |
| }, |
| { |
| "epoch": 0.34846029173419774, |
| "grad_norm": 0.528966803593413, |
| "learning_rate": 1.9985042184449953e-05, |
| "loss": 0.3919, |
| "step": 430 |
| }, |
| { |
| "epoch": 0.3565640194489465, |
| "grad_norm": 0.6172583160510402, |
| "learning_rate": 1.997944159360201e-05, |
| "loss": 0.3793, |
| "step": 440 |
| }, |
| { |
| "epoch": 0.3646677471636953, |
| "grad_norm": 0.5526432953834083, |
| "learning_rate": 1.997295332875014e-05, |
| "loss": 0.3885, |
| "step": 450 |
| }, |
| { |
| "epoch": 0.3727714748784441, |
| "grad_norm": 0.7295459300561044, |
| "learning_rate": 1.9965577967027244e-05, |
| "loss": 0.3899, |
| "step": 460 |
| }, |
| { |
| "epoch": 0.38087520259319285, |
| "grad_norm": 0.5028553074346275, |
| "learning_rate": 1.9957316164473717e-05, |
| "loss": 0.3867, |
| "step": 470 |
| }, |
| { |
| "epoch": 0.3889789303079417, |
| "grad_norm": 0.5118627037246366, |
| "learning_rate": 1.9948168655979115e-05, |
| "loss": 0.3872, |
| "step": 480 |
| }, |
| { |
| "epoch": 0.39708265802269044, |
| "grad_norm": 0.5030996867937232, |
| "learning_rate": 1.993813625521677e-05, |
| "loss": 0.3763, |
| "step": 490 |
| }, |
| { |
| "epoch": 0.4051863857374392, |
| "grad_norm": 0.6769814230173139, |
| "learning_rate": 1.9927219854571415e-05, |
| "loss": 0.3834, |
| "step": 500 |
| }, |
| { |
| "epoch": 0.413290113452188, |
| "grad_norm": 0.4695176663309419, |
| "learning_rate": 1.9915420425059816e-05, |
| "loss": 0.3778, |
| "step": 510 |
| }, |
| { |
| "epoch": 0.4213938411669368, |
| "grad_norm": 0.6980556842825765, |
| "learning_rate": 1.9902739016244386e-05, |
| "loss": 0.3788, |
| "step": 520 |
| }, |
| { |
| "epoch": 0.42949756888168555, |
| "grad_norm": 0.9136785259862569, |
| "learning_rate": 1.9889176756139838e-05, |
| "loss": 0.387, |
| "step": 530 |
| }, |
| { |
| "epoch": 0.4376012965964344, |
| "grad_norm": 0.6431846972330396, |
| "learning_rate": 1.987473485111285e-05, |
| "loss": 0.3861, |
| "step": 540 |
| }, |
| { |
| "epoch": 0.44570502431118314, |
| "grad_norm": 4.0506189084135125, |
| "learning_rate": 1.9859414585774735e-05, |
| "loss": 0.3885, |
| "step": 550 |
| }, |
| { |
| "epoch": 0.4538087520259319, |
| "grad_norm": 0.503007223612934, |
| "learning_rate": 1.9843217322867204e-05, |
| "loss": 0.3828, |
| "step": 560 |
| }, |
| { |
| "epoch": 0.4619124797406807, |
| "grad_norm": 1.1216627204622915, |
| "learning_rate": 1.9826144503141132e-05, |
| "loss": 0.3872, |
| "step": 570 |
| }, |
| { |
| "epoch": 0.4700162074554295, |
| "grad_norm": 0.5287649700448528, |
| "learning_rate": 1.9808197645228408e-05, |
| "loss": 0.376, |
| "step": 580 |
| }, |
| { |
| "epoch": 0.47811993517017826, |
| "grad_norm": 0.8701265223405853, |
| "learning_rate": 1.9789378345506854e-05, |
| "loss": 0.3747, |
| "step": 590 |
| }, |
| { |
| "epoch": 0.4862236628849271, |
| "grad_norm": 0.55683050719409, |
| "learning_rate": 1.976968827795822e-05, |
| "loss": 0.3904, |
| "step": 600 |
| }, |
| { |
| "epoch": 0.49432739059967584, |
| "grad_norm": 0.6665421175321402, |
| "learning_rate": 1.97491291940193e-05, |
| "loss": 0.3824, |
| "step": 610 |
| }, |
| { |
| "epoch": 0.5024311183144247, |
| "grad_norm": 1.4275272713851255, |
| "learning_rate": 1.972770292242611e-05, |
| "loss": 0.3838, |
| "step": 620 |
| }, |
| { |
| "epoch": 0.5105348460291734, |
| "grad_norm": 0.45131485278613265, |
| "learning_rate": 1.9705411369051252e-05, |
| "loss": 0.3721, |
| "step": 630 |
| }, |
| { |
| "epoch": 0.5186385737439222, |
| "grad_norm": 0.5107521727226195, |
| "learning_rate": 1.9682256516734377e-05, |
| "loss": 0.3855, |
| "step": 640 |
| }, |
| { |
| "epoch": 0.526742301458671, |
| "grad_norm": 0.6278390278606955, |
| "learning_rate": 1.9658240425105792e-05, |
| "loss": 0.375, |
| "step": 650 |
| }, |
| { |
| "epoch": 0.5348460291734197, |
| "grad_norm": 0.4581632394005937, |
| "learning_rate": 1.9633365230403294e-05, |
| "loss": 0.3758, |
| "step": 660 |
| }, |
| { |
| "epoch": 0.5429497568881686, |
| "grad_norm": 0.5567975622988115, |
| "learning_rate": 1.9607633145282117e-05, |
| "loss": 0.3768, |
| "step": 670 |
| }, |
| { |
| "epoch": 0.5510534846029174, |
| "grad_norm": 0.6771278695211652, |
| "learning_rate": 1.958104645861812e-05, |
| "loss": 0.3758, |
| "step": 680 |
| }, |
| { |
| "epoch": 0.5591572123176661, |
| "grad_norm": 0.48077746516463726, |
| "learning_rate": 1.9553607535304214e-05, |
| "loss": 0.3823, |
| "step": 690 |
| }, |
| { |
| "epoch": 0.5672609400324149, |
| "grad_norm": 0.4517126844556044, |
| "learning_rate": 1.9525318816039975e-05, |
| "loss": 0.3763, |
| "step": 700 |
| }, |
| { |
| "epoch": 0.5753646677471637, |
| "grad_norm": 0.4807211148263866, |
| "learning_rate": 1.9496182817114567e-05, |
| "loss": 0.3836, |
| "step": 710 |
| }, |
| { |
| "epoch": 0.5834683954619124, |
| "grad_norm": 0.5502261204263199, |
| "learning_rate": 1.9466202130182898e-05, |
| "loss": 0.3801, |
| "step": 720 |
| }, |
| { |
| "epoch": 0.5915721231766613, |
| "grad_norm": 0.5675413081917593, |
| "learning_rate": 1.9435379422035104e-05, |
| "loss": 0.3708, |
| "step": 730 |
| }, |
| { |
| "epoch": 0.5996758508914101, |
| "grad_norm": 0.6578972439110701, |
| "learning_rate": 1.940371743435934e-05, |
| "loss": 0.374, |
| "step": 740 |
| }, |
| { |
| "epoch": 0.6077795786061588, |
| "grad_norm": 0.4769676889929452, |
| "learning_rate": 1.9371218983497888e-05, |
| "loss": 0.378, |
| "step": 750 |
| }, |
| { |
| "epoch": 0.6158833063209076, |
| "grad_norm": 0.42273081946374813, |
| "learning_rate": 1.9337886960196666e-05, |
| "loss": 0.3778, |
| "step": 760 |
| }, |
| { |
| "epoch": 0.6239870340356564, |
| "grad_norm": 0.42624221161598685, |
| "learning_rate": 1.9303724329348072e-05, |
| "loss": 0.3755, |
| "step": 770 |
| }, |
| { |
| "epoch": 0.6320907617504052, |
| "grad_norm": 0.642793603293006, |
| "learning_rate": 1.926873412972728e-05, |
| "loss": 0.3642, |
| "step": 780 |
| }, |
| { |
| "epoch": 0.640194489465154, |
| "grad_norm": 0.4956524298763809, |
| "learning_rate": 1.9232919473721918e-05, |
| "loss": 0.3676, |
| "step": 790 |
| }, |
| { |
| "epoch": 0.6482982171799028, |
| "grad_norm": 0.4654521059254106, |
| "learning_rate": 1.919628354705524e-05, |
| "loss": 0.3767, |
| "step": 800 |
| }, |
| { |
| "epoch": 0.6564019448946515, |
| "grad_norm": 0.4824070792378254, |
| "learning_rate": 1.915882960850274e-05, |
| "loss": 0.3726, |
| "step": 810 |
| }, |
| { |
| "epoch": 0.6645056726094003, |
| "grad_norm": 1.2211147936308497, |
| "learning_rate": 1.9120560989602282e-05, |
| "loss": 0.3773, |
| "step": 820 |
| }, |
| { |
| "epoch": 0.6726094003241491, |
| "grad_norm": 0.4847195686066869, |
| "learning_rate": 1.9081481094357778e-05, |
| "loss": 0.3749, |
| "step": 830 |
| }, |
| { |
| "epoch": 0.6807131280388979, |
| "grad_norm": 14.297698915976696, |
| "learning_rate": 1.904159339893638e-05, |
| "loss": 0.368, |
| "step": 840 |
| }, |
| { |
| "epoch": 0.6888168557536467, |
| "grad_norm": 0.46530970562336593, |
| "learning_rate": 1.9000901451359286e-05, |
| "loss": 0.3709, |
| "step": 850 |
| }, |
| { |
| "epoch": 0.6969205834683955, |
| "grad_norm": 0.5964141915082521, |
| "learning_rate": 1.8959408871186134e-05, |
| "loss": 0.3739, |
| "step": 860 |
| }, |
| { |
| "epoch": 0.7050243111831442, |
| "grad_norm": 0.5763889905076917, |
| "learning_rate": 1.8917119349193054e-05, |
| "loss": 0.3729, |
| "step": 870 |
| }, |
| { |
| "epoch": 0.713128038897893, |
| "grad_norm": 1.1796003590565691, |
| "learning_rate": 1.8874036647044356e-05, |
| "loss": 0.3687, |
| "step": 880 |
| }, |
| { |
| "epoch": 0.7212317666126418, |
| "grad_norm": 0.5712612474372325, |
| "learning_rate": 1.883016459695794e-05, |
| "loss": 0.3713, |
| "step": 890 |
| }, |
| { |
| "epoch": 0.7293354943273906, |
| "grad_norm": 0.5594762253059221, |
| "learning_rate": 1.8785507101364423e-05, |
| "loss": 0.3735, |
| "step": 900 |
| }, |
| { |
| "epoch": 0.7374392220421394, |
| "grad_norm": 0.4738376188490885, |
| "learning_rate": 1.8740068132559995e-05, |
| "loss": 0.3853, |
| "step": 910 |
| }, |
| { |
| "epoch": 0.7455429497568882, |
| "grad_norm": 0.5284483243004581, |
| "learning_rate": 1.869385173235312e-05, |
| "loss": 0.3678, |
| "step": 920 |
| }, |
| { |
| "epoch": 0.7536466774716369, |
| "grad_norm": 0.46182903799794156, |
| "learning_rate": 1.864686201170497e-05, |
| "loss": 0.3665, |
| "step": 930 |
| }, |
| { |
| "epoch": 0.7617504051863857, |
| "grad_norm": 0.4002058778150188, |
| "learning_rate": 1.8599103150363796e-05, |
| "loss": 0.3711, |
| "step": 940 |
| }, |
| { |
| "epoch": 0.7698541329011345, |
| "grad_norm": 0.6183520728928886, |
| "learning_rate": 1.8550579396493108e-05, |
| "loss": 0.3612, |
| "step": 950 |
| }, |
| { |
| "epoch": 0.7779578606158833, |
| "grad_norm": 0.8389295050934313, |
| "learning_rate": 1.8501295066293818e-05, |
| "loss": 0.3759, |
| "step": 960 |
| }, |
| { |
| "epoch": 0.7860615883306321, |
| "grad_norm": 0.4999353892957952, |
| "learning_rate": 1.84512545436203e-05, |
| "loss": 0.3606, |
| "step": 970 |
| }, |
| { |
| "epoch": 0.7941653160453809, |
| "grad_norm": 0.45736033254899117, |
| "learning_rate": 1.8400462279590455e-05, |
| "loss": 0.3702, |
| "step": 980 |
| }, |
| { |
| "epoch": 0.8022690437601296, |
| "grad_norm": 0.5722433571234294, |
| "learning_rate": 1.8348922792189786e-05, |
| "loss": 0.3653, |
| "step": 990 |
| }, |
| { |
| "epoch": 0.8103727714748784, |
| "grad_norm": 0.46919046318848373, |
| "learning_rate": 1.82966406658695e-05, |
| "loss": 0.3625, |
| "step": 1000 |
| }, |
| { |
| "epoch": 0.8184764991896273, |
| "grad_norm": 0.44375349667419184, |
| "learning_rate": 1.8243620551138752e-05, |
| "loss": 0.3662, |
| "step": 1010 |
| }, |
| { |
| "epoch": 0.826580226904376, |
| "grad_norm": 0.4471263607447999, |
| "learning_rate": 1.8189867164150946e-05, |
| "loss": 0.3654, |
| "step": 1020 |
| }, |
| { |
| "epoch": 0.8346839546191248, |
| "grad_norm": 0.501866537642701, |
| "learning_rate": 1.8135385286284263e-05, |
| "loss": 0.3605, |
| "step": 1030 |
| }, |
| { |
| "epoch": 0.8427876823338736, |
| "grad_norm": 0.6285621423008637, |
| "learning_rate": 1.8080179763716343e-05, |
| "loss": 0.3653, |
| "step": 1040 |
| }, |
| { |
| "epoch": 0.8508914100486223, |
| "grad_norm": 0.407873050803088, |
| "learning_rate": 1.8024255506993203e-05, |
| "loss": 0.3679, |
| "step": 1050 |
| }, |
| { |
| "epoch": 0.8589951377633711, |
| "grad_norm": 0.5477630887086236, |
| "learning_rate": 1.796761749059247e-05, |
| "loss": 0.3658, |
| "step": 1060 |
| }, |
| { |
| "epoch": 0.86709886547812, |
| "grad_norm": 0.42325808351405364, |
| "learning_rate": 1.791027075248088e-05, |
| "loss": 0.3612, |
| "step": 1070 |
| }, |
| { |
| "epoch": 0.8752025931928687, |
| "grad_norm": 0.5000862290517376, |
| "learning_rate": 1.7852220393666155e-05, |
| "loss": 0.3736, |
| "step": 1080 |
| }, |
| { |
| "epoch": 0.8833063209076175, |
| "grad_norm": 0.4797387067687876, |
| "learning_rate": 1.7793471577743263e-05, |
| "loss": 0.366, |
| "step": 1090 |
| }, |
| { |
| "epoch": 0.8914100486223663, |
| "grad_norm": 0.6060548002400858, |
| "learning_rate": 1.7734029530435128e-05, |
| "loss": 0.3728, |
| "step": 1100 |
| }, |
| { |
| "epoch": 0.899513776337115, |
| "grad_norm": 0.5213162494917053, |
| "learning_rate": 1.7673899539127785e-05, |
| "loss": 0.367, |
| "step": 1110 |
| }, |
| { |
| "epoch": 0.9076175040518638, |
| "grad_norm": 0.5292941155705279, |
| "learning_rate": 1.7613086952400072e-05, |
| "loss": 0.3797, |
| "step": 1120 |
| }, |
| { |
| "epoch": 0.9157212317666127, |
| "grad_norm": 0.4548729888143476, |
| "learning_rate": 1.7551597179547876e-05, |
| "loss": 0.3712, |
| "step": 1130 |
| }, |
| { |
| "epoch": 0.9238249594813615, |
| "grad_norm": 0.6169483754933206, |
| "learning_rate": 1.7489435690102975e-05, |
| "loss": 0.3761, |
| "step": 1140 |
| }, |
| { |
| "epoch": 0.9319286871961102, |
| "grad_norm": 0.8815296763800744, |
| "learning_rate": 1.7426608013346504e-05, |
| "loss": 0.3681, |
| "step": 1150 |
| }, |
| { |
| "epoch": 0.940032414910859, |
| "grad_norm": 0.9252301416025814, |
| "learning_rate": 1.7363119737817148e-05, |
| "loss": 0.3657, |
| "step": 1160 |
| }, |
| { |
| "epoch": 0.9481361426256077, |
| "grad_norm": 0.5463039416378292, |
| "learning_rate": 1.7298976510814026e-05, |
| "loss": 0.3759, |
| "step": 1170 |
| }, |
| { |
| "epoch": 0.9562398703403565, |
| "grad_norm": 0.4579458311721191, |
| "learning_rate": 1.723418403789438e-05, |
| "loss": 0.3714, |
| "step": 1180 |
| }, |
| { |
| "epoch": 0.9643435980551054, |
| "grad_norm": 0.476489073405017, |
| "learning_rate": 1.716874808236602e-05, |
| "loss": 0.3536, |
| "step": 1190 |
| }, |
| { |
| "epoch": 0.9724473257698542, |
| "grad_norm": 0.4913928713038406, |
| "learning_rate": 1.710267446477474e-05, |
| "loss": 0.3731, |
| "step": 1200 |
| }, |
| { |
| "epoch": 0.9805510534846029, |
| "grad_norm": 0.43140900742148275, |
| "learning_rate": 1.7035969062386527e-05, |
| "loss": 0.3634, |
| "step": 1210 |
| }, |
| { |
| "epoch": 0.9886547811993517, |
| "grad_norm": 0.5654914326244047, |
| "learning_rate": 1.69686378086648e-05, |
| "loss": 0.3726, |
| "step": 1220 |
| }, |
| { |
| "epoch": 0.9967585089141004, |
| "grad_norm": 0.5746153876278244, |
| "learning_rate": 1.6900686692742617e-05, |
| "loss": 0.3606, |
| "step": 1230 |
| }, |
| { |
| "epoch": 1.0048622366288493, |
| "grad_norm": 0.47883464445764196, |
| "learning_rate": 1.683212175888994e-05, |
| "loss": 0.3364, |
| "step": 1240 |
| }, |
| { |
| "epoch": 1.012965964343598, |
| "grad_norm": 0.4599866049860579, |
| "learning_rate": 1.6762949105976028e-05, |
| "loss": 0.3116, |
| "step": 1250 |
| }, |
| { |
| "epoch": 1.0210696920583469, |
| "grad_norm": 0.4535043511569735, |
| "learning_rate": 1.6693174886926878e-05, |
| "loss": 0.3136, |
| "step": 1260 |
| }, |
| { |
| "epoch": 1.0291734197730957, |
| "grad_norm": 0.6649061266031846, |
| "learning_rate": 1.6622805308177972e-05, |
| "loss": 0.3066, |
| "step": 1270 |
| }, |
| { |
| "epoch": 1.0372771474878444, |
| "grad_norm": 0.49333042287227813, |
| "learning_rate": 1.6551846629122204e-05, |
| "loss": 0.3121, |
| "step": 1280 |
| }, |
| { |
| "epoch": 1.0453808752025933, |
| "grad_norm": 0.4727340839446586, |
| "learning_rate": 1.648030516155309e-05, |
| "loss": 0.3125, |
| "step": 1290 |
| }, |
| { |
| "epoch": 1.053484602917342, |
| "grad_norm": 0.4789506057032804, |
| "learning_rate": 1.6408187269103334e-05, |
| "loss": 0.3083, |
| "step": 1300 |
| }, |
| { |
| "epoch": 1.0615883306320908, |
| "grad_norm": 0.6380998878618579, |
| "learning_rate": 1.63354993666788e-05, |
| "loss": 0.3088, |
| "step": 1310 |
| }, |
| { |
| "epoch": 1.0696920583468394, |
| "grad_norm": 0.40993650437587914, |
| "learning_rate": 1.626224791988789e-05, |
| "loss": 0.3135, |
| "step": 1320 |
| }, |
| { |
| "epoch": 1.0777957860615883, |
| "grad_norm": 0.7916073627292187, |
| "learning_rate": 1.618843944446642e-05, |
| "loss": 0.3078, |
| "step": 1330 |
| }, |
| { |
| "epoch": 1.0858995137763372, |
| "grad_norm": 0.3982036787757661, |
| "learning_rate": 1.6114080505698057e-05, |
| "loss": 0.3143, |
| "step": 1340 |
| }, |
| { |
| "epoch": 1.0940032414910859, |
| "grad_norm": 0.4886593804010818, |
| "learning_rate": 1.6039177717830334e-05, |
| "loss": 0.3146, |
| "step": 1350 |
| }, |
| { |
| "epoch": 1.1021069692058347, |
| "grad_norm": 0.4362085386164278, |
| "learning_rate": 1.5963737743486296e-05, |
| "loss": 0.3101, |
| "step": 1360 |
| }, |
| { |
| "epoch": 1.1102106969205834, |
| "grad_norm": 0.4832648288442626, |
| "learning_rate": 1.5887767293071886e-05, |
| "loss": 0.3191, |
| "step": 1370 |
| }, |
| { |
| "epoch": 1.1183144246353323, |
| "grad_norm": 3.12690234083941, |
| "learning_rate": 1.581127312417902e-05, |
| "loss": 0.3099, |
| "step": 1380 |
| }, |
| { |
| "epoch": 1.1264181523500811, |
| "grad_norm": 0.506742497350345, |
| "learning_rate": 1.5734262040984518e-05, |
| "loss": 0.3115, |
| "step": 1390 |
| }, |
| { |
| "epoch": 1.1345218800648298, |
| "grad_norm": 0.4326366081491033, |
| "learning_rate": 1.565674089364487e-05, |
| "loss": 0.306, |
| "step": 1400 |
| }, |
| { |
| "epoch": 1.1426256077795787, |
| "grad_norm": 0.5101383803446576, |
| "learning_rate": 1.5578716577686906e-05, |
| "loss": 0.3149, |
| "step": 1410 |
| }, |
| { |
| "epoch": 1.1507293354943273, |
| "grad_norm": 0.466467681207644, |
| "learning_rate": 1.550019603339444e-05, |
| "loss": 0.3094, |
| "step": 1420 |
| }, |
| { |
| "epoch": 1.1588330632090762, |
| "grad_norm": 0.4324192975231726, |
| "learning_rate": 1.542118624519092e-05, |
| "loss": 0.3194, |
| "step": 1430 |
| }, |
| { |
| "epoch": 1.1669367909238249, |
| "grad_norm": 0.5451019100392235, |
| "learning_rate": 1.5341694241018186e-05, |
| "loss": 0.3094, |
| "step": 1440 |
| }, |
| { |
| "epoch": 1.1750405186385737, |
| "grad_norm": 0.5038807940683517, |
| "learning_rate": 1.5261727091711304e-05, |
| "loss": 0.3091, |
| "step": 1450 |
| }, |
| { |
| "epoch": 1.1831442463533226, |
| "grad_norm": 0.5307483433500298, |
| "learning_rate": 1.5181291910369643e-05, |
| "loss": 0.3062, |
| "step": 1460 |
| }, |
| { |
| "epoch": 1.1912479740680713, |
| "grad_norm": 1.5939214561257982, |
| "learning_rate": 1.5100395851724127e-05, |
| "loss": 0.3097, |
| "step": 1470 |
| }, |
| { |
| "epoch": 1.1993517017828201, |
| "grad_norm": 0.46387474399530304, |
| "learning_rate": 1.501904611150086e-05, |
| "loss": 0.3068, |
| "step": 1480 |
| }, |
| { |
| "epoch": 1.2074554294975688, |
| "grad_norm": 0.6633833013460058, |
| "learning_rate": 1.4937249925781025e-05, |
| "loss": 0.3129, |
| "step": 1490 |
| }, |
| { |
| "epoch": 1.2155591572123177, |
| "grad_norm": 0.6412444023665336, |
| "learning_rate": 1.4855014570357266e-05, |
| "loss": 0.3104, |
| "step": 1500 |
| }, |
| { |
| "epoch": 1.2236628849270665, |
| "grad_norm": 0.5256422510213131, |
| "learning_rate": 1.477234736008648e-05, |
| "loss": 0.3047, |
| "step": 1510 |
| }, |
| { |
| "epoch": 1.2317666126418152, |
| "grad_norm": 0.40656434444419987, |
| "learning_rate": 1.4689255648239182e-05, |
| "loss": 0.3041, |
| "step": 1520 |
| }, |
| { |
| "epoch": 1.239870340356564, |
| "grad_norm": 0.4758893103433166, |
| "learning_rate": 1.4605746825845394e-05, |
| "loss": 0.3111, |
| "step": 1530 |
| }, |
| { |
| "epoch": 1.2479740680713127, |
| "grad_norm": 0.5463130947213312, |
| "learning_rate": 1.452182832103726e-05, |
| "loss": 0.3073, |
| "step": 1540 |
| }, |
| { |
| "epoch": 1.2560777957860616, |
| "grad_norm": 0.438008763529305, |
| "learning_rate": 1.4437507598388256e-05, |
| "loss": 0.3067, |
| "step": 1550 |
| }, |
| { |
| "epoch": 1.2641815235008105, |
| "grad_norm": 0.490881291679877, |
| "learning_rate": 1.4352792158249267e-05, |
| "loss": 0.3119, |
| "step": 1560 |
| }, |
| { |
| "epoch": 1.2722852512155591, |
| "grad_norm": 0.44541220450724833, |
| "learning_rate": 1.4267689536081391e-05, |
| "loss": 0.2998, |
| "step": 1570 |
| }, |
| { |
| "epoch": 1.280388978930308, |
| "grad_norm": 0.4298216130010655, |
| "learning_rate": 1.4182207301785673e-05, |
| "loss": 0.2992, |
| "step": 1580 |
| }, |
| { |
| "epoch": 1.2884927066450567, |
| "grad_norm": 0.5117813083461242, |
| "learning_rate": 1.4096353059029759e-05, |
| "loss": 0.3099, |
| "step": 1590 |
| }, |
| { |
| "epoch": 1.2965964343598055, |
| "grad_norm": 0.4365604649899055, |
| "learning_rate": 1.4010134444571554e-05, |
| "loss": 0.3082, |
| "step": 1600 |
| }, |
| { |
| "epoch": 1.3047001620745542, |
| "grad_norm": 0.4279375225793271, |
| "learning_rate": 1.3923559127579917e-05, |
| "loss": 0.3089, |
| "step": 1610 |
| }, |
| { |
| "epoch": 1.312803889789303, |
| "grad_norm": 0.4309092541378403, |
| "learning_rate": 1.3836634808952488e-05, |
| "loss": 0.3132, |
| "step": 1620 |
| }, |
| { |
| "epoch": 1.320907617504052, |
| "grad_norm": 0.47600343407209245, |
| "learning_rate": 1.3749369220630706e-05, |
| "loss": 0.3065, |
| "step": 1630 |
| }, |
| { |
| "epoch": 1.3290113452188006, |
| "grad_norm": 0.46308122621094266, |
| "learning_rate": 1.3661770124912037e-05, |
| "loss": 0.3167, |
| "step": 1640 |
| }, |
| { |
| "epoch": 1.3371150729335495, |
| "grad_norm": 0.4572510567971018, |
| "learning_rate": 1.357384531375952e-05, |
| "loss": 0.3107, |
| "step": 1650 |
| }, |
| { |
| "epoch": 1.3452188006482984, |
| "grad_norm": 0.4467596488843486, |
| "learning_rate": 1.3485602608108665e-05, |
| "loss": 0.3141, |
| "step": 1660 |
| }, |
| { |
| "epoch": 1.353322528363047, |
| "grad_norm": 0.4024832244734071, |
| "learning_rate": 1.3397049857171789e-05, |
| "loss": 0.317, |
| "step": 1670 |
| }, |
| { |
| "epoch": 1.3614262560777957, |
| "grad_norm": 0.4069981811083432, |
| "learning_rate": 1.3308194937739811e-05, |
| "loss": 0.3079, |
| "step": 1680 |
| }, |
| { |
| "epoch": 1.3695299837925445, |
| "grad_norm": 0.3744780812707145, |
| "learning_rate": 1.3219045753481633e-05, |
| "loss": 0.312, |
| "step": 1690 |
| }, |
| { |
| "epoch": 1.3776337115072934, |
| "grad_norm": 0.5528595105239157, |
| "learning_rate": 1.3129610234241078e-05, |
| "loss": 0.3089, |
| "step": 1700 |
| }, |
| { |
| "epoch": 1.385737439222042, |
| "grad_norm": 0.3824521485349888, |
| "learning_rate": 1.3039896335331553e-05, |
| "loss": 0.305, |
| "step": 1710 |
| }, |
| { |
| "epoch": 1.393841166936791, |
| "grad_norm": 0.42322429215639495, |
| "learning_rate": 1.2949912036828402e-05, |
| "loss": 0.306, |
| "step": 1720 |
| }, |
| { |
| "epoch": 1.4019448946515398, |
| "grad_norm": 0.6010113797673305, |
| "learning_rate": 1.2859665342859094e-05, |
| "loss": 0.3114, |
| "step": 1730 |
| }, |
| { |
| "epoch": 1.4100486223662885, |
| "grad_norm": 0.5226322175181108, |
| "learning_rate": 1.2769164280891232e-05, |
| "loss": 0.3099, |
| "step": 1740 |
| }, |
| { |
| "epoch": 1.4181523500810373, |
| "grad_norm": 0.44179864325029244, |
| "learning_rate": 1.2678416901018547e-05, |
| "loss": 0.3163, |
| "step": 1750 |
| }, |
| { |
| "epoch": 1.426256077795786, |
| "grad_norm": 0.4602625693572698, |
| "learning_rate": 1.2587431275244776e-05, |
| "loss": 0.3094, |
| "step": 1760 |
| }, |
| { |
| "epoch": 1.4343598055105349, |
| "grad_norm": 0.43940134729597297, |
| "learning_rate": 1.2496215496765725e-05, |
| "loss": 0.31, |
| "step": 1770 |
| }, |
| { |
| "epoch": 1.4424635332252835, |
| "grad_norm": 0.46342172282209004, |
| "learning_rate": 1.2404777679249331e-05, |
| "loss": 0.3106, |
| "step": 1780 |
| }, |
| { |
| "epoch": 1.4505672609400324, |
| "grad_norm": 0.48199983224606735, |
| "learning_rate": 1.2313125956113963e-05, |
| "loss": 0.3091, |
| "step": 1790 |
| }, |
| { |
| "epoch": 1.4586709886547813, |
| "grad_norm": 0.5796133537203843, |
| "learning_rate": 1.2221268479804952e-05, |
| "loss": 0.3041, |
| "step": 1800 |
| }, |
| { |
| "epoch": 1.46677471636953, |
| "grad_norm": 0.3916391717593484, |
| "learning_rate": 1.2129213421069434e-05, |
| "loss": 0.303, |
| "step": 1810 |
| }, |
| { |
| "epoch": 1.4748784440842788, |
| "grad_norm": 0.47407293008567386, |
| "learning_rate": 1.2036968968229553e-05, |
| "loss": 0.3005, |
| "step": 1820 |
| }, |
| { |
| "epoch": 1.4829821717990275, |
| "grad_norm": 0.4742547690494269, |
| "learning_rate": 1.1944543326454099e-05, |
| "loss": 0.31, |
| "step": 1830 |
| }, |
| { |
| "epoch": 1.4910858995137763, |
| "grad_norm": 0.5968741484264349, |
| "learning_rate": 1.1851944717028685e-05, |
| "loss": 0.3015, |
| "step": 1840 |
| }, |
| { |
| "epoch": 1.499189627228525, |
| "grad_norm": 0.42113518745463263, |
| "learning_rate": 1.1759181376624425e-05, |
| "loss": 0.3062, |
| "step": 1850 |
| }, |
| { |
| "epoch": 1.5072933549432739, |
| "grad_norm": 0.4221625446484812, |
| "learning_rate": 1.1666261556565311e-05, |
| "loss": 0.3154, |
| "step": 1860 |
| }, |
| { |
| "epoch": 1.5153970826580228, |
| "grad_norm": 0.37363373908254355, |
| "learning_rate": 1.1573193522094235e-05, |
| "loss": 0.3074, |
| "step": 1870 |
| }, |
| { |
| "epoch": 1.5235008103727714, |
| "grad_norm": 0.4097973041623187, |
| "learning_rate": 1.1479985551637802e-05, |
| "loss": 0.3104, |
| "step": 1880 |
| }, |
| { |
| "epoch": 1.5316045380875203, |
| "grad_norm": 0.503026045894683, |
| "learning_rate": 1.1386645936069959e-05, |
| "loss": 0.303, |
| "step": 1890 |
| }, |
| { |
| "epoch": 1.5397082658022692, |
| "grad_norm": 0.6764944642077741, |
| "learning_rate": 1.129318297797453e-05, |
| "loss": 0.3148, |
| "step": 1900 |
| }, |
| { |
| "epoch": 1.5478119935170178, |
| "grad_norm": 0.4493347534605716, |
| "learning_rate": 1.1199604990906673e-05, |
| "loss": 0.2993, |
| "step": 1910 |
| }, |
| { |
| "epoch": 1.5559157212317665, |
| "grad_norm": 0.41924482066132346, |
| "learning_rate": 1.1105920298653424e-05, |
| "loss": 0.303, |
| "step": 1920 |
| }, |
| { |
| "epoch": 1.5640194489465153, |
| "grad_norm": 0.4032353411754684, |
| "learning_rate": 1.1012137234493257e-05, |
| "loss": 0.3083, |
| "step": 1930 |
| }, |
| { |
| "epoch": 1.5721231766612642, |
| "grad_norm": 0.4337602358099995, |
| "learning_rate": 1.0918264140454858e-05, |
| "loss": 0.3098, |
| "step": 1940 |
| }, |
| { |
| "epoch": 1.5802269043760129, |
| "grad_norm": 0.4712510568563143, |
| "learning_rate": 1.0824309366575102e-05, |
| "loss": 0.3019, |
| "step": 1950 |
| }, |
| { |
| "epoch": 1.5883306320907618, |
| "grad_norm": 0.417863657320501, |
| "learning_rate": 1.07302812701563e-05, |
| "loss": 0.3134, |
| "step": 1960 |
| }, |
| { |
| "epoch": 1.5964343598055106, |
| "grad_norm": 0.396170929504839, |
| "learning_rate": 1.0636188215022822e-05, |
| "loss": 0.3069, |
| "step": 1970 |
| }, |
| { |
| "epoch": 1.6045380875202593, |
| "grad_norm": 0.44185173527839794, |
| "learning_rate": 1.0542038570777145e-05, |
| "loss": 0.3023, |
| "step": 1980 |
| }, |
| { |
| "epoch": 1.6126418152350082, |
| "grad_norm": 0.4585710775923548, |
| "learning_rate": 1.0447840712055346e-05, |
| "loss": 0.3006, |
| "step": 1990 |
| }, |
| { |
| "epoch": 1.620745542949757, |
| "grad_norm": 0.40442217233513095, |
| "learning_rate": 1.0353603017782198e-05, |
| "loss": 0.3078, |
| "step": 2000 |
| }, |
| { |
| "epoch": 1.6288492706645057, |
| "grad_norm": 0.4204837524606176, |
| "learning_rate": 1.025933387042587e-05, |
| "loss": 0.3022, |
| "step": 2010 |
| }, |
| { |
| "epoch": 1.6369529983792543, |
| "grad_norm": 0.4111160401978428, |
| "learning_rate": 1.0165041655252273e-05, |
| "loss": 0.3087, |
| "step": 2020 |
| }, |
| { |
| "epoch": 1.6450567260940032, |
| "grad_norm": 0.4094137388336773, |
| "learning_rate": 1.0070734759579213e-05, |
| "loss": 0.305, |
| "step": 2030 |
| }, |
| { |
| "epoch": 1.653160453808752, |
| "grad_norm": 0.497539705471503, |
| "learning_rate": 9.976421572030335e-06, |
| "loss": 0.3085, |
| "step": 2040 |
| }, |
| { |
| "epoch": 1.6612641815235007, |
| "grad_norm": 0.48668670576022205, |
| "learning_rate": 9.882110481788943e-06, |
| "loss": 0.3011, |
| "step": 2050 |
| }, |
| { |
| "epoch": 1.6693679092382496, |
| "grad_norm": 0.4983913869533297, |
| "learning_rate": 9.787809877851788e-06, |
| "loss": 0.3046, |
| "step": 2060 |
| }, |
| { |
| "epoch": 1.6774716369529985, |
| "grad_norm": 0.43523910129253507, |
| "learning_rate": 9.693528148282858e-06, |
| "loss": 0.3091, |
| "step": 2070 |
| }, |
| { |
| "epoch": 1.6855753646677472, |
| "grad_norm": 0.400445744613139, |
| "learning_rate": 9.599273679467261e-06, |
| "loss": 0.302, |
| "step": 2080 |
| }, |
| { |
| "epoch": 1.6936790923824958, |
| "grad_norm": 0.3719591203602621, |
| "learning_rate": 9.505054855365255e-06, |
| "loss": 0.304, |
| "step": 2090 |
| }, |
| { |
| "epoch": 1.7017828200972447, |
| "grad_norm": 0.42904850392701255, |
| "learning_rate": 9.4108800567665e-06, |
| "loss": 0.3019, |
| "step": 2100 |
| }, |
| { |
| "epoch": 1.7098865478119936, |
| "grad_norm": 0.38522776942261844, |
| "learning_rate": 9.31675766054456e-06, |
| "loss": 0.2986, |
| "step": 2110 |
| }, |
| { |
| "epoch": 1.7179902755267422, |
| "grad_norm": 0.4471785152960648, |
| "learning_rate": 9.222696038911799e-06, |
| "loss": 0.3063, |
| "step": 2120 |
| }, |
| { |
| "epoch": 1.726094003241491, |
| "grad_norm": 0.4518210928364812, |
| "learning_rate": 9.128703558674671e-06, |
| "loss": 0.2999, |
| "step": 2130 |
| }, |
| { |
| "epoch": 1.73419773095624, |
| "grad_norm": 0.3774709767589339, |
| "learning_rate": 9.034788580489483e-06, |
| "loss": 0.3028, |
| "step": 2140 |
| }, |
| { |
| "epoch": 1.7423014586709886, |
| "grad_norm": 1.206345275784034, |
| "learning_rate": 8.9409594581187e-06, |
| "loss": 0.2976, |
| "step": 2150 |
| }, |
| { |
| "epoch": 1.7504051863857373, |
| "grad_norm": 0.9305367096127886, |
| "learning_rate": 8.84722453768791e-06, |
| "loss": 0.2994, |
| "step": 2160 |
| }, |
| { |
| "epoch": 1.7585089141004864, |
| "grad_norm": 0.7089339929942101, |
| "learning_rate": 8.753592156943398e-06, |
| "loss": 0.2989, |
| "step": 2170 |
| }, |
| { |
| "epoch": 1.766612641815235, |
| "grad_norm": 0.4065505242372769, |
| "learning_rate": 8.660070644510527e-06, |
| "loss": 0.3008, |
| "step": 2180 |
| }, |
| { |
| "epoch": 1.7747163695299837, |
| "grad_norm": 0.4285649698256748, |
| "learning_rate": 8.566668319152885e-06, |
| "loss": 0.298, |
| "step": 2190 |
| }, |
| { |
| "epoch": 1.7828200972447326, |
| "grad_norm": 0.9190811286888556, |
| "learning_rate": 8.47339348903234e-06, |
| "loss": 0.2969, |
| "step": 2200 |
| }, |
| { |
| "epoch": 1.7909238249594814, |
| "grad_norm": 0.4006678679346164, |
| "learning_rate": 8.38025445097003e-06, |
| "loss": 0.3028, |
| "step": 2210 |
| }, |
| { |
| "epoch": 1.79902755267423, |
| "grad_norm": 0.37508868691568753, |
| "learning_rate": 8.287259489708345e-06, |
| "loss": 0.3038, |
| "step": 2220 |
| }, |
| { |
| "epoch": 1.807131280388979, |
| "grad_norm": 0.47585858623889243, |
| "learning_rate": 8.194416877174011e-06, |
| "loss": 0.3003, |
| "step": 2230 |
| }, |
| { |
| "epoch": 1.8152350081037278, |
| "grad_norm": 0.557738896636675, |
| "learning_rate": 8.101734871742293e-06, |
| "loss": 0.3019, |
| "step": 2240 |
| }, |
| { |
| "epoch": 1.8233387358184765, |
| "grad_norm": 0.3753260161379116, |
| "learning_rate": 8.00922171750241e-06, |
| "loss": 0.3009, |
| "step": 2250 |
| }, |
| { |
| "epoch": 1.8314424635332252, |
| "grad_norm": 0.43219188438768574, |
| "learning_rate": 7.916885643524218e-06, |
| "loss": 0.3073, |
| "step": 2260 |
| }, |
| { |
| "epoch": 1.839546191247974, |
| "grad_norm": 0.4087379694639625, |
| "learning_rate": 7.824734863126242e-06, |
| "loss": 0.3031, |
| "step": 2270 |
| }, |
| { |
| "epoch": 1.847649918962723, |
| "grad_norm": 0.4680062742989638, |
| "learning_rate": 7.732777573145095e-06, |
| "loss": 0.3015, |
| "step": 2280 |
| }, |
| { |
| "epoch": 1.8557536466774716, |
| "grad_norm": 0.47630065707014735, |
| "learning_rate": 7.641021953206365e-06, |
| "loss": 0.3049, |
| "step": 2290 |
| }, |
| { |
| "epoch": 1.8638573743922204, |
| "grad_norm": 0.382619083849301, |
| "learning_rate": 7.549476164997023e-06, |
| "loss": 0.3062, |
| "step": 2300 |
| }, |
| { |
| "epoch": 1.8719611021069693, |
| "grad_norm": 0.46118225061671075, |
| "learning_rate": 7.458148351539469e-06, |
| "loss": 0.3015, |
| "step": 2310 |
| }, |
| { |
| "epoch": 1.880064829821718, |
| "grad_norm": 0.4470501754225101, |
| "learning_rate": 7.367046636467181e-06, |
| "loss": 0.303, |
| "step": 2320 |
| }, |
| { |
| "epoch": 1.8881685575364666, |
| "grad_norm": 0.37259711233495485, |
| "learning_rate": 7.276179123302132e-06, |
| "loss": 0.2987, |
| "step": 2330 |
| }, |
| { |
| "epoch": 1.8962722852512157, |
| "grad_norm": 0.3828380710217088, |
| "learning_rate": 7.185553894733961e-06, |
| "loss": 0.3063, |
| "step": 2340 |
| }, |
| { |
| "epoch": 1.9043760129659644, |
| "grad_norm": 0.43232450031072595, |
| "learning_rate": 7.0951790119010335e-06, |
| "loss": 0.3057, |
| "step": 2350 |
| }, |
| { |
| "epoch": 1.912479740680713, |
| "grad_norm": 0.6794912328243613, |
| "learning_rate": 7.0050625136734e-06, |
| "loss": 0.2986, |
| "step": 2360 |
| }, |
| { |
| "epoch": 1.920583468395462, |
| "grad_norm": 0.44967268132350086, |
| "learning_rate": 6.915212415937727e-06, |
| "loss": 0.3, |
| "step": 2370 |
| }, |
| { |
| "epoch": 1.9286871961102108, |
| "grad_norm": 0.3813148690736967, |
| "learning_rate": 6.825636710884276e-06, |
| "loss": 0.2983, |
| "step": 2380 |
| }, |
| { |
| "epoch": 1.9367909238249594, |
| "grad_norm": 0.4583664266244799, |
| "learning_rate": 6.736343366296023e-06, |
| "loss": 0.3067, |
| "step": 2390 |
| }, |
| { |
| "epoch": 1.9448946515397083, |
| "grad_norm": 0.41678473009190237, |
| "learning_rate": 6.647340324839899e-06, |
| "loss": 0.3004, |
| "step": 2400 |
| }, |
| { |
| "epoch": 1.9529983792544572, |
| "grad_norm": 0.4358479736313102, |
| "learning_rate": 6.558635503360282e-06, |
| "loss": 0.2954, |
| "step": 2410 |
| }, |
| { |
| "epoch": 1.9611021069692058, |
| "grad_norm": 0.3681741215171407, |
| "learning_rate": 6.470236792174821e-06, |
| "loss": 0.2981, |
| "step": 2420 |
| }, |
| { |
| "epoch": 1.9692058346839545, |
| "grad_norm": 0.4553771802708097, |
| "learning_rate": 6.382152054372559e-06, |
| "loss": 0.2962, |
| "step": 2430 |
| }, |
| { |
| "epoch": 1.9773095623987034, |
| "grad_norm": 0.4035197510740001, |
| "learning_rate": 6.294389125114533e-06, |
| "loss": 0.303, |
| "step": 2440 |
| }, |
| { |
| "epoch": 1.9854132901134522, |
| "grad_norm": 0.38904501709214123, |
| "learning_rate": 6.206955810936812e-06, |
| "loss": 0.3007, |
| "step": 2450 |
| }, |
| { |
| "epoch": 1.993517017828201, |
| "grad_norm": 0.3369713424160447, |
| "learning_rate": 6.1198598890561245e-06, |
| "loss": 0.2976, |
| "step": 2460 |
| }, |
| { |
| "epoch": 2.0016207455429496, |
| "grad_norm": 0.46052297639692136, |
| "learning_rate": 6.033109106678061e-06, |
| "loss": 0.2843, |
| "step": 2470 |
| }, |
| { |
| "epoch": 2.0097244732576987, |
| "grad_norm": 0.4165773625784103, |
| "learning_rate": 5.946711180307962e-06, |
| "loss": 0.2562, |
| "step": 2480 |
| }, |
| { |
| "epoch": 2.0178282009724473, |
| "grad_norm": 0.48747342784329734, |
| "learning_rate": 5.860673795064526e-06, |
| "loss": 0.2461, |
| "step": 2490 |
| }, |
| { |
| "epoch": 2.025931928687196, |
| "grad_norm": 0.38561574078631383, |
| "learning_rate": 5.7750046039962256e-06, |
| "loss": 0.2496, |
| "step": 2500 |
| }, |
| { |
| "epoch": 2.034035656401945, |
| "grad_norm": 0.3619233004354448, |
| "learning_rate": 5.689711227400565e-06, |
| "loss": 0.2413, |
| "step": 2510 |
| }, |
| { |
| "epoch": 2.0421393841166937, |
| "grad_norm": 0.4332706443306492, |
| "learning_rate": 5.604801252146254e-06, |
| "loss": 0.2508, |
| "step": 2520 |
| }, |
| { |
| "epoch": 2.0502431118314424, |
| "grad_norm": 0.5618640479718628, |
| "learning_rate": 5.520282230998338e-06, |
| "loss": 0.2433, |
| "step": 2530 |
| }, |
| { |
| "epoch": 2.0583468395461915, |
| "grad_norm": 0.5086803446012763, |
| "learning_rate": 5.436161681946395e-06, |
| "loss": 0.2434, |
| "step": 2540 |
| }, |
| { |
| "epoch": 2.06645056726094, |
| "grad_norm": 0.4656470539260537, |
| "learning_rate": 5.352447087535806e-06, |
| "loss": 0.244, |
| "step": 2550 |
| }, |
| { |
| "epoch": 2.0745542949756888, |
| "grad_norm": 0.4006101056462074, |
| "learning_rate": 5.2691458942021824e-06, |
| "loss": 0.2494, |
| "step": 2560 |
| }, |
| { |
| "epoch": 2.0826580226904374, |
| "grad_norm": 0.4241900725669252, |
| "learning_rate": 5.186265511608986e-06, |
| "loss": 0.2445, |
| "step": 2570 |
| }, |
| { |
| "epoch": 2.0907617504051865, |
| "grad_norm": 0.3775085886627043, |
| "learning_rate": 5.103813311988464e-06, |
| "loss": 0.2534, |
| "step": 2580 |
| }, |
| { |
| "epoch": 2.098865478119935, |
| "grad_norm": 0.44744742574972074, |
| "learning_rate": 5.021796629485864e-06, |
| "loss": 0.2517, |
| "step": 2590 |
| }, |
| { |
| "epoch": 2.106969205834684, |
| "grad_norm": 0.42804732064669376, |
| "learning_rate": 4.9402227595070685e-06, |
| "loss": 0.2465, |
| "step": 2600 |
| }, |
| { |
| "epoch": 2.115072933549433, |
| "grad_norm": 0.4509968018515705, |
| "learning_rate": 4.859098958069676e-06, |
| "loss": 0.2468, |
| "step": 2610 |
| }, |
| { |
| "epoch": 2.1231766612641816, |
| "grad_norm": 0.46449954347887473, |
| "learning_rate": 4.778432441157563e-06, |
| "loss": 0.2472, |
| "step": 2620 |
| }, |
| { |
| "epoch": 2.1312803889789302, |
| "grad_norm": 0.4212354509045052, |
| "learning_rate": 4.698230384079027e-06, |
| "loss": 0.2431, |
| "step": 2630 |
| }, |
| { |
| "epoch": 2.139384116693679, |
| "grad_norm": 0.4126775993056891, |
| "learning_rate": 4.618499920828534e-06, |
| "loss": 0.2413, |
| "step": 2640 |
| }, |
| { |
| "epoch": 2.147487844408428, |
| "grad_norm": 1.0053701059750597, |
| "learning_rate": 4.539248143452152e-06, |
| "loss": 0.2504, |
| "step": 2650 |
| }, |
| { |
| "epoch": 2.1555915721231766, |
| "grad_norm": 0.42794882075384755, |
| "learning_rate": 4.46048210141672e-06, |
| "loss": 0.2461, |
| "step": 2660 |
| }, |
| { |
| "epoch": 2.1636952998379253, |
| "grad_norm": 0.44630739523619917, |
| "learning_rate": 4.382208800982786e-06, |
| "loss": 0.2492, |
| "step": 2670 |
| }, |
| { |
| "epoch": 2.1717990275526744, |
| "grad_norm": 0.40184228816797024, |
| "learning_rate": 4.304435204581391e-06, |
| "loss": 0.2499, |
| "step": 2680 |
| }, |
| { |
| "epoch": 2.179902755267423, |
| "grad_norm": 0.5573923075225637, |
| "learning_rate": 4.227168230194784e-06, |
| "loss": 0.2518, |
| "step": 2690 |
| }, |
| { |
| "epoch": 2.1880064829821717, |
| "grad_norm": 0.39409118645453173, |
| "learning_rate": 4.150414750741034e-06, |
| "loss": 0.2423, |
| "step": 2700 |
| }, |
| { |
| "epoch": 2.1961102106969204, |
| "grad_norm": 0.5614548645666275, |
| "learning_rate": 4.074181593462714e-06, |
| "loss": 0.2444, |
| "step": 2710 |
| }, |
| { |
| "epoch": 2.2042139384116695, |
| "grad_norm": 0.43368433141829477, |
| "learning_rate": 3.998475539319584e-06, |
| "loss": 0.2426, |
| "step": 2720 |
| }, |
| { |
| "epoch": 2.212317666126418, |
| "grad_norm": 0.4835176606950031, |
| "learning_rate": 3.92330332238545e-06, |
| "loss": 0.2409, |
| "step": 2730 |
| }, |
| { |
| "epoch": 2.2204213938411668, |
| "grad_norm": 0.38421665137482836, |
| "learning_rate": 3.8486716292491535e-06, |
| "loss": 0.242, |
| "step": 2740 |
| }, |
| { |
| "epoch": 2.228525121555916, |
| "grad_norm": 1.183353326709701, |
| "learning_rate": 3.7745870984198008e-06, |
| "loss": 0.2471, |
| "step": 2750 |
| }, |
| { |
| "epoch": 2.2366288492706645, |
| "grad_norm": 0.5175889110144042, |
| "learning_rate": 3.701056319736257e-06, |
| "loss": 0.2474, |
| "step": 2760 |
| }, |
| { |
| "epoch": 2.244732576985413, |
| "grad_norm": 0.3841490156805979, |
| "learning_rate": 3.6280858337809875e-06, |
| "loss": 0.2448, |
| "step": 2770 |
| }, |
| { |
| "epoch": 2.2528363047001623, |
| "grad_norm": 0.3850534746995418, |
| "learning_rate": 3.555682131298279e-06, |
| "loss": 0.246, |
| "step": 2780 |
| }, |
| { |
| "epoch": 2.260940032414911, |
| "grad_norm": 0.5386275035212884, |
| "learning_rate": 3.483851652616862e-06, |
| "loss": 0.2457, |
| "step": 2790 |
| }, |
| { |
| "epoch": 2.2690437601296596, |
| "grad_norm": 0.4766394762015124, |
| "learning_rate": 3.412600787077066e-06, |
| "loss": 0.2442, |
| "step": 2800 |
| }, |
| { |
| "epoch": 2.2771474878444082, |
| "grad_norm": 0.45336828531117784, |
| "learning_rate": 3.3419358724624785e-06, |
| "loss": 0.2457, |
| "step": 2810 |
| }, |
| { |
| "epoch": 2.2852512155591573, |
| "grad_norm": 0.3899936791407577, |
| "learning_rate": 3.2718631944361856e-06, |
| "loss": 0.2475, |
| "step": 2820 |
| }, |
| { |
| "epoch": 2.293354943273906, |
| "grad_norm": 0.4638514300378942, |
| "learning_rate": 3.2023889859816737e-06, |
| "loss": 0.2528, |
| "step": 2830 |
| }, |
| { |
| "epoch": 2.3014586709886546, |
| "grad_norm": 0.46207911721833217, |
| "learning_rate": 3.1335194268484027e-06, |
| "loss": 0.2464, |
| "step": 2840 |
| }, |
| { |
| "epoch": 2.3095623987034037, |
| "grad_norm": 0.4494974595149148, |
| "learning_rate": 3.065260643002115e-06, |
| "loss": 0.2477, |
| "step": 2850 |
| }, |
| { |
| "epoch": 2.3176661264181524, |
| "grad_norm": 0.5237975403078062, |
| "learning_rate": 2.9976187060799254e-06, |
| "loss": 0.2491, |
| "step": 2860 |
| }, |
| { |
| "epoch": 2.325769854132901, |
| "grad_norm": 0.4228496375207238, |
| "learning_rate": 2.9305996328502473e-06, |
| "loss": 0.2486, |
| "step": 2870 |
| }, |
| { |
| "epoch": 2.3338735818476497, |
| "grad_norm": 0.402568414268707, |
| "learning_rate": 2.8642093846776007e-06, |
| "loss": 0.2445, |
| "step": 2880 |
| }, |
| { |
| "epoch": 2.341977309562399, |
| "grad_norm": 0.4517134992500212, |
| "learning_rate": 2.7984538669923534e-06, |
| "loss": 0.2455, |
| "step": 2890 |
| }, |
| { |
| "epoch": 2.3500810372771475, |
| "grad_norm": 0.40878386175707665, |
| "learning_rate": 2.7333389287654276e-06, |
| "loss": 0.2449, |
| "step": 2900 |
| }, |
| { |
| "epoch": 2.358184764991896, |
| "grad_norm": 0.5426654742148659, |
| "learning_rate": 2.6688703619880207e-06, |
| "loss": 0.247, |
| "step": 2910 |
| }, |
| { |
| "epoch": 2.366288492706645, |
| "grad_norm": 0.48949948663274995, |
| "learning_rate": 2.6050539011564256e-06, |
| "loss": 0.2429, |
| "step": 2920 |
| }, |
| { |
| "epoch": 2.374392220421394, |
| "grad_norm": 0.4015208445324201, |
| "learning_rate": 2.5418952227619387e-06, |
| "loss": 0.2456, |
| "step": 2930 |
| }, |
| { |
| "epoch": 2.3824959481361425, |
| "grad_norm": 0.39679113028964264, |
| "learning_rate": 2.479399944785923e-06, |
| "loss": 0.2488, |
| "step": 2940 |
| }, |
| { |
| "epoch": 2.3905996758508916, |
| "grad_norm": 0.4194967061022551, |
| "learning_rate": 2.4175736262001003e-06, |
| "loss": 0.2452, |
| "step": 2950 |
| }, |
| { |
| "epoch": 2.3987034035656403, |
| "grad_norm": 0.40986790099959525, |
| "learning_rate": 2.356421766472077e-06, |
| "loss": 0.2444, |
| "step": 2960 |
| }, |
| { |
| "epoch": 2.406807131280389, |
| "grad_norm": 0.4323381979453355, |
| "learning_rate": 2.2959498050761665e-06, |
| "loss": 0.248, |
| "step": 2970 |
| }, |
| { |
| "epoch": 2.4149108589951376, |
| "grad_norm": 0.5115877124688396, |
| "learning_rate": 2.236163121009535e-06, |
| "loss": 0.2387, |
| "step": 2980 |
| }, |
| { |
| "epoch": 2.4230145867098867, |
| "grad_norm": 0.6386161325963382, |
| "learning_rate": 2.1770670323137543e-06, |
| "loss": 0.2475, |
| "step": 2990 |
| }, |
| { |
| "epoch": 2.4311183144246353, |
| "grad_norm": 0.4784913634378947, |
| "learning_rate": 2.118666795601746e-06, |
| "loss": 0.2489, |
| "step": 3000 |
| }, |
| { |
| "epoch": 2.439222042139384, |
| "grad_norm": 0.43984401271561907, |
| "learning_rate": 2.0609676055902206e-06, |
| "loss": 0.2438, |
| "step": 3010 |
| }, |
| { |
| "epoch": 2.447325769854133, |
| "grad_norm": 0.40066472622294524, |
| "learning_rate": 2.0039745946375843e-06, |
| "loss": 0.2486, |
| "step": 3020 |
| }, |
| { |
| "epoch": 2.4554294975688817, |
| "grad_norm": 0.41104694101861183, |
| "learning_rate": 1.9476928322874388e-06, |
| "loss": 0.2457, |
| "step": 3030 |
| }, |
| { |
| "epoch": 2.4635332252836304, |
| "grad_norm": 0.5227302837740756, |
| "learning_rate": 1.8921273248176274e-06, |
| "loss": 0.241, |
| "step": 3040 |
| }, |
| { |
| "epoch": 2.471636952998379, |
| "grad_norm": 0.36733066209170323, |
| "learning_rate": 1.8372830147949273e-06, |
| "loss": 0.249, |
| "step": 3050 |
| }, |
| { |
| "epoch": 2.479740680713128, |
| "grad_norm": 0.4030662849923467, |
| "learning_rate": 1.783164780635409e-06, |
| "loss": 0.233, |
| "step": 3060 |
| }, |
| { |
| "epoch": 2.487844408427877, |
| "grad_norm": 0.412196413521116, |
| "learning_rate": 1.7297774361705067e-06, |
| "loss": 0.2462, |
| "step": 3070 |
| }, |
| { |
| "epoch": 2.4959481361426255, |
| "grad_norm": 0.35694583280585374, |
| "learning_rate": 1.6771257302188138e-06, |
| "loss": 0.2416, |
| "step": 3080 |
| }, |
| { |
| "epoch": 2.5040518638573745, |
| "grad_norm": 0.3428163831314568, |
| "learning_rate": 1.6252143461636894e-06, |
| "loss": 0.241, |
| "step": 3090 |
| }, |
| { |
| "epoch": 2.512155591572123, |
| "grad_norm": 0.4327556065174441, |
| "learning_rate": 1.5740479015366516e-06, |
| "loss": 0.2475, |
| "step": 3100 |
| }, |
| { |
| "epoch": 2.520259319286872, |
| "grad_norm": 0.39635974405307706, |
| "learning_rate": 1.523630947606659e-06, |
| "loss": 0.2441, |
| "step": 3110 |
| }, |
| { |
| "epoch": 2.528363047001621, |
| "grad_norm": 0.47175615435889623, |
| "learning_rate": 1.4739679689752761e-06, |
| "loss": 0.2452, |
| "step": 3120 |
| }, |
| { |
| "epoch": 2.5364667747163696, |
| "grad_norm": 0.8028515362350427, |
| "learning_rate": 1.4250633831777617e-06, |
| "loss": 0.2462, |
| "step": 3130 |
| }, |
| { |
| "epoch": 2.5445705024311183, |
| "grad_norm": 0.39072342007660477, |
| "learning_rate": 1.3769215402901248e-06, |
| "loss": 0.2448, |
| "step": 3140 |
| }, |
| { |
| "epoch": 2.5526742301458674, |
| "grad_norm": 0.37581376165011565, |
| "learning_rate": 1.3295467225421922e-06, |
| "loss": 0.2384, |
| "step": 3150 |
| }, |
| { |
| "epoch": 2.560777957860616, |
| "grad_norm": 0.5041858255649342, |
| "learning_rate": 1.2829431439366979e-06, |
| "loss": 0.2433, |
| "step": 3160 |
| }, |
| { |
| "epoch": 2.5688816855753647, |
| "grad_norm": 0.3696378563338389, |
| "learning_rate": 1.2371149498744483e-06, |
| "loss": 0.2432, |
| "step": 3170 |
| }, |
| { |
| "epoch": 2.5769854132901133, |
| "grad_norm": 0.9465843377425273, |
| "learning_rate": 1.1920662167855801e-06, |
| "loss": 0.2436, |
| "step": 3180 |
| }, |
| { |
| "epoch": 2.585089141004862, |
| "grad_norm": 0.407383390924596, |
| "learning_rate": 1.1478009517669786e-06, |
| "loss": 0.2425, |
| "step": 3190 |
| }, |
| { |
| "epoch": 2.593192868719611, |
| "grad_norm": 0.6241717797785749, |
| "learning_rate": 1.1043230922258286e-06, |
| "loss": 0.2471, |
| "step": 3200 |
| }, |
| { |
| "epoch": 2.6012965964343597, |
| "grad_norm": 0.5159052649253751, |
| "learning_rate": 1.0616365055293832e-06, |
| "loss": 0.2451, |
| "step": 3210 |
| }, |
| { |
| "epoch": 2.6094003241491084, |
| "grad_norm": 0.48933190388622655, |
| "learning_rate": 1.0197449886609734e-06, |
| "loss": 0.2442, |
| "step": 3220 |
| }, |
| { |
| "epoch": 2.6175040518638575, |
| "grad_norm": 0.6168482880545989, |
| "learning_rate": 9.786522678822441e-07, |
| "loss": 0.2364, |
| "step": 3230 |
| }, |
| { |
| "epoch": 2.625607779578606, |
| "grad_norm": 0.37081933829009445, |
| "learning_rate": 9.383619984017256e-07, |
| "loss": 0.2453, |
| "step": 3240 |
| }, |
| { |
| "epoch": 2.633711507293355, |
| "grad_norm": 0.4449451095066097, |
| "learning_rate": 8.988777640496804e-07, |
| "loss": 0.2461, |
| "step": 3250 |
| }, |
| { |
| "epoch": 2.641815235008104, |
| "grad_norm": 0.3960001138164595, |
| "learning_rate": 8.602030769593361e-07, |
| "loss": 0.2439, |
| "step": 3260 |
| }, |
| { |
| "epoch": 2.6499189627228525, |
| "grad_norm": 0.36284505963713964, |
| "learning_rate": 8.223413772544753e-07, |
| "loss": 0.2469, |
| "step": 3270 |
| }, |
| { |
| "epoch": 2.658022690437601, |
| "grad_norm": 0.641229195521172, |
| "learning_rate": 7.8529603274343e-07, |
| "loss": 0.247, |
| "step": 3280 |
| }, |
| { |
| "epoch": 2.6661264181523503, |
| "grad_norm": 0.40246844644814356, |
| "learning_rate": 7.490703386195209e-07, |
| "loss": 0.2443, |
| "step": 3290 |
| }, |
| { |
| "epoch": 2.674230145867099, |
| "grad_norm": 0.40985271275906787, |
| "learning_rate": 7.136675171679486e-07, |
| "loss": 0.2437, |
| "step": 3300 |
| }, |
| { |
| "epoch": 2.6823338735818476, |
| "grad_norm": 0.42188562574437993, |
| "learning_rate": 6.790907174791673e-07, |
| "loss": 0.2451, |
| "step": 3310 |
| }, |
| { |
| "epoch": 2.6904376012965967, |
| "grad_norm": 0.3604413570140741, |
| "learning_rate": 6.453430151687778e-07, |
| "loss": 0.2388, |
| "step": 3320 |
| }, |
| { |
| "epoch": 2.6985413290113454, |
| "grad_norm": 0.4371459491765129, |
| "learning_rate": 6.124274121039409e-07, |
| "loss": 0.2416, |
| "step": 3330 |
| }, |
| { |
| "epoch": 2.706645056726094, |
| "grad_norm": 0.41136837236659024, |
| "learning_rate": 5.803468361363718e-07, |
| "loss": 0.2391, |
| "step": 3340 |
| }, |
| { |
| "epoch": 2.7147487844408427, |
| "grad_norm": 0.38299317915180126, |
| "learning_rate": 5.491041408418973e-07, |
| "loss": 0.2386, |
| "step": 3350 |
| }, |
| { |
| "epoch": 2.7228525121555913, |
| "grad_norm": 0.38243580422203033, |
| "learning_rate": 5.187021052666364e-07, |
| "loss": 0.2457, |
| "step": 3360 |
| }, |
| { |
| "epoch": 2.7309562398703404, |
| "grad_norm": 0.3388732073634393, |
| "learning_rate": 4.891434336797995e-07, |
| "loss": 0.2398, |
| "step": 3370 |
| }, |
| { |
| "epoch": 2.739059967585089, |
| "grad_norm": 0.41255384318125127, |
| "learning_rate": 4.604307553331466e-07, |
| "loss": 0.2416, |
| "step": 3380 |
| }, |
| { |
| "epoch": 2.7471636952998377, |
| "grad_norm": 0.40148897889806623, |
| "learning_rate": 4.3256662422711203e-07, |
| "loss": 0.2452, |
| "step": 3390 |
| }, |
| { |
| "epoch": 2.755267423014587, |
| "grad_norm": 0.36614628711673536, |
| "learning_rate": 4.0555351888362126e-07, |
| "loss": 0.2379, |
| "step": 3400 |
| }, |
| { |
| "epoch": 2.7633711507293355, |
| "grad_norm": 0.44071636359358657, |
| "learning_rate": 3.793938421256349e-07, |
| "loss": 0.2432, |
| "step": 3410 |
| }, |
| { |
| "epoch": 2.771474878444084, |
| "grad_norm": 0.4260236376544698, |
| "learning_rate": 3.5408992086340963e-07, |
| "loss": 0.248, |
| "step": 3420 |
| }, |
| { |
| "epoch": 2.7795786061588332, |
| "grad_norm": 0.40074452539447364, |
| "learning_rate": 3.296440058875239e-07, |
| "loss": 0.2461, |
| "step": 3430 |
| }, |
| { |
| "epoch": 2.787682333873582, |
| "grad_norm": 0.5015009259573896, |
| "learning_rate": 3.060582716686633e-07, |
| "loss": 0.2448, |
| "step": 3440 |
| }, |
| { |
| "epoch": 2.7957860615883305, |
| "grad_norm": 0.38220831512027065, |
| "learning_rate": 2.833348161642091e-07, |
| "loss": 0.2464, |
| "step": 3450 |
| }, |
| { |
| "epoch": 2.8038897893030796, |
| "grad_norm": 0.43496157901456367, |
| "learning_rate": 2.61475660631616e-07, |
| "loss": 0.2458, |
| "step": 3460 |
| }, |
| { |
| "epoch": 2.8119935170178283, |
| "grad_norm": 0.4000598346197503, |
| "learning_rate": 2.4048274944862837e-07, |
| "loss": 0.2468, |
| "step": 3470 |
| }, |
| { |
| "epoch": 2.820097244732577, |
| "grad_norm": 0.6792927974295451, |
| "learning_rate": 2.2035794994031856e-07, |
| "loss": 0.2451, |
| "step": 3480 |
| }, |
| { |
| "epoch": 2.828200972447326, |
| "grad_norm": 0.45063350341612657, |
| "learning_rate": 2.0110305221299641e-07, |
| "loss": 0.246, |
| "step": 3490 |
| }, |
| { |
| "epoch": 2.8363047001620747, |
| "grad_norm": 0.7418067463524373, |
| "learning_rate": 1.8271976899497555e-07, |
| "loss": 0.2482, |
| "step": 3500 |
| }, |
| { |
| "epoch": 2.8444084278768234, |
| "grad_norm": 0.7361414726016929, |
| "learning_rate": 1.6520973548422526e-07, |
| "loss": 0.2427, |
| "step": 3510 |
| }, |
| { |
| "epoch": 2.852512155591572, |
| "grad_norm": 0.40423085958924226, |
| "learning_rate": 1.485745092029145e-07, |
| "loss": 0.2452, |
| "step": 3520 |
| }, |
| { |
| "epoch": 2.8606158833063207, |
| "grad_norm": 0.46302346771686664, |
| "learning_rate": 1.328155698588829e-07, |
| "loss": 0.2412, |
| "step": 3530 |
| }, |
| { |
| "epoch": 2.8687196110210698, |
| "grad_norm": 0.3730322605924023, |
| "learning_rate": 1.1793431921400589e-07, |
| "loss": 0.2397, |
| "step": 3540 |
| }, |
| { |
| "epoch": 2.8768233387358184, |
| "grad_norm": 0.40843157463877305, |
| "learning_rate": 1.0393208095951569e-07, |
| "loss": 0.2442, |
| "step": 3550 |
| }, |
| { |
| "epoch": 2.884927066450567, |
| "grad_norm": 0.37914035318421063, |
| "learning_rate": 9.081010059825868e-08, |
| "loss": 0.2444, |
| "step": 3560 |
| }, |
| { |
| "epoch": 2.893030794165316, |
| "grad_norm": 0.44651233685847325, |
| "learning_rate": 7.856954533390305e-08, |
| "loss": 0.2418, |
| "step": 3570 |
| }, |
| { |
| "epoch": 2.901134521880065, |
| "grad_norm": 0.41695042377736163, |
| "learning_rate": 6.721150396712062e-08, |
| "loss": 0.2412, |
| "step": 3580 |
| }, |
| { |
| "epoch": 2.9092382495948135, |
| "grad_norm": 0.45904555193175056, |
| "learning_rate": 5.67369867987344e-08, |
| "loss": 0.2423, |
| "step": 3590 |
| }, |
| { |
| "epoch": 2.9173419773095626, |
| "grad_norm": 0.49181189818046256, |
| "learning_rate": 4.7146925539852675e-08, |
| "loss": 0.2427, |
| "step": 3600 |
| }, |
| { |
| "epoch": 2.9254457050243112, |
| "grad_norm": 0.435555071023551, |
| "learning_rate": 3.8442173228994126e-08, |
| "loss": 0.2515, |
| "step": 3610 |
| }, |
| { |
| "epoch": 2.93354943273906, |
| "grad_norm": 0.3713980339480842, |
| "learning_rate": 3.0623504156210846e-08, |
| "loss": 0.2412, |
| "step": 3620 |
| }, |
| { |
| "epoch": 2.941653160453809, |
| "grad_norm": 0.5531175004210699, |
| "learning_rate": 2.3691613794210034e-08, |
| "loss": 0.2444, |
| "step": 3630 |
| }, |
| { |
| "epoch": 2.9497568881685576, |
| "grad_norm": 0.3607226741593471, |
| "learning_rate": 1.7647118736499046e-08, |
| "loss": 0.2432, |
| "step": 3640 |
| }, |
| { |
| "epoch": 2.9578606158833063, |
| "grad_norm": 0.3854207792925584, |
| "learning_rate": 1.2490556642530404e-08, |
| "loss": 0.2383, |
| "step": 3650 |
| }, |
| { |
| "epoch": 2.965964343598055, |
| "grad_norm": 0.41882584574098985, |
| "learning_rate": 8.222386189886688e-09, |
| "loss": 0.2369, |
| "step": 3660 |
| }, |
| { |
| "epoch": 2.974068071312804, |
| "grad_norm": 0.39371013246278497, |
| "learning_rate": 4.84298703347319e-09, |
| "loss": 0.2437, |
| "step": 3670 |
| }, |
| { |
| "epoch": 2.9821717990275527, |
| "grad_norm": 0.415413442459999, |
| "learning_rate": 2.3526597717482558e-09, |
| "loss": 0.248, |
| "step": 3680 |
| }, |
| { |
| "epoch": 2.9902755267423013, |
| "grad_norm": 0.4255278524944212, |
| "learning_rate": 7.516259199913389e-10, |
| "loss": 0.2427, |
| "step": 3690 |
| }, |
| { |
| "epoch": 2.99837925445705, |
| "grad_norm": 0.39207320692233005, |
| "learning_rate": 4.002789059098788e-11, |
| "loss": 0.2467, |
| "step": 3700 |
| } |
| ], |
| "logging_steps": 10, |
| "max_steps": 3702, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 3, |
| "save_steps": 10000000, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": true |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 7096684954255360.0, |
| "train_batch_size": 2, |
| "trial_name": null, |
| "trial_params": null |
| } |