| { | |
| "best_global_step": 11148, | |
| "best_metric": 1.9240386486053467, | |
| "best_model_checkpoint": "./best_mcqa_model/checkpoint-11148", | |
| "epoch": 2.0, | |
| "eval_steps": 500, | |
| "global_step": 11148, | |
| "is_hyper_param_search": false, | |
| "is_local_process_zero": true, | |
| "is_world_process_zero": true, | |
| "log_history": [ | |
| { | |
| "epoch": 0.0001794043774668102, | |
|     "grad_norm": null, | |
| "learning_rate": 5e-05, | |
| "loss": 2.0537, | |
| "step": 1 | |
| }, | |
| { | |
| "epoch": 0.001794043774668102, | |
| "grad_norm": 50.121803283691406, | |
| "learning_rate": 4.9994617868676e-05, | |
| "loss": 2.3674, | |
| "step": 10 | |
| }, | |
| { | |
| "epoch": 0.003588087549336204, | |
| "grad_norm": 25.215618133544922, | |
| "learning_rate": 4.997667743092932e-05, | |
| "loss": 2.2324, | |
| "step": 20 | |
| }, | |
| { | |
| "epoch": 0.005382131324004306, | |
| "grad_norm": 17.250703811645508, | |
| "learning_rate": 4.995873699318264e-05, | |
| "loss": 2.1936, | |
| "step": 30 | |
| }, | |
| { | |
| "epoch": 0.007176175098672408, | |
| "grad_norm": 16.687049865722656, | |
| "learning_rate": 4.994079655543596e-05, | |
| "loss": 2.1778, | |
| "step": 40 | |
| }, | |
| { | |
| "epoch": 0.00897021887334051, | |
| "grad_norm": 18.19718360900879, | |
| "learning_rate": 4.9922856117689274e-05, | |
| "loss": 1.9997, | |
| "step": 50 | |
| }, | |
| { | |
| "epoch": 0.010764262648008612, | |
| "grad_norm": 18.301307678222656, | |
| "learning_rate": 4.990491567994259e-05, | |
| "loss": 2.1654, | |
| "step": 60 | |
| }, | |
| { | |
| "epoch": 0.012558306422676713, | |
| "grad_norm": 18.938064575195312, | |
| "learning_rate": 4.988697524219591e-05, | |
| "loss": 2.1311, | |
| "step": 70 | |
| }, | |
| { | |
| "epoch": 0.014352350197344816, | |
| "grad_norm": 24.058290481567383, | |
| "learning_rate": 4.986903480444923e-05, | |
| "loss": 2.1101, | |
| "step": 80 | |
| }, | |
| { | |
| "epoch": 0.016146393972012917, | |
| "grad_norm": 20.780832290649414, | |
| "learning_rate": 4.985109436670255e-05, | |
| "loss": 2.043, | |
| "step": 90 | |
| }, | |
| { | |
| "epoch": 0.01794043774668102, | |
| "grad_norm": 18.546377182006836, | |
| "learning_rate": 4.983315392895587e-05, | |
| "loss": 2.1232, | |
| "step": 100 | |
| }, | |
| { | |
| "epoch": 0.019734481521349122, | |
| "grad_norm": 15.411616325378418, | |
| "learning_rate": 4.981521349120919e-05, | |
| "loss": 2.1469, | |
| "step": 110 | |
| }, | |
| { | |
| "epoch": 0.021528525296017224, | |
| "grad_norm": 20.646089553833008, | |
| "learning_rate": 4.979727305346251e-05, | |
| "loss": 2.2586, | |
| "step": 120 | |
| }, | |
| { | |
| "epoch": 0.023322569070685324, | |
| "grad_norm": 17.679092407226562, | |
| "learning_rate": 4.9779332615715826e-05, | |
| "loss": 2.0833, | |
| "step": 130 | |
| }, | |
| { | |
| "epoch": 0.025116612845353426, | |
| "grad_norm": 16.85576629638672, | |
| "learning_rate": 4.976139217796914e-05, | |
| "loss": 2.0967, | |
| "step": 140 | |
| }, | |
| { | |
| "epoch": 0.02691065662002153, | |
| "grad_norm": 13.570154190063477, | |
| "learning_rate": 4.974345174022246e-05, | |
| "loss": 2.1794, | |
| "step": 150 | |
| }, | |
| { | |
| "epoch": 0.02870470039468963, | |
| "grad_norm": 20.535985946655273, | |
| "learning_rate": 4.972551130247578e-05, | |
| "loss": 2.1686, | |
| "step": 160 | |
| }, | |
| { | |
| "epoch": 0.030498744169357734, | |
| "grad_norm": 16.294347763061523, | |
| "learning_rate": 4.97075708647291e-05, | |
| "loss": 2.0392, | |
| "step": 170 | |
| }, | |
| { | |
| "epoch": 0.03229278794402583, | |
| "grad_norm": 12.76968765258789, | |
| "learning_rate": 4.968963042698242e-05, | |
| "loss": 2.1039, | |
| "step": 180 | |
| }, | |
| { | |
| "epoch": 0.034086831718693936, | |
| "grad_norm": 14.646772384643555, | |
| "learning_rate": 4.967168998923574e-05, | |
| "loss": 2.0321, | |
| "step": 190 | |
| }, | |
| { | |
| "epoch": 0.03588087549336204, | |
| "grad_norm": 13.305708885192871, | |
| "learning_rate": 4.965374955148906e-05, | |
| "loss": 2.1168, | |
| "step": 200 | |
| }, | |
| { | |
| "epoch": 0.03767491926803014, | |
| "grad_norm": 14.862943649291992, | |
| "learning_rate": 4.963580911374238e-05, | |
| "loss": 2.0901, | |
| "step": 210 | |
| }, | |
| { | |
| "epoch": 0.039468963042698244, | |
| "grad_norm": 15.443259239196777, | |
| "learning_rate": 4.96178686759957e-05, | |
| "loss": 2.0921, | |
| "step": 220 | |
| }, | |
| { | |
| "epoch": 0.041263006817366346, | |
| "grad_norm": 15.883851051330566, | |
| "learning_rate": 4.959992823824902e-05, | |
| "loss": 2.2321, | |
| "step": 230 | |
| }, | |
| { | |
| "epoch": 0.04305705059203445, | |
| "grad_norm": 12.791455268859863, | |
| "learning_rate": 4.9581987800502336e-05, | |
| "loss": 2.0071, | |
| "step": 240 | |
| }, | |
| { | |
| "epoch": 0.044851094366702544, | |
| "grad_norm": 15.706707954406738, | |
| "learning_rate": 4.956404736275565e-05, | |
| "loss": 2.1264, | |
| "step": 250 | |
| }, | |
| { | |
| "epoch": 0.04664513814137065, | |
| "grad_norm": 18.551183700561523, | |
| "learning_rate": 4.954610692500897e-05, | |
| "loss": 1.8886, | |
| "step": 260 | |
| }, | |
| { | |
| "epoch": 0.04843918191603875, | |
| "grad_norm": 24.170883178710938, | |
| "learning_rate": 4.952816648726229e-05, | |
| "loss": 2.2594, | |
| "step": 270 | |
| }, | |
| { | |
| "epoch": 0.05023322569070685, | |
| "grad_norm": 12.178893089294434, | |
| "learning_rate": 4.951022604951561e-05, | |
| "loss": 2.0534, | |
| "step": 280 | |
| }, | |
| { | |
| "epoch": 0.052027269465374955, | |
| "grad_norm": 14.537866592407227, | |
| "learning_rate": 4.949228561176893e-05, | |
| "loss": 1.9462, | |
| "step": 290 | |
| }, | |
| { | |
| "epoch": 0.05382131324004306, | |
| "grad_norm": 15.807729721069336, | |
| "learning_rate": 4.9474345174022246e-05, | |
| "loss": 2.0243, | |
| "step": 300 | |
| }, | |
| { | |
| "epoch": 0.05561535701471116, | |
| "grad_norm": 14.98117446899414, | |
| "learning_rate": 4.945640473627557e-05, | |
| "loss": 2.0688, | |
| "step": 310 | |
| }, | |
| { | |
| "epoch": 0.05740940078937926, | |
| "grad_norm": 17.541954040527344, | |
| "learning_rate": 4.943846429852889e-05, | |
| "loss": 1.9925, | |
| "step": 320 | |
| }, | |
| { | |
| "epoch": 0.059203444564047365, | |
| "grad_norm": 9.95796012878418, | |
| "learning_rate": 4.9420523860782205e-05, | |
| "loss": 1.9715, | |
| "step": 330 | |
| }, | |
| { | |
| "epoch": 0.06099748833871547, | |
| "grad_norm": 12.977546691894531, | |
| "learning_rate": 4.940258342303553e-05, | |
| "loss": 2.0491, | |
| "step": 340 | |
| }, | |
| { | |
| "epoch": 0.06279153211338356, | |
| "grad_norm": 15.887458801269531, | |
| "learning_rate": 4.938464298528884e-05, | |
| "loss": 2.1484, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 0.06458557588805167, | |
| "grad_norm": 11.04765510559082, | |
| "learning_rate": 4.9366702547542157e-05, | |
| "loss": 2.1138, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 0.06637961966271977, | |
| "grad_norm": 13.182682991027832, | |
| "learning_rate": 4.934876210979548e-05, | |
| "loss": 2.1559, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 0.06817366343738787, | |
| "grad_norm": 11.596452713012695, | |
| "learning_rate": 4.93308216720488e-05, | |
| "loss": 2.0895, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 0.06996770721205597, | |
| "grad_norm": 13.062545776367188, | |
| "learning_rate": 4.931288123430212e-05, | |
| "loss": 2.1482, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 0.07176175098672408, | |
| "grad_norm": 12.839446067810059, | |
| "learning_rate": 4.929494079655544e-05, | |
| "loss": 2.0827, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 0.07355579476139218, | |
| "grad_norm": 11.77519702911377, | |
| "learning_rate": 4.9277000358808756e-05, | |
| "loss": 2.1411, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 0.07534983853606028, | |
| "grad_norm": 10.112687110900879, | |
| "learning_rate": 4.925905992106208e-05, | |
| "loss": 2.0311, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 0.07714388231072838, | |
| "grad_norm": 13.721075057983398, | |
| "learning_rate": 4.92411194833154e-05, | |
| "loss": 1.9533, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 0.07893792608539649, | |
| "grad_norm": 13.185443878173828, | |
| "learning_rate": 4.9223179045568715e-05, | |
| "loss": 2.1598, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 0.08073196986006459, | |
| "grad_norm": 11.687894821166992, | |
| "learning_rate": 4.920523860782203e-05, | |
| "loss": 2.2673, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 0.08252601363473269, | |
| "grad_norm": 17.916851043701172, | |
| "learning_rate": 4.918729817007535e-05, | |
| "loss": 2.1737, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 0.0843200574094008, | |
| "grad_norm": 11.431639671325684, | |
| "learning_rate": 4.916935773232867e-05, | |
| "loss": 2.1655, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 0.0861141011840689, | |
| "grad_norm": 14.9763822555542, | |
| "learning_rate": 4.915141729458199e-05, | |
| "loss": 2.0351, | |
| "step": 480 | |
| }, | |
| { | |
| "epoch": 0.087908144958737, | |
| "grad_norm": 12.372869491577148, | |
| "learning_rate": 4.913347685683531e-05, | |
| "loss": 2.2227, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 0.08970218873340509, | |
| "grad_norm": 11.000882148742676, | |
| "learning_rate": 4.9115536419088625e-05, | |
| "loss": 2.0837, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 0.09149623250807319, | |
| "grad_norm": 22.3309326171875, | |
| "learning_rate": 4.909759598134195e-05, | |
| "loss": 2.1196, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 0.0932902762827413, | |
| "grad_norm": 15.499113082885742, | |
| "learning_rate": 4.9079655543595267e-05, | |
| "loss": 2.2164, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 0.0950843200574094, | |
| "grad_norm": 9.863581657409668, | |
| "learning_rate": 4.9061715105848584e-05, | |
| "loss": 2.1818, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 0.0968783638320775, | |
| "grad_norm": 14.396673202514648, | |
| "learning_rate": 4.904377466810191e-05, | |
| "loss": 2.0059, | |
| "step": 540 | |
| }, | |
| { | |
| "epoch": 0.0986724076067456, | |
| "grad_norm": 10.207282066345215, | |
| "learning_rate": 4.9025834230355225e-05, | |
| "loss": 2.1265, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 0.1004664513814137, | |
| "grad_norm": 11.846447944641113, | |
| "learning_rate": 4.900789379260854e-05, | |
| "loss": 2.2348, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 0.10226049515608181, | |
| "grad_norm": 15.00061321258545, | |
| "learning_rate": 4.898995335486186e-05, | |
| "loss": 2.0822, | |
| "step": 570 | |
| }, | |
| { | |
| "epoch": 0.10405453893074991, | |
| "grad_norm": 19.457279205322266, | |
| "learning_rate": 4.897201291711518e-05, | |
| "loss": 2.1838, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 0.10584858270541801, | |
| "grad_norm": 9.659820556640625, | |
| "learning_rate": 4.89540724793685e-05, | |
| "loss": 1.9626, | |
| "step": 590 | |
| }, | |
| { | |
| "epoch": 0.10764262648008611, | |
| "grad_norm": 14.713663101196289, | |
| "learning_rate": 4.893613204162182e-05, | |
| "loss": 2.2248, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 0.10943667025475422, | |
| "grad_norm": 10.287516593933105, | |
| "learning_rate": 4.8918191603875135e-05, | |
| "loss": 2.1525, | |
| "step": 610 | |
| }, | |
| { | |
| "epoch": 0.11123071402942232, | |
| "grad_norm": 10.284120559692383, | |
| "learning_rate": 4.890025116612846e-05, | |
| "loss": 2.1112, | |
| "step": 620 | |
| }, | |
| { | |
| "epoch": 0.11302475780409042, | |
| "grad_norm": 9.741727828979492, | |
| "learning_rate": 4.888231072838178e-05, | |
| "loss": 2.0442, | |
| "step": 630 | |
| }, | |
| { | |
| "epoch": 0.11481880157875853, | |
| "grad_norm": 8.978920936584473, | |
| "learning_rate": 4.8864370290635094e-05, | |
| "loss": 2.0471, | |
| "step": 640 | |
| }, | |
| { | |
| "epoch": 0.11661284535342663, | |
| "grad_norm": 12.450641632080078, | |
| "learning_rate": 4.884642985288841e-05, | |
| "loss": 2.1083, | |
| "step": 650 | |
| }, | |
| { | |
| "epoch": 0.11840688912809473, | |
| "grad_norm": 11.721051216125488, | |
| "learning_rate": 4.882848941514173e-05, | |
| "loss": 2.2182, | |
| "step": 660 | |
| }, | |
| { | |
| "epoch": 0.12020093290276283, | |
| "grad_norm": 9.241230964660645, | |
| "learning_rate": 4.8810548977395046e-05, | |
| "loss": 2.1022, | |
| "step": 670 | |
| }, | |
| { | |
| "epoch": 0.12199497667743094, | |
| "grad_norm": 9.87799072265625, | |
| "learning_rate": 4.879260853964837e-05, | |
| "loss": 1.9777, | |
| "step": 680 | |
| }, | |
| { | |
| "epoch": 0.12378902045209902, | |
| "grad_norm": 13.317924499511719, | |
| "learning_rate": 4.877466810190169e-05, | |
| "loss": 2.0508, | |
| "step": 690 | |
| }, | |
| { | |
| "epoch": 0.12558306422676713, | |
| "grad_norm": 11.3270845413208, | |
| "learning_rate": 4.8756727664155004e-05, | |
| "loss": 2.0391, | |
| "step": 700 | |
| }, | |
| { | |
| "epoch": 0.12737710800143523, | |
| "grad_norm": 10.925955772399902, | |
| "learning_rate": 4.873878722640833e-05, | |
| "loss": 2.1051, | |
| "step": 710 | |
| }, | |
| { | |
| "epoch": 0.12917115177610333, | |
| "grad_norm": 11.26830768585205, | |
| "learning_rate": 4.8720846788661646e-05, | |
| "loss": 1.994, | |
| "step": 720 | |
| }, | |
| { | |
| "epoch": 0.13096519555077143, | |
| "grad_norm": 16.53739356994629, | |
| "learning_rate": 4.870290635091496e-05, | |
| "loss": 2.1491, | |
| "step": 730 | |
| }, | |
| { | |
| "epoch": 0.13275923932543954, | |
| "grad_norm": 11.774067878723145, | |
| "learning_rate": 4.868496591316829e-05, | |
| "loss": 2.1163, | |
| "step": 740 | |
| }, | |
| { | |
| "epoch": 0.13455328310010764, | |
| "grad_norm": 11.250940322875977, | |
| "learning_rate": 4.8667025475421604e-05, | |
| "loss": 2.1339, | |
| "step": 750 | |
| }, | |
| { | |
| "epoch": 0.13634732687477574, | |
| "grad_norm": 12.242491722106934, | |
| "learning_rate": 4.864908503767492e-05, | |
| "loss": 1.8935, | |
| "step": 760 | |
| }, | |
| { | |
| "epoch": 0.13814137064944385, | |
| "grad_norm": 9.951498985290527, | |
| "learning_rate": 4.863114459992824e-05, | |
| "loss": 1.9869, | |
| "step": 770 | |
| }, | |
| { | |
| "epoch": 0.13993541442411195, | |
| "grad_norm": 8.47032356262207, | |
| "learning_rate": 4.8613204162181556e-05, | |
| "loss": 2.008, | |
| "step": 780 | |
| }, | |
| { | |
| "epoch": 0.14172945819878005, | |
| "grad_norm": 10.949905395507812, | |
| "learning_rate": 4.859526372443488e-05, | |
| "loss": 2.1019, | |
| "step": 790 | |
| }, | |
| { | |
| "epoch": 0.14352350197344815, | |
| "grad_norm": 11.062277793884277, | |
| "learning_rate": 4.85773232866882e-05, | |
| "loss": 2.1903, | |
| "step": 800 | |
| }, | |
| { | |
| "epoch": 0.14531754574811626, | |
| "grad_norm": 16.415149688720703, | |
| "learning_rate": 4.8559382848941514e-05, | |
| "loss": 2.1214, | |
| "step": 810 | |
| }, | |
| { | |
| "epoch": 0.14711158952278436, | |
| "grad_norm": 10.11135196685791, | |
| "learning_rate": 4.854144241119484e-05, | |
| "loss": 1.8267, | |
| "step": 820 | |
| }, | |
| { | |
| "epoch": 0.14890563329745246, | |
| "grad_norm": 10.645978927612305, | |
| "learning_rate": 4.8523501973448156e-05, | |
| "loss": 2.0422, | |
| "step": 830 | |
| }, | |
| { | |
| "epoch": 0.15069967707212056, | |
| "grad_norm": 8.619297981262207, | |
| "learning_rate": 4.850556153570147e-05, | |
| "loss": 2.2584, | |
| "step": 840 | |
| }, | |
| { | |
| "epoch": 0.15249372084678867, | |
| "grad_norm": 9.050743103027344, | |
| "learning_rate": 4.84876210979548e-05, | |
| "loss": 2.0256, | |
| "step": 850 | |
| }, | |
| { | |
| "epoch": 0.15428776462145677, | |
| "grad_norm": 10.283220291137695, | |
| "learning_rate": 4.8469680660208114e-05, | |
| "loss": 2.0351, | |
| "step": 860 | |
| }, | |
| { | |
| "epoch": 0.15608180839612487, | |
| "grad_norm": 10.877766609191895, | |
| "learning_rate": 4.8451740222461425e-05, | |
| "loss": 2.0931, | |
| "step": 870 | |
| }, | |
| { | |
| "epoch": 0.15787585217079297, | |
| "grad_norm": 13.12762451171875, | |
| "learning_rate": 4.843379978471475e-05, | |
| "loss": 2.1018, | |
| "step": 880 | |
| }, | |
| { | |
| "epoch": 0.15966989594546108, | |
| "grad_norm": 9.463711738586426, | |
| "learning_rate": 4.8415859346968066e-05, | |
| "loss": 2.0868, | |
| "step": 890 | |
| }, | |
| { | |
| "epoch": 0.16146393972012918, | |
| "grad_norm": 7.113370418548584, | |
| "learning_rate": 4.839791890922138e-05, | |
| "loss": 2.0611, | |
| "step": 900 | |
| }, | |
| { | |
| "epoch": 0.16325798349479728, | |
| "grad_norm": 11.066067695617676, | |
| "learning_rate": 4.837997847147471e-05, | |
| "loss": 2.0621, | |
| "step": 910 | |
| }, | |
| { | |
| "epoch": 0.16505202726946538, | |
| "grad_norm": 12.794750213623047, | |
| "learning_rate": 4.8362038033728025e-05, | |
| "loss": 2.0361, | |
| "step": 920 | |
| }, | |
| { | |
| "epoch": 0.1668460710441335, | |
| "grad_norm": 10.758418083190918, | |
| "learning_rate": 4.834409759598135e-05, | |
| "loss": 1.9764, | |
| "step": 930 | |
| }, | |
| { | |
| "epoch": 0.1686401148188016, | |
| "grad_norm": 13.04681396484375, | |
| "learning_rate": 4.8326157158234666e-05, | |
| "loss": 2.0678, | |
| "step": 940 | |
| }, | |
| { | |
| "epoch": 0.1704341585934697, | |
| "grad_norm": 12.170126914978027, | |
| "learning_rate": 4.830821672048798e-05, | |
| "loss": 2.1812, | |
| "step": 950 | |
| }, | |
| { | |
| "epoch": 0.1722282023681378, | |
| "grad_norm": 9.722840309143066, | |
| "learning_rate": 4.82902762827413e-05, | |
| "loss": 2.0603, | |
| "step": 960 | |
| }, | |
| { | |
| "epoch": 0.1740222461428059, | |
| "grad_norm": 11.84780216217041, | |
| "learning_rate": 4.827233584499462e-05, | |
| "loss": 1.9741, | |
| "step": 970 | |
| }, | |
| { | |
| "epoch": 0.175816289917474, | |
| "grad_norm": 11.813640594482422, | |
| "learning_rate": 4.8254395407247935e-05, | |
| "loss": 2.0533, | |
| "step": 980 | |
| }, | |
| { | |
| "epoch": 0.17761033369214208, | |
| "grad_norm": 14.080793380737305, | |
| "learning_rate": 4.823645496950126e-05, | |
| "loss": 2.0094, | |
| "step": 990 | |
| }, | |
| { | |
| "epoch": 0.17940437746681018, | |
| "grad_norm": 10.01248550415039, | |
| "learning_rate": 4.8218514531754576e-05, | |
| "loss": 2.1019, | |
| "step": 1000 | |
| }, | |
| { | |
| "epoch": 0.18119842124147828, | |
| "grad_norm": 10.648508071899414, | |
| "learning_rate": 4.8200574094007894e-05, | |
| "loss": 2.018, | |
| "step": 1010 | |
| }, | |
| { | |
| "epoch": 0.18299246501614638, | |
| "grad_norm": 12.019718170166016, | |
| "learning_rate": 4.818263365626122e-05, | |
| "loss": 1.9094, | |
| "step": 1020 | |
| }, | |
| { | |
| "epoch": 0.18478650879081449, | |
| "grad_norm": 8.853078842163086, | |
| "learning_rate": 4.8164693218514535e-05, | |
| "loss": 2.1722, | |
| "step": 1030 | |
| }, | |
| { | |
| "epoch": 0.1865805525654826, | |
| "grad_norm": 8.063078880310059, | |
| "learning_rate": 4.814675278076785e-05, | |
| "loss": 2.0377, | |
| "step": 1040 | |
| }, | |
| { | |
| "epoch": 0.1883745963401507, | |
| "grad_norm": 9.952075958251953, | |
| "learning_rate": 4.8128812343021176e-05, | |
| "loss": 2.1291, | |
| "step": 1050 | |
| }, | |
| { | |
| "epoch": 0.1901686401148188, | |
| "grad_norm": 10.393296241760254, | |
| "learning_rate": 4.811087190527449e-05, | |
| "loss": 2.0707, | |
| "step": 1060 | |
| }, | |
| { | |
| "epoch": 0.1919626838894869, | |
| "grad_norm": 10.250103950500488, | |
| "learning_rate": 4.809293146752781e-05, | |
| "loss": 2.0411, | |
| "step": 1070 | |
| }, | |
| { | |
| "epoch": 0.193756727664155, | |
| "grad_norm": 10.035394668579102, | |
| "learning_rate": 4.807499102978113e-05, | |
| "loss": 1.9696, | |
| "step": 1080 | |
| }, | |
| { | |
| "epoch": 0.1955507714388231, | |
| "grad_norm": 12.22006893157959, | |
| "learning_rate": 4.8057050592034445e-05, | |
| "loss": 2.0681, | |
| "step": 1090 | |
| }, | |
| { | |
| "epoch": 0.1973448152134912, | |
| "grad_norm": 9.505834579467773, | |
| "learning_rate": 4.803911015428776e-05, | |
| "loss": 2.0469, | |
| "step": 1100 | |
| }, | |
| { | |
| "epoch": 0.1991388589881593, | |
| "grad_norm": 8.87895393371582, | |
| "learning_rate": 4.8021169716541086e-05, | |
| "loss": 2.1125, | |
| "step": 1110 | |
| }, | |
| { | |
| "epoch": 0.2009329027628274, | |
| "grad_norm": 8.545978546142578, | |
| "learning_rate": 4.8003229278794404e-05, | |
| "loss": 2.008, | |
| "step": 1120 | |
| }, | |
| { | |
| "epoch": 0.2027269465374955, | |
| "grad_norm": 9.056685447692871, | |
| "learning_rate": 4.798528884104773e-05, | |
| "loss": 1.9602, | |
| "step": 1130 | |
| }, | |
| { | |
| "epoch": 0.20452099031216361, | |
| "grad_norm": 15.332698822021484, | |
| "learning_rate": 4.7967348403301045e-05, | |
| "loss": 1.8802, | |
| "step": 1140 | |
| }, | |
| { | |
| "epoch": 0.20631503408683172, | |
| "grad_norm": 12.204120635986328, | |
| "learning_rate": 4.794940796555436e-05, | |
| "loss": 1.9994, | |
| "step": 1150 | |
| }, | |
| { | |
| "epoch": 0.20810907786149982, | |
| "grad_norm": 10.399602890014648, | |
| "learning_rate": 4.7931467527807686e-05, | |
| "loss": 2.1549, | |
| "step": 1160 | |
| }, | |
| { | |
| "epoch": 0.20990312163616792, | |
| "grad_norm": 9.073091506958008, | |
| "learning_rate": 4.7913527090061e-05, | |
| "loss": 2.1483, | |
| "step": 1170 | |
| }, | |
| { | |
| "epoch": 0.21169716541083602, | |
| "grad_norm": 11.733457565307617, | |
| "learning_rate": 4.7895586652314314e-05, | |
| "loss": 2.1155, | |
| "step": 1180 | |
| }, | |
| { | |
| "epoch": 0.21349120918550413, | |
| "grad_norm": 7.083014011383057, | |
| "learning_rate": 4.787764621456764e-05, | |
| "loss": 2.0738, | |
| "step": 1190 | |
| }, | |
| { | |
| "epoch": 0.21528525296017223, | |
| "grad_norm": 11.031323432922363, | |
| "learning_rate": 4.7859705776820955e-05, | |
| "loss": 2.1221, | |
| "step": 1200 | |
| }, | |
| { | |
| "epoch": 0.21707929673484033, | |
| "grad_norm": 9.98110580444336, | |
| "learning_rate": 4.784176533907427e-05, | |
| "loss": 2.0356, | |
| "step": 1210 | |
| }, | |
| { | |
| "epoch": 0.21887334050950844, | |
| "grad_norm": 13.873873710632324, | |
| "learning_rate": 4.78238249013276e-05, | |
| "loss": 2.0523, | |
| "step": 1220 | |
| }, | |
| { | |
| "epoch": 0.22066738428417654, | |
| "grad_norm": 10.365226745605469, | |
| "learning_rate": 4.7805884463580914e-05, | |
| "loss": 2.0989, | |
| "step": 1230 | |
| }, | |
| { | |
| "epoch": 0.22246142805884464, | |
| "grad_norm": 9.975032806396484, | |
| "learning_rate": 4.778794402583423e-05, | |
| "loss": 2.0278, | |
| "step": 1240 | |
| }, | |
| { | |
| "epoch": 0.22425547183351274, | |
| "grad_norm": 10.711677551269531, | |
| "learning_rate": 4.7770003588087555e-05, | |
| "loss": 2.1347, | |
| "step": 1250 | |
| }, | |
| { | |
| "epoch": 0.22604951560818085, | |
| "grad_norm": 8.741484642028809, | |
| "learning_rate": 4.775206315034087e-05, | |
| "loss": 2.0043, | |
| "step": 1260 | |
| }, | |
| { | |
| "epoch": 0.22784355938284895, | |
| "grad_norm": 11.853199005126953, | |
| "learning_rate": 4.773412271259419e-05, | |
| "loss": 2.1624, | |
| "step": 1270 | |
| }, | |
| { | |
| "epoch": 0.22963760315751705, | |
| "grad_norm": 8.458366394042969, | |
| "learning_rate": 4.771618227484751e-05, | |
| "loss": 2.0635, | |
| "step": 1280 | |
| }, | |
| { | |
| "epoch": 0.23143164693218515, | |
| "grad_norm": 9.640998840332031, | |
| "learning_rate": 4.7698241837100824e-05, | |
| "loss": 1.9351, | |
| "step": 1290 | |
| }, | |
| { | |
| "epoch": 0.23322569070685326, | |
| "grad_norm": 11.35208511352539, | |
| "learning_rate": 4.768030139935415e-05, | |
| "loss": 2.0016, | |
| "step": 1300 | |
| }, | |
| { | |
| "epoch": 0.23501973448152136, | |
| "grad_norm": 12.047490119934082, | |
| "learning_rate": 4.7662360961607465e-05, | |
| "loss": 1.9982, | |
| "step": 1310 | |
| }, | |
| { | |
| "epoch": 0.23681377825618946, | |
| "grad_norm": 7.340638637542725, | |
| "learning_rate": 4.764442052386078e-05, | |
| "loss": 1.9317, | |
| "step": 1320 | |
| }, | |
| { | |
| "epoch": 0.23860782203085756, | |
| "grad_norm": 9.259571075439453, | |
| "learning_rate": 4.762648008611411e-05, | |
| "loss": 1.867, | |
| "step": 1330 | |
| }, | |
| { | |
| "epoch": 0.24040186580552567, | |
| "grad_norm": 10.415928840637207, | |
| "learning_rate": 4.7608539648367424e-05, | |
| "loss": 1.8926, | |
| "step": 1340 | |
| }, | |
| { | |
| "epoch": 0.24219590958019377, | |
| "grad_norm": 10.523111343383789, | |
| "learning_rate": 4.759059921062074e-05, | |
| "loss": 1.9432, | |
| "step": 1350 | |
| }, | |
| { | |
| "epoch": 0.24398995335486187, | |
| "grad_norm": 10.31205940246582, | |
| "learning_rate": 4.7572658772874065e-05, | |
| "loss": 2.0679, | |
| "step": 1360 | |
| }, | |
| { | |
| "epoch": 0.24578399712952997, | |
| "grad_norm": 11.592795372009277, | |
| "learning_rate": 4.755471833512738e-05, | |
| "loss": 1.8786, | |
| "step": 1370 | |
| }, | |
| { | |
| "epoch": 0.24757804090419805, | |
| "grad_norm": 13.131718635559082, | |
| "learning_rate": 4.75367778973807e-05, | |
| "loss": 2.1331, | |
| "step": 1380 | |
| }, | |
| { | |
| "epoch": 0.24937208467886615, | |
| "grad_norm": 8.448001861572266, | |
| "learning_rate": 4.751883745963402e-05, | |
| "loss": 2.2519, | |
| "step": 1390 | |
| }, | |
| { | |
| "epoch": 0.25116612845353425, | |
| "grad_norm": 9.400849342346191, | |
| "learning_rate": 4.7500897021887334e-05, | |
| "loss": 2.162, | |
| "step": 1400 | |
| }, | |
| { | |
| "epoch": 0.2529601722282024, | |
| "grad_norm": 9.269533157348633, | |
| "learning_rate": 4.748295658414065e-05, | |
| "loss": 2.0009, | |
| "step": 1410 | |
| }, | |
| { | |
| "epoch": 0.25475421600287046, | |
| "grad_norm": 12.824959754943848, | |
| "learning_rate": 4.7465016146393976e-05, | |
| "loss": 2.1757, | |
| "step": 1420 | |
| }, | |
| { | |
| "epoch": 0.2565482597775386, | |
| "grad_norm": 6.765061378479004, | |
| "learning_rate": 4.744707570864729e-05, | |
| "loss": 1.9529, | |
| "step": 1430 | |
| }, | |
| { | |
| "epoch": 0.25834230355220666, | |
| "grad_norm": 8.876534461975098, | |
| "learning_rate": 4.742913527090061e-05, | |
| "loss": 1.9964, | |
| "step": 1440 | |
| }, | |
| { | |
| "epoch": 0.2601363473268748, | |
| "grad_norm": 13.438302040100098, | |
| "learning_rate": 4.7411194833153934e-05, | |
| "loss": 2.0535, | |
| "step": 1450 | |
| }, | |
| { | |
| "epoch": 0.26193039110154287, | |
| "grad_norm": 11.55459213256836, | |
| "learning_rate": 4.739325439540725e-05, | |
| "loss": 2.0917, | |
| "step": 1460 | |
| }, | |
| { | |
| "epoch": 0.263724434876211, | |
| "grad_norm": 13.218855857849121, | |
| "learning_rate": 4.737531395766057e-05, | |
| "loss": 1.9388, | |
| "step": 1470 | |
| }, | |
| { | |
| "epoch": 0.2655184786508791, | |
| "grad_norm": 8.17874526977539, | |
| "learning_rate": 4.7357373519913886e-05, | |
| "loss": 1.9581, | |
| "step": 1480 | |
| }, | |
| { | |
| "epoch": 0.2673125224255472, | |
| "grad_norm": 9.68444538116455, | |
| "learning_rate": 4.73394330821672e-05, | |
| "loss": 2.0179, | |
| "step": 1490 | |
| }, | |
| { | |
| "epoch": 0.2691065662002153, | |
| "grad_norm": 10.616148948669434, | |
| "learning_rate": 4.732149264442053e-05, | |
| "loss": 2.024, | |
| "step": 1500 | |
| }, | |
| { | |
| "epoch": 0.2709006099748834, | |
| "grad_norm": 11.970083236694336, | |
| "learning_rate": 4.7303552206673845e-05, | |
| "loss": 2.1263, | |
| "step": 1510 | |
| }, | |
| { | |
| "epoch": 0.2726946537495515, | |
| "grad_norm": 9.173564910888672, | |
| "learning_rate": 4.728561176892716e-05, | |
| "loss": 1.9028, | |
| "step": 1520 | |
| }, | |
| { | |
| "epoch": 0.2744886975242196, | |
| "grad_norm": 12.469501495361328, | |
| "learning_rate": 4.7267671331180486e-05, | |
| "loss": 2.1234, | |
| "step": 1530 | |
| }, | |
| { | |
| "epoch": 0.2762827412988877, | |
| "grad_norm": 7.576513290405273, | |
| "learning_rate": 4.72497308934338e-05, | |
| "loss": 1.9832, | |
| "step": 1540 | |
| }, | |
| { | |
| "epoch": 0.2780767850735558, | |
| "grad_norm": 7.813937187194824, | |
| "learning_rate": 4.723179045568712e-05, | |
| "loss": 2.0337, | |
| "step": 1550 | |
| }, | |
| { | |
| "epoch": 0.2798708288482239, | |
| "grad_norm": 8.702999114990234, | |
| "learning_rate": 4.7213850017940444e-05, | |
| "loss": 1.8968, | |
| "step": 1560 | |
| }, | |
| { | |
| "epoch": 0.281664872622892, | |
| "grad_norm": 9.221305847167969, | |
| "learning_rate": 4.719590958019376e-05, | |
| "loss": 1.8972, | |
| "step": 1570 | |
| }, | |
| { | |
| "epoch": 0.2834589163975601, | |
| "grad_norm": 7.7957892417907715, | |
| "learning_rate": 4.717796914244708e-05, | |
| "loss": 1.985, | |
| "step": 1580 | |
| }, | |
| { | |
| "epoch": 0.2852529601722282, | |
| "grad_norm": 15.21849250793457, | |
| "learning_rate": 4.7160028704700396e-05, | |
| "loss": 1.8525, | |
| "step": 1590 | |
| }, | |
| { | |
| "epoch": 0.2870470039468963, | |
| "grad_norm": 11.758828163146973, | |
| "learning_rate": 4.7142088266953713e-05, | |
| "loss": 2.039, | |
| "step": 1600 | |
| }, | |
| { | |
| "epoch": 0.2888410477215644, | |
| "grad_norm": 9.403512001037598, | |
| "learning_rate": 4.712414782920703e-05, | |
| "loss": 2.0726, | |
| "step": 1610 | |
| }, | |
| { | |
| "epoch": 0.2906350914962325, | |
| "grad_norm": 10.458137512207031, | |
| "learning_rate": 4.7106207391460355e-05, | |
| "loss": 2.1689, | |
| "step": 1620 | |
| }, | |
| { | |
| "epoch": 0.2924291352709006, | |
| "grad_norm": 7.665112495422363, | |
| "learning_rate": 4.708826695371367e-05, | |
| "loss": 1.9077, | |
| "step": 1630 | |
| }, | |
| { | |
| "epoch": 0.2942231790455687, | |
| "grad_norm": 11.904746055603027, | |
| "learning_rate": 4.707032651596699e-05, | |
| "loss": 2.0425, | |
| "step": 1640 | |
| }, | |
| { | |
| "epoch": 0.2960172228202368, | |
| "grad_norm": 8.795477867126465, | |
| "learning_rate": 4.705238607822031e-05, | |
| "loss": 1.9992, | |
| "step": 1650 | |
| }, | |
| { | |
| "epoch": 0.2978112665949049, | |
| "grad_norm": 7.0219550132751465, | |
| "learning_rate": 4.703444564047363e-05, | |
| "loss": 1.9143, | |
| "step": 1660 | |
| }, | |
| { | |
| "epoch": 0.299605310369573, | |
| "grad_norm": 7.745930194854736, | |
| "learning_rate": 4.7016505202726955e-05, | |
| "loss": 1.8667, | |
| "step": 1670 | |
| }, | |
| { | |
| "epoch": 0.3013993541442411, | |
| "grad_norm": 7.579682350158691, | |
| "learning_rate": 4.699856476498027e-05, | |
| "loss": 2.0123, | |
| "step": 1680 | |
| }, | |
| { | |
| "epoch": 0.3031933979189092, | |
| "grad_norm": 7.5498199462890625, | |
| "learning_rate": 4.698062432723358e-05, | |
| "loss": 2.1165, | |
| "step": 1690 | |
| }, | |
| { | |
| "epoch": 0.30498744169357733, | |
| "grad_norm": 8.588129043579102, | |
| "learning_rate": 4.6962683889486906e-05, | |
| "loss": 2.0662, | |
| "step": 1700 | |
| }, | |
| { | |
| "epoch": 0.3067814854682454, | |
| "grad_norm": 6.789324760437012, | |
| "learning_rate": 4.6944743451740224e-05, | |
| "loss": 1.9604, | |
| "step": 1710 | |
| }, | |
| { | |
| "epoch": 0.30857552924291354, | |
| "grad_norm": 8.46413803100586, | |
| "learning_rate": 4.692680301399354e-05, | |
| "loss": 2.0078, | |
| "step": 1720 | |
| }, | |
| { | |
| "epoch": 0.3103695730175816, | |
| "grad_norm": 9.2481689453125, | |
| "learning_rate": 4.6908862576246865e-05, | |
| "loss": 2.0001, | |
| "step": 1730 | |
| }, | |
| { | |
| "epoch": 0.31216361679224974, | |
| "grad_norm": 9.034040451049805, | |
| "learning_rate": 4.689092213850018e-05, | |
| "loss": 2.1038, | |
| "step": 1740 | |
| }, | |
| { | |
| "epoch": 0.3139576605669178, | |
| "grad_norm": 9.660133361816406, | |
| "learning_rate": 4.68729817007535e-05, | |
| "loss": 2.0491, | |
| "step": 1750 | |
| }, | |
| { | |
| "epoch": 0.31575170434158595, | |
| "grad_norm": 8.319841384887695, | |
| "learning_rate": 4.6855041263006823e-05, | |
| "loss": 1.9269, | |
| "step": 1760 | |
| }, | |
| { | |
| "epoch": 0.317545748116254, | |
| "grad_norm": 12.393325805664062, | |
| "learning_rate": 4.683710082526014e-05, | |
| "loss": 2.0983, | |
| "step": 1770 | |
| }, | |
| { | |
| "epoch": 0.31933979189092215, | |
| "grad_norm": 9.26393985748291, | |
| "learning_rate": 4.681916038751346e-05, | |
| "loss": 1.8658, | |
| "step": 1780 | |
| }, | |
| { | |
| "epoch": 0.32113383566559023, | |
| "grad_norm": 10.711555480957031, | |
| "learning_rate": 4.6801219949766775e-05, | |
| "loss": 2.0091, | |
| "step": 1790 | |
| }, | |
| { | |
| "epoch": 0.32292787944025836, | |
| "grad_norm": 9.118010520935059, | |
| "learning_rate": 4.678327951202009e-05, | |
| "loss": 2.119, | |
| "step": 1800 | |
| }, | |
| { | |
| "epoch": 0.32472192321492643, | |
| "grad_norm": 10.101414680480957, | |
| "learning_rate": 4.676533907427341e-05, | |
| "loss": 2.1448, | |
| "step": 1810 | |
| }, | |
| { | |
| "epoch": 0.32651596698959456, | |
| "grad_norm": 11.349935531616211, | |
| "learning_rate": 4.6747398636526734e-05, | |
| "loss": 2.0042, | |
| "step": 1820 | |
| }, | |
| { | |
| "epoch": 0.32831001076426264, | |
| "grad_norm": 8.576199531555176, | |
| "learning_rate": 4.672945819878005e-05, | |
| "loss": 2.0786, | |
| "step": 1830 | |
| }, | |
| { | |
| "epoch": 0.33010405453893077, | |
| "grad_norm": 8.958368301391602, | |
| "learning_rate": 4.671151776103337e-05, | |
| "loss": 1.9364, | |
| "step": 1840 | |
| }, | |
| { | |
| "epoch": 0.33189809831359884, | |
| "grad_norm": 10.209322929382324, | |
| "learning_rate": 4.669357732328669e-05, | |
| "loss": 2.1192, | |
| "step": 1850 | |
| }, | |
| { | |
| "epoch": 0.333692142088267, | |
| "grad_norm": 10.517390251159668, | |
| "learning_rate": 4.667563688554001e-05, | |
| "loss": 1.8833, | |
| "step": 1860 | |
| }, | |
| { | |
| "epoch": 0.33548618586293505, | |
| "grad_norm": 11.347796440124512, | |
| "learning_rate": 4.6657696447793334e-05, | |
| "loss": 2.0498, | |
| "step": 1870 | |
| }, | |
| { | |
| "epoch": 0.3372802296376032, | |
| "grad_norm": 9.35456371307373, | |
| "learning_rate": 4.663975601004665e-05, | |
| "loss": 2.0158, | |
| "step": 1880 | |
| }, | |
| { | |
| "epoch": 0.33907427341227125, | |
| "grad_norm": 7.018476486206055, | |
| "learning_rate": 4.662181557229997e-05, | |
| "loss": 1.9527, | |
| "step": 1890 | |
| }, | |
| { | |
| "epoch": 0.3408683171869394, | |
| "grad_norm": 13.440537452697754, | |
| "learning_rate": 4.6603875134553285e-05, | |
| "loss": 1.9492, | |
| "step": 1900 | |
| }, | |
| { | |
| "epoch": 0.34266236096160746, | |
| "grad_norm": 7.040363788604736, | |
| "learning_rate": 4.65859346968066e-05, | |
| "loss": 2.085, | |
| "step": 1910 | |
| }, | |
| { | |
| "epoch": 0.3444564047362756, | |
| "grad_norm": 8.57967472076416, | |
| "learning_rate": 4.656799425905992e-05, | |
| "loss": 1.9639, | |
| "step": 1920 | |
| }, | |
| { | |
| "epoch": 0.34625044851094366, | |
| "grad_norm": 7.877899169921875, | |
| "learning_rate": 4.6550053821313244e-05, | |
| "loss": 1.9315, | |
| "step": 1930 | |
| }, | |
| { | |
| "epoch": 0.3480444922856118, | |
| "grad_norm": 10.133997917175293, | |
| "learning_rate": 4.653211338356656e-05, | |
| "loss": 1.9489, | |
| "step": 1940 | |
| }, | |
| { | |
| "epoch": 0.34983853606027987, | |
| "grad_norm": 10.46199893951416, | |
| "learning_rate": 4.651417294581988e-05, | |
| "loss": 2.1294, | |
| "step": 1950 | |
| }, | |
| { | |
| "epoch": 0.351632579834948, | |
| "grad_norm": 10.541289329528809, | |
| "learning_rate": 4.64962325080732e-05, | |
| "loss": 2.0334, | |
| "step": 1960 | |
| }, | |
| { | |
| "epoch": 0.3534266236096161, | |
| "grad_norm": 10.026679039001465, | |
| "learning_rate": 4.647829207032652e-05, | |
| "loss": 1.9669, | |
| "step": 1970 | |
| }, | |
| { | |
| "epoch": 0.35522066738428415, | |
| "grad_norm": 9.2806978225708, | |
| "learning_rate": 4.646035163257984e-05, | |
| "loss": 2.0065, | |
| "step": 1980 | |
| }, | |
| { | |
| "epoch": 0.3570147111589523, | |
| "grad_norm": 11.332128524780273, | |
| "learning_rate": 4.6442411194833154e-05, | |
| "loss": 2.1402, | |
| "step": 1990 | |
| }, | |
| { | |
| "epoch": 0.35880875493362036, | |
| "grad_norm": 9.25808048248291, | |
| "learning_rate": 4.642447075708647e-05, | |
| "loss": 2.1242, | |
| "step": 2000 | |
| }, | |
| { | |
| "epoch": 0.3606027987082885, | |
| "grad_norm": 9.891722679138184, | |
| "learning_rate": 4.640653031933979e-05, | |
| "loss": 2.0954, | |
| "step": 2010 | |
| }, | |
| { | |
| "epoch": 0.36239684248295656, | |
| "grad_norm": 8.027229309082031, | |
| "learning_rate": 4.638858988159311e-05, | |
| "loss": 1.94, | |
| "step": 2020 | |
| }, | |
| { | |
| "epoch": 0.3641908862576247, | |
| "grad_norm": 7.155013084411621, | |
| "learning_rate": 4.637064944384643e-05, | |
| "loss": 2.0099, | |
| "step": 2030 | |
| }, | |
| { | |
| "epoch": 0.36598493003229277, | |
| "grad_norm": 7.851609230041504, | |
| "learning_rate": 4.6352709006099754e-05, | |
| "loss": 2.0819, | |
| "step": 2040 | |
| }, | |
| { | |
| "epoch": 0.3677789738069609, | |
| "grad_norm": 9.718694686889648, | |
| "learning_rate": 4.633476856835307e-05, | |
| "loss": 2.0265, | |
| "step": 2050 | |
| }, | |
| { | |
| "epoch": 0.36957301758162897, | |
| "grad_norm": 7.794171333312988, | |
| "learning_rate": 4.631682813060639e-05, | |
| "loss": 1.9589, | |
| "step": 2060 | |
| }, | |
| { | |
| "epoch": 0.3713670613562971, | |
| "grad_norm": 9.896645545959473, | |
| "learning_rate": 4.629888769285971e-05, | |
| "loss": 1.8671, | |
| "step": 2070 | |
| }, | |
| { | |
| "epoch": 0.3731611051309652, | |
| "grad_norm": 8.257375717163086, | |
| "learning_rate": 4.628094725511303e-05, | |
| "loss": 2.0167, | |
| "step": 2080 | |
| }, | |
| { | |
| "epoch": 0.3749551489056333, | |
| "grad_norm": 9.882346153259277, | |
| "learning_rate": 4.626300681736635e-05, | |
| "loss": 2.079, | |
| "step": 2090 | |
| }, | |
| { | |
| "epoch": 0.3767491926803014, | |
| "grad_norm": 8.422935485839844, | |
| "learning_rate": 4.6245066379619664e-05, | |
| "loss": 1.998, | |
| "step": 2100 | |
| }, | |
| { | |
| "epoch": 0.3785432364549695, | |
| "grad_norm": 9.090377807617188, | |
| "learning_rate": 4.622712594187298e-05, | |
| "loss": 1.8588, | |
| "step": 2110 | |
| }, | |
| { | |
| "epoch": 0.3803372802296376, | |
| "grad_norm": 9.11992359161377, | |
| "learning_rate": 4.62091855041263e-05, | |
| "loss": 1.9268, | |
| "step": 2120 | |
| }, | |
| { | |
| "epoch": 0.3821313240043057, | |
| "grad_norm": 7.935147285461426, | |
| "learning_rate": 4.619124506637962e-05, | |
| "loss": 2.1426, | |
| "step": 2130 | |
| }, | |
| { | |
| "epoch": 0.3839253677789738, | |
| "grad_norm": 6.065644264221191, | |
| "learning_rate": 4.617330462863294e-05, | |
| "loss": 1.9426, | |
| "step": 2140 | |
| }, | |
| { | |
| "epoch": 0.3857194115536419, | |
| "grad_norm": 7.789048194885254, | |
| "learning_rate": 4.615536419088626e-05, | |
| "loss": 1.9607, | |
| "step": 2150 | |
| }, | |
| { | |
| "epoch": 0.38751345532831, | |
| "grad_norm": 10.809876441955566, | |
| "learning_rate": 4.613742375313958e-05, | |
| "loss": 2.0248, | |
| "step": 2160 | |
| }, | |
| { | |
| "epoch": 0.3893074991029781, | |
| "grad_norm": 7.871123790740967, | |
| "learning_rate": 4.61194833153929e-05, | |
| "loss": 2.0491, | |
| "step": 2170 | |
| }, | |
| { | |
| "epoch": 0.3911015428776462, | |
| "grad_norm": 10.08164119720459, | |
| "learning_rate": 4.6101542877646216e-05, | |
| "loss": 1.8583, | |
| "step": 2180 | |
| }, | |
| { | |
| "epoch": 0.39289558665231433, | |
| "grad_norm": 8.326801300048828, | |
| "learning_rate": 4.608360243989954e-05, | |
| "loss": 2.0879, | |
| "step": 2190 | |
| }, | |
| { | |
| "epoch": 0.3946896304269824, | |
| "grad_norm": 9.958331108093262, | |
| "learning_rate": 4.606566200215286e-05, | |
| "loss": 1.9132, | |
| "step": 2200 | |
| }, | |
| { | |
| "epoch": 0.39648367420165054, | |
| "grad_norm": 8.36109733581543, | |
| "learning_rate": 4.604772156440617e-05, | |
| "loss": 1.9548, | |
| "step": 2210 | |
| }, | |
| { | |
| "epoch": 0.3982777179763186, | |
| "grad_norm": 8.014139175415039, | |
| "learning_rate": 4.602978112665949e-05, | |
| "loss": 1.8601, | |
| "step": 2220 | |
| }, | |
| { | |
| "epoch": 0.40007176175098674, | |
| "grad_norm": 8.810429573059082, | |
| "learning_rate": 4.601184068891281e-05, | |
| "loss": 2.0101, | |
| "step": 2230 | |
| }, | |
| { | |
| "epoch": 0.4018658055256548, | |
| "grad_norm": 9.849298477172852, | |
| "learning_rate": 4.599390025116613e-05, | |
| "loss": 1.9178, | |
| "step": 2240 | |
| }, | |
| { | |
| "epoch": 0.40365984930032295, | |
| "grad_norm": 7.431755065917969, | |
| "learning_rate": 4.597595981341945e-05, | |
| "loss": 1.9154, | |
| "step": 2250 | |
| }, | |
| { | |
| "epoch": 0.405453893074991, | |
| "grad_norm": 10.968128204345703, | |
| "learning_rate": 4.595801937567277e-05, | |
| "loss": 2.0084, | |
| "step": 2260 | |
| }, | |
| { | |
| "epoch": 0.40724793684965915, | |
| "grad_norm": 8.786876678466797, | |
| "learning_rate": 4.594007893792609e-05, | |
| "loss": 2.1216, | |
| "step": 2270 | |
| }, | |
| { | |
| "epoch": 0.40904198062432723, | |
| "grad_norm": 8.992511749267578, | |
| "learning_rate": 4.592213850017941e-05, | |
| "loss": 1.9462, | |
| "step": 2280 | |
| }, | |
| { | |
| "epoch": 0.41083602439899536, | |
| "grad_norm": 8.601082801818848, | |
| "learning_rate": 4.5904198062432726e-05, | |
| "loss": 1.988, | |
| "step": 2290 | |
| }, | |
| { | |
| "epoch": 0.41263006817366343, | |
| "grad_norm": 9.053206443786621, | |
| "learning_rate": 4.5886257624686044e-05, | |
| "loss": 1.8687, | |
| "step": 2300 | |
| }, | |
| { | |
| "epoch": 0.41442411194833156, | |
| "grad_norm": 10.55015754699707, | |
| "learning_rate": 4.586831718693936e-05, | |
| "loss": 1.9853, | |
| "step": 2310 | |
| }, | |
| { | |
| "epoch": 0.41621815572299964, | |
| "grad_norm": 7.392729759216309, | |
| "learning_rate": 4.585037674919268e-05, | |
| "loss": 2.0305, | |
| "step": 2320 | |
| }, | |
| { | |
| "epoch": 0.41801219949766777, | |
| "grad_norm": 8.890578269958496, | |
| "learning_rate": 4.5832436311446e-05, | |
| "loss": 2.1245, | |
| "step": 2330 | |
| }, | |
| { | |
| "epoch": 0.41980624327233584, | |
| "grad_norm": 8.476337432861328, | |
| "learning_rate": 4.581449587369932e-05, | |
| "loss": 1.8404, | |
| "step": 2340 | |
| }, | |
| { | |
| "epoch": 0.421600287047004, | |
| "grad_norm": 11.671123504638672, | |
| "learning_rate": 4.5796555435952637e-05, | |
| "loss": 2.0296, | |
| "step": 2350 | |
| }, | |
| { | |
| "epoch": 0.42339433082167205, | |
| "grad_norm": 9.152827262878418, | |
| "learning_rate": 4.577861499820596e-05, | |
| "loss": 1.9146, | |
| "step": 2360 | |
| }, | |
| { | |
| "epoch": 0.4251883745963401, | |
| "grad_norm": 7.549522876739502, | |
| "learning_rate": 4.576067456045928e-05, | |
| "loss": 2.0178, | |
| "step": 2370 | |
| }, | |
| { | |
| "epoch": 0.42698241837100825, | |
| "grad_norm": 11.978499412536621, | |
| "learning_rate": 4.5742734122712595e-05, | |
| "loss": 1.7915, | |
| "step": 2380 | |
| }, | |
| { | |
| "epoch": 0.42877646214567633, | |
| "grad_norm": 10.303351402282715, | |
| "learning_rate": 4.572479368496592e-05, | |
| "loss": 1.8197, | |
| "step": 2390 | |
| }, | |
| { | |
| "epoch": 0.43057050592034446, | |
| "grad_norm": 9.132424354553223, | |
| "learning_rate": 4.5706853247219236e-05, | |
| "loss": 2.1076, | |
| "step": 2400 | |
| }, | |
| { | |
| "epoch": 0.43236454969501253, | |
| "grad_norm": 9.768096923828125, | |
| "learning_rate": 4.5688912809472554e-05, | |
| "loss": 1.8818, | |
| "step": 2410 | |
| }, | |
| { | |
| "epoch": 0.43415859346968066, | |
| "grad_norm": 7.977086067199707, | |
| "learning_rate": 4.567097237172587e-05, | |
| "loss": 1.9721, | |
| "step": 2420 | |
| }, | |
| { | |
| "epoch": 0.43595263724434874, | |
| "grad_norm": 10.111530303955078, | |
| "learning_rate": 4.565303193397919e-05, | |
| "loss": 1.9108, | |
| "step": 2430 | |
| }, | |
| { | |
| "epoch": 0.43774668101901687, | |
| "grad_norm": 8.572367668151855, | |
| "learning_rate": 4.563509149623251e-05, | |
| "loss": 1.8878, | |
| "step": 2440 | |
| }, | |
| { | |
| "epoch": 0.43954072479368494, | |
| "grad_norm": 15.499300956726074, | |
| "learning_rate": 4.561715105848583e-05, | |
| "loss": 2.0646, | |
| "step": 2450 | |
| }, | |
| { | |
| "epoch": 0.4413347685683531, | |
| "grad_norm": 8.507719993591309, | |
| "learning_rate": 4.559921062073915e-05, | |
| "loss": 2.0785, | |
| "step": 2460 | |
| }, | |
| { | |
| "epoch": 0.44312881234302115, | |
| "grad_norm": 8.560482025146484, | |
| "learning_rate": 4.558127018299247e-05, | |
| "loss": 1.919, | |
| "step": 2470 | |
| }, | |
| { | |
| "epoch": 0.4449228561176893, | |
| "grad_norm": 8.793256759643555, | |
| "learning_rate": 4.556332974524579e-05, | |
| "loss": 2.0275, | |
| "step": 2480 | |
| }, | |
| { | |
| "epoch": 0.44671689989235736, | |
| "grad_norm": 6.947265625, | |
| "learning_rate": 4.5545389307499105e-05, | |
| "loss": 1.8708, | |
| "step": 2490 | |
| }, | |
| { | |
| "epoch": 0.4485109436670255, | |
| "grad_norm": 8.579648971557617, | |
| "learning_rate": 4.552744886975242e-05, | |
| "loss": 2.0002, | |
| "step": 2500 | |
| }, | |
| { | |
| "epoch": 0.45030498744169356, | |
| "grad_norm": 7.623202323913574, | |
| "learning_rate": 4.550950843200574e-05, | |
| "loss": 1.9444, | |
| "step": 2510 | |
| }, | |
| { | |
| "epoch": 0.4520990312163617, | |
| "grad_norm": 8.441667556762695, | |
| "learning_rate": 4.549156799425906e-05, | |
| "loss": 2.0275, | |
| "step": 2520 | |
| }, | |
| { | |
| "epoch": 0.45389307499102977, | |
| "grad_norm": 9.18626594543457, | |
| "learning_rate": 4.547362755651238e-05, | |
| "loss": 1.9512, | |
| "step": 2530 | |
| }, | |
| { | |
| "epoch": 0.4556871187656979, | |
| "grad_norm": 8.11828899383545, | |
| "learning_rate": 4.54556871187657e-05, | |
| "loss": 1.9609, | |
| "step": 2540 | |
| }, | |
| { | |
| "epoch": 0.45748116254036597, | |
| "grad_norm": 7.3665852546691895, | |
| "learning_rate": 4.5437746681019016e-05, | |
| "loss": 1.9721, | |
| "step": 2550 | |
| }, | |
| { | |
| "epoch": 0.4592752063150341, | |
| "grad_norm": 8.686524391174316, | |
| "learning_rate": 4.541980624327234e-05, | |
| "loss": 2.0039, | |
| "step": 2560 | |
| }, | |
| { | |
| "epoch": 0.4610692500897022, | |
| "grad_norm": 8.56877326965332, | |
| "learning_rate": 4.540186580552566e-05, | |
| "loss": 2.0762, | |
| "step": 2570 | |
| }, | |
| { | |
| "epoch": 0.4628632938643703, | |
| "grad_norm": 9.964433670043945, | |
| "learning_rate": 4.5383925367778974e-05, | |
| "loss": 1.9773, | |
| "step": 2580 | |
| }, | |
| { | |
| "epoch": 0.4646573376390384, | |
| "grad_norm": 8.93453311920166, | |
| "learning_rate": 4.53659849300323e-05, | |
| "loss": 1.9763, | |
| "step": 2590 | |
| }, | |
| { | |
| "epoch": 0.4664513814137065, | |
| "grad_norm": 6.911281108856201, | |
| "learning_rate": 4.5348044492285615e-05, | |
| "loss": 1.8979, | |
| "step": 2600 | |
| }, | |
| { | |
| "epoch": 0.4682454251883746, | |
| "grad_norm": 6.9323410987854, | |
| "learning_rate": 4.533010405453893e-05, | |
| "loss": 1.9935, | |
| "step": 2610 | |
| }, | |
| { | |
| "epoch": 0.4700394689630427, | |
| "grad_norm": 7.738839626312256, | |
| "learning_rate": 4.531216361679225e-05, | |
| "loss": 1.9254, | |
| "step": 2620 | |
| }, | |
| { | |
| "epoch": 0.4718335127377108, | |
| "grad_norm": 6.664251804351807, | |
| "learning_rate": 4.529422317904557e-05, | |
| "loss": 1.9573, | |
| "step": 2630 | |
| }, | |
| { | |
| "epoch": 0.4736275565123789, | |
| "grad_norm": 8.574616432189941, | |
| "learning_rate": 4.527628274129889e-05, | |
| "loss": 1.9251, | |
| "step": 2640 | |
| }, | |
| { | |
| "epoch": 0.475421600287047, | |
| "grad_norm": 7.2672834396362305, | |
| "learning_rate": 4.525834230355221e-05, | |
| "loss": 1.8782, | |
| "step": 2650 | |
| }, | |
| { | |
| "epoch": 0.4772156440617151, | |
| "grad_norm": 6.810856342315674, | |
| "learning_rate": 4.5240401865805526e-05, | |
| "loss": 1.9968, | |
| "step": 2660 | |
| }, | |
| { | |
| "epoch": 0.4790096878363832, | |
| "grad_norm": 16.551050186157227, | |
| "learning_rate": 4.522246142805885e-05, | |
| "loss": 1.9016, | |
| "step": 2670 | |
| }, | |
| { | |
| "epoch": 0.48080373161105133, | |
| "grad_norm": 9.01229190826416, | |
| "learning_rate": 4.520452099031217e-05, | |
| "loss": 1.9715, | |
| "step": 2680 | |
| }, | |
| { | |
| "epoch": 0.4825977753857194, | |
| "grad_norm": 9.2802152633667, | |
| "learning_rate": 4.5186580552565484e-05, | |
| "loss": 2.0513, | |
| "step": 2690 | |
| }, | |
| { | |
| "epoch": 0.48439181916038754, | |
| "grad_norm": 7.945450782775879, | |
| "learning_rate": 4.516864011481881e-05, | |
| "loss": 1.849, | |
| "step": 2700 | |
| }, | |
| { | |
| "epoch": 0.4861858629350556, | |
| "grad_norm": 9.4662446975708, | |
| "learning_rate": 4.5150699677072126e-05, | |
| "loss": 1.9349, | |
| "step": 2710 | |
| }, | |
| { | |
| "epoch": 0.48797990670972374, | |
| "grad_norm": 7.22721004486084, | |
| "learning_rate": 4.5132759239325436e-05, | |
| "loss": 2.0256, | |
| "step": 2720 | |
| }, | |
| { | |
| "epoch": 0.4897739504843918, | |
| "grad_norm": 8.505002975463867, | |
| "learning_rate": 4.511481880157876e-05, | |
| "loss": 2.0187, | |
| "step": 2730 | |
| }, | |
| { | |
| "epoch": 0.49156799425905995, | |
| "grad_norm": 8.256681442260742, | |
| "learning_rate": 4.509687836383208e-05, | |
| "loss": 1.8934, | |
| "step": 2740 | |
| }, | |
| { | |
| "epoch": 0.493362038033728, | |
| "grad_norm": 13.187761306762695, | |
| "learning_rate": 4.5078937926085395e-05, | |
| "loss": 1.9564, | |
| "step": 2750 | |
| }, | |
| { | |
| "epoch": 0.4951560818083961, | |
| "grad_norm": 9.577531814575195, | |
| "learning_rate": 4.506099748833872e-05, | |
| "loss": 1.9369, | |
| "step": 2760 | |
| }, | |
| { | |
| "epoch": 0.49695012558306423, | |
| "grad_norm": 8.296804428100586, | |
| "learning_rate": 4.5043057050592036e-05, | |
| "loss": 2.043, | |
| "step": 2770 | |
| }, | |
| { | |
| "epoch": 0.4987441693577323, | |
| "grad_norm": 7.145248889923096, | |
| "learning_rate": 4.502511661284536e-05, | |
| "loss": 1.7847, | |
| "step": 2780 | |
| }, | |
| { | |
| "epoch": 0.5005382131324004, | |
| "grad_norm": 10.56977367401123, | |
| "learning_rate": 4.500717617509868e-05, | |
| "loss": 1.8364, | |
| "step": 2790 | |
| }, | |
| { | |
| "epoch": 0.5023322569070685, | |
| "grad_norm": 9.34067440032959, | |
| "learning_rate": 4.4989235737351995e-05, | |
| "loss": 1.9133, | |
| "step": 2800 | |
| }, | |
| { | |
| "epoch": 0.5041263006817366, | |
| "grad_norm": 8.067873001098633, | |
| "learning_rate": 4.497129529960531e-05, | |
| "loss": 1.8836, | |
| "step": 2810 | |
| }, | |
| { | |
| "epoch": 0.5059203444564048, | |
| "grad_norm": 8.651354789733887, | |
| "learning_rate": 4.495335486185863e-05, | |
| "loss": 1.8979, | |
| "step": 2820 | |
| }, | |
| { | |
| "epoch": 0.5077143882310728, | |
| "grad_norm": 6.594571590423584, | |
| "learning_rate": 4.4935414424111946e-05, | |
| "loss": 1.9502, | |
| "step": 2830 | |
| }, | |
| { | |
| "epoch": 0.5095084320057409, | |
| "grad_norm": 10.328874588012695, | |
| "learning_rate": 4.491747398636527e-05, | |
| "loss": 2.0826, | |
| "step": 2840 | |
| }, | |
| { | |
| "epoch": 0.511302475780409, | |
| "grad_norm": 6.956106185913086, | |
| "learning_rate": 4.489953354861859e-05, | |
| "loss": 1.9671, | |
| "step": 2850 | |
| }, | |
| { | |
| "epoch": 0.5130965195550772, | |
| "grad_norm": 7.3767313957214355, | |
| "learning_rate": 4.4881593110871905e-05, | |
| "loss": 1.902, | |
| "step": 2860 | |
| }, | |
| { | |
| "epoch": 0.5148905633297453, | |
| "grad_norm": 11.275064468383789, | |
| "learning_rate": 4.486365267312523e-05, | |
| "loss": 2.0932, | |
| "step": 2870 | |
| }, | |
| { | |
| "epoch": 0.5166846071044133, | |
| "grad_norm": 7.849559783935547, | |
| "learning_rate": 4.4845712235378546e-05, | |
| "loss": 1.9519, | |
| "step": 2880 | |
| }, | |
| { | |
| "epoch": 0.5184786508790814, | |
| "grad_norm": 7.759171962738037, | |
| "learning_rate": 4.4827771797631863e-05, | |
| "loss": 1.853, | |
| "step": 2890 | |
| }, | |
| { | |
| "epoch": 0.5202726946537496, | |
| "grad_norm": 12.3310546875, | |
| "learning_rate": 4.480983135988519e-05, | |
| "loss": 2.017, | |
| "step": 2900 | |
| }, | |
| { | |
| "epoch": 0.5220667384284177, | |
| "grad_norm": 7.477339267730713, | |
| "learning_rate": 4.4791890922138505e-05, | |
| "loss": 1.8495, | |
| "step": 2910 | |
| }, | |
| { | |
| "epoch": 0.5238607822030857, | |
| "grad_norm": 8.375692367553711, | |
| "learning_rate": 4.477395048439182e-05, | |
| "loss": 1.9654, | |
| "step": 2920 | |
| }, | |
| { | |
| "epoch": 0.5256548259777538, | |
| "grad_norm": 7.368261337280273, | |
| "learning_rate": 4.475601004664514e-05, | |
| "loss": 1.8585, | |
| "step": 2930 | |
| }, | |
| { | |
| "epoch": 0.527448869752422, | |
| "grad_norm": 7.570740699768066, | |
| "learning_rate": 4.4738069608898456e-05, | |
| "loss": 1.965, | |
| "step": 2940 | |
| }, | |
| { | |
| "epoch": 0.5292429135270901, | |
| "grad_norm": 7.81984281539917, | |
| "learning_rate": 4.4720129171151774e-05, | |
| "loss": 2.0262, | |
| "step": 2950 | |
| }, | |
| { | |
| "epoch": 0.5310369573017582, | |
| "grad_norm": 7.456051349639893, | |
| "learning_rate": 4.47021887334051e-05, | |
| "loss": 1.8934, | |
| "step": 2960 | |
| }, | |
| { | |
| "epoch": 0.5328310010764262, | |
| "grad_norm": 8.479120254516602, | |
| "learning_rate": 4.4684248295658415e-05, | |
| "loss": 1.9117, | |
| "step": 2970 | |
| }, | |
| { | |
| "epoch": 0.5346250448510944, | |
| "grad_norm": 8.73019027709961, | |
| "learning_rate": 4.466630785791174e-05, | |
| "loss": 2.0238, | |
| "step": 2980 | |
| }, | |
| { | |
| "epoch": 0.5364190886257625, | |
| "grad_norm": 8.31013298034668, | |
| "learning_rate": 4.4648367420165056e-05, | |
| "loss": 1.9404, | |
| "step": 2990 | |
| }, | |
| { | |
| "epoch": 0.5382131324004306, | |
| "grad_norm": 6.616645812988281, | |
| "learning_rate": 4.4630426982418374e-05, | |
| "loss": 1.9167, | |
| "step": 3000 | |
| }, | |
| { | |
| "epoch": 0.5400071761750986, | |
| "grad_norm": 7.4681172370910645, | |
| "learning_rate": 4.46124865446717e-05, | |
| "loss": 2.0548, | |
| "step": 3010 | |
| }, | |
| { | |
| "epoch": 0.5418012199497668, | |
| "grad_norm": 11.563379287719727, | |
| "learning_rate": 4.459454610692501e-05, | |
| "loss": 2.0929, | |
| "step": 3020 | |
| }, | |
| { | |
| "epoch": 0.5435952637244349, | |
| "grad_norm": 7.389353275299072, | |
| "learning_rate": 4.4576605669178325e-05, | |
| "loss": 1.7729, | |
| "step": 3030 | |
| }, | |
| { | |
| "epoch": 0.545389307499103, | |
| "grad_norm": 8.560032844543457, | |
| "learning_rate": 4.455866523143165e-05, | |
| "loss": 1.8803, | |
| "step": 3040 | |
| }, | |
| { | |
| "epoch": 0.547183351273771, | |
| "grad_norm": 8.208505630493164, | |
| "learning_rate": 4.454072479368497e-05, | |
| "loss": 1.8715, | |
| "step": 3050 | |
| }, | |
| { | |
| "epoch": 0.5489773950484392, | |
| "grad_norm": 8.2820463180542, | |
| "learning_rate": 4.4522784355938284e-05, | |
| "loss": 2.0147, | |
| "step": 3060 | |
| }, | |
| { | |
| "epoch": 0.5507714388231073, | |
| "grad_norm": 7.4810285568237305, | |
| "learning_rate": 4.450484391819161e-05, | |
| "loss": 1.9301, | |
| "step": 3070 | |
| }, | |
| { | |
| "epoch": 0.5525654825977754, | |
| "grad_norm": 7.442780017852783, | |
| "learning_rate": 4.4486903480444925e-05, | |
| "loss": 2.0132, | |
| "step": 3080 | |
| }, | |
| { | |
| "epoch": 0.5543595263724435, | |
| "grad_norm": 7.857019424438477, | |
| "learning_rate": 4.446896304269824e-05, | |
| "loss": 2.1327, | |
| "step": 3090 | |
| }, | |
| { | |
| "epoch": 0.5561535701471116, | |
| "grad_norm": 8.195714950561523, | |
| "learning_rate": 4.4451022604951567e-05, | |
| "loss": 1.9133, | |
| "step": 3100 | |
| }, | |
| { | |
| "epoch": 0.5579476139217797, | |
| "grad_norm": 9.424678802490234, | |
| "learning_rate": 4.4433082167204884e-05, | |
| "loss": 1.8079, | |
| "step": 3110 | |
| }, | |
| { | |
| "epoch": 0.5597416576964478, | |
| "grad_norm": 7.7518768310546875, | |
| "learning_rate": 4.44151417294582e-05, | |
| "loss": 2.0345, | |
| "step": 3120 | |
| }, | |
| { | |
| "epoch": 0.5615357014711159, | |
| "grad_norm": 8.170982360839844, | |
| "learning_rate": 4.439720129171152e-05, | |
| "loss": 2.089, | |
| "step": 3130 | |
| }, | |
| { | |
| "epoch": 0.563329745245784, | |
| "grad_norm": 7.443562030792236, | |
| "learning_rate": 4.4379260853964836e-05, | |
| "loss": 1.805, | |
| "step": 3140 | |
| }, | |
| { | |
| "epoch": 0.5651237890204521, | |
| "grad_norm": 8.59354019165039, | |
| "learning_rate": 4.436132041621816e-05, | |
| "loss": 1.9759, | |
| "step": 3150 | |
| }, | |
| { | |
| "epoch": 0.5669178327951202, | |
| "grad_norm": 13.471951484680176, | |
| "learning_rate": 4.434337997847148e-05, | |
| "loss": 1.7935, | |
| "step": 3160 | |
| }, | |
| { | |
| "epoch": 0.5687118765697883, | |
| "grad_norm": 9.660313606262207, | |
| "learning_rate": 4.4325439540724794e-05, | |
| "loss": 1.9807, | |
| "step": 3170 | |
| }, | |
| { | |
| "epoch": 0.5705059203444564, | |
| "grad_norm": 6.917825222015381, | |
| "learning_rate": 4.430749910297812e-05, | |
| "loss": 2.0491, | |
| "step": 3180 | |
| }, | |
| { | |
| "epoch": 0.5722999641191245, | |
| "grad_norm": 9.734037399291992, | |
| "learning_rate": 4.4289558665231435e-05, | |
| "loss": 1.8836, | |
| "step": 3190 | |
| }, | |
| { | |
| "epoch": 0.5740940078937926, | |
| "grad_norm": 7.515590190887451, | |
| "learning_rate": 4.427161822748475e-05, | |
| "loss": 1.9391, | |
| "step": 3200 | |
| }, | |
| { | |
| "epoch": 0.5758880516684607, | |
| "grad_norm": 7.2530670166015625, | |
| "learning_rate": 4.425367778973808e-05, | |
| "loss": 1.8615, | |
| "step": 3210 | |
| }, | |
| { | |
| "epoch": 0.5776820954431288, | |
| "grad_norm": 8.29240894317627, | |
| "learning_rate": 4.4235737351991394e-05, | |
| "loss": 2.0954, | |
| "step": 3220 | |
| }, | |
| { | |
| "epoch": 0.579476139217797, | |
| "grad_norm": 8.519623756408691, | |
| "learning_rate": 4.421779691424471e-05, | |
| "loss": 1.983, | |
| "step": 3230 | |
| }, | |
| { | |
| "epoch": 0.581270182992465, | |
| "grad_norm": 7.667181015014648, | |
| "learning_rate": 4.419985647649803e-05, | |
| "loss": 1.9309, | |
| "step": 3240 | |
| }, | |
| { | |
| "epoch": 0.5830642267671331, | |
| "grad_norm": 9.281998634338379, | |
| "learning_rate": 4.4181916038751346e-05, | |
| "loss": 1.9057, | |
| "step": 3250 | |
| }, | |
| { | |
| "epoch": 0.5848582705418012, | |
| "grad_norm": 7.8026123046875, | |
| "learning_rate": 4.416397560100466e-05, | |
| "loss": 1.8834, | |
| "step": 3260 | |
| }, | |
| { | |
| "epoch": 0.5866523143164694, | |
| "grad_norm": 7.448451042175293, | |
| "learning_rate": 4.414603516325799e-05, | |
| "loss": 1.8942, | |
| "step": 3270 | |
| }, | |
| { | |
| "epoch": 0.5884463580911374, | |
| "grad_norm": 8.204652786254883, | |
| "learning_rate": 4.4128094725511304e-05, | |
| "loss": 2.0565, | |
| "step": 3280 | |
| }, | |
| { | |
| "epoch": 0.5902404018658055, | |
| "grad_norm": 8.598773002624512, | |
| "learning_rate": 4.411015428776462e-05, | |
| "loss": 1.9708, | |
| "step": 3290 | |
| }, | |
| { | |
| "epoch": 0.5920344456404736, | |
| "grad_norm": 7.167200088500977, | |
| "learning_rate": 4.4092213850017946e-05, | |
| "loss": 1.8534, | |
| "step": 3300 | |
| }, | |
| { | |
| "epoch": 0.5938284894151418, | |
| "grad_norm": 11.437542915344238, | |
| "learning_rate": 4.407427341227126e-05, | |
| "loss": 2.0349, | |
| "step": 3310 | |
| }, | |
| { | |
| "epoch": 0.5956225331898098, | |
| "grad_norm": 8.82004451751709, | |
| "learning_rate": 4.405633297452458e-05, | |
| "loss": 1.9427, | |
| "step": 3320 | |
| }, | |
| { | |
| "epoch": 0.5974165769644779, | |
| "grad_norm": 6.192866802215576, | |
| "learning_rate": 4.40383925367779e-05, | |
| "loss": 1.9727, | |
| "step": 3330 | |
| }, | |
| { | |
| "epoch": 0.599210620739146, | |
| "grad_norm": 7.089540481567383, | |
| "learning_rate": 4.4020452099031215e-05, | |
| "loss": 1.9497, | |
| "step": 3340 | |
| }, | |
| { | |
| "epoch": 0.6010046645138142, | |
| "grad_norm": 8.135286331176758, | |
| "learning_rate": 4.400251166128454e-05, | |
| "loss": 1.8405, | |
| "step": 3350 | |
| }, | |
| { | |
| "epoch": 0.6027987082884823, | |
| "grad_norm": 8.762147903442383, | |
| "learning_rate": 4.3984571223537856e-05, | |
| "loss": 1.9011, | |
| "step": 3360 | |
| }, | |
| { | |
| "epoch": 0.6045927520631503, | |
| "grad_norm": 9.658001899719238, | |
| "learning_rate": 4.396663078579117e-05, | |
| "loss": 1.8995, | |
| "step": 3370 | |
| }, | |
| { | |
| "epoch": 0.6063867958378184, | |
| "grad_norm": 7.004101276397705, | |
| "learning_rate": 4.39486903480445e-05, | |
| "loss": 1.8548, | |
| "step": 3380 | |
| }, | |
| { | |
| "epoch": 0.6081808396124866, | |
| "grad_norm": 7.552975177764893, | |
| "learning_rate": 4.3930749910297814e-05, | |
| "loss": 1.9911, | |
| "step": 3390 | |
| }, | |
| { | |
| "epoch": 0.6099748833871547, | |
| "grad_norm": 7.0711774826049805, | |
| "learning_rate": 4.391280947255113e-05, | |
| "loss": 1.8611, | |
| "step": 3400 | |
| }, | |
| { | |
| "epoch": 0.6117689271618227, | |
| "grad_norm": 9.78061580657959, | |
| "learning_rate": 4.3894869034804456e-05, | |
| "loss": 1.9661, | |
| "step": 3410 | |
| }, | |
| { | |
| "epoch": 0.6135629709364908, | |
| "grad_norm": 8.065385818481445, | |
| "learning_rate": 4.387692859705777e-05, | |
| "loss": 1.975, | |
| "step": 3420 | |
| }, | |
| { | |
| "epoch": 0.615357014711159, | |
| "grad_norm": 9.28231430053711, | |
| "learning_rate": 4.385898815931109e-05, | |
| "loss": 1.9083, | |
| "step": 3430 | |
| }, | |
| { | |
| "epoch": 0.6171510584858271, | |
| "grad_norm": 8.710234642028809, | |
| "learning_rate": 4.384104772156441e-05, | |
| "loss": 2.0402, | |
| "step": 3440 | |
| }, | |
| { | |
| "epoch": 0.6189451022604952, | |
| "grad_norm": 8.679749488830566, | |
| "learning_rate": 4.3823107283817725e-05, | |
| "loss": 1.9162, | |
| "step": 3450 | |
| }, | |
| { | |
| "epoch": 0.6207391460351632, | |
| "grad_norm": 7.5028228759765625, | |
| "learning_rate": 4.380516684607104e-05, | |
| "loss": 2.0918, | |
| "step": 3460 | |
| }, | |
| { | |
| "epoch": 0.6225331898098314, | |
| "grad_norm": 8.817152976989746, | |
| "learning_rate": 4.3787226408324366e-05, | |
| "loss": 1.9089, | |
| "step": 3470 | |
| }, | |
| { | |
| "epoch": 0.6243272335844995, | |
| "grad_norm": 7.720176696777344, | |
| "learning_rate": 4.376928597057768e-05, | |
| "loss": 1.8395, | |
| "step": 3480 | |
| }, | |
| { | |
| "epoch": 0.6261212773591676, | |
| "grad_norm": 8.804288864135742, | |
| "learning_rate": 4.3751345532831e-05, | |
| "loss": 2.0384, | |
| "step": 3490 | |
| }, | |
| { | |
| "epoch": 0.6279153211338356, | |
| "grad_norm": 6.383409023284912, | |
| "learning_rate": 4.3733405095084325e-05, | |
| "loss": 1.917, | |
| "step": 3500 | |
| }, | |
| { | |
| "epoch": 0.6297093649085038, | |
| "grad_norm": 7.6271233558654785, | |
| "learning_rate": 4.371546465733764e-05, | |
| "loss": 1.884, | |
| "step": 3510 | |
| }, | |
| { | |
| "epoch": 0.6315034086831719, | |
| "grad_norm": 7.319206237792969, | |
| "learning_rate": 4.3697524219590966e-05, | |
| "loss": 2.0081, | |
| "step": 3520 | |
| }, | |
| { | |
| "epoch": 0.63329745245784, | |
| "grad_norm": 7.624337196350098, | |
| "learning_rate": 4.367958378184428e-05, | |
| "loss": 1.9573, | |
| "step": 3530 | |
| }, | |
| { | |
| "epoch": 0.635091496232508, | |
| "grad_norm": 8.105642318725586, | |
| "learning_rate": 4.3661643344097594e-05, | |
| "loss": 1.9264, | |
| "step": 3540 | |
| }, | |
| { | |
| "epoch": 0.6368855400071761, | |
| "grad_norm": 6.624615669250488, | |
| "learning_rate": 4.364370290635092e-05, | |
| "loss": 1.9273, | |
| "step": 3550 | |
| }, | |
| { | |
| "epoch": 0.6386795837818443, | |
| "grad_norm": 8.527731895446777, | |
| "learning_rate": 4.3625762468604235e-05, | |
| "loss": 1.8837, | |
| "step": 3560 | |
| }, | |
| { | |
| "epoch": 0.6404736275565124, | |
| "grad_norm": 8.374712944030762, | |
| "learning_rate": 4.360782203085755e-05, | |
| "loss": 1.9182, | |
| "step": 3570 | |
| }, | |
| { | |
| "epoch": 0.6422676713311805, | |
| "grad_norm": 6.726866245269775, | |
| "learning_rate": 4.3589881593110876e-05, | |
| "loss": 1.7699, | |
| "step": 3580 | |
| }, | |
| { | |
| "epoch": 0.6440617151058485, | |
| "grad_norm": 10.059771537780762, | |
| "learning_rate": 4.3571941155364194e-05, | |
| "loss": 1.8803, | |
| "step": 3590 | |
| }, | |
| { | |
| "epoch": 0.6458557588805167, | |
| "grad_norm": 6.3898024559021, | |
| "learning_rate": 4.355400071761751e-05, | |
| "loss": 1.9215, | |
| "step": 3600 | |
| }, | |
| { | |
| "epoch": 0.6476498026551848, | |
| "grad_norm": 9.93798542022705, | |
| "learning_rate": 4.3536060279870835e-05, | |
| "loss": 1.9015, | |
| "step": 3610 | |
| }, | |
| { | |
| "epoch": 0.6494438464298529, | |
| "grad_norm": 8.230923652648926, | |
| "learning_rate": 4.351811984212415e-05, | |
| "loss": 1.8842, | |
| "step": 3620 | |
| }, | |
| { | |
| "epoch": 0.6512378902045209, | |
| "grad_norm": 8.391541481018066, | |
| "learning_rate": 4.350017940437747e-05, | |
| "loss": 2.0254, | |
| "step": 3630 | |
| }, | |
| { | |
| "epoch": 0.6530319339791891, | |
| "grad_norm": 8.703720092773438, | |
| "learning_rate": 4.3482238966630787e-05, | |
| "loss": 1.9441, | |
| "step": 3640 | |
| }, | |
| { | |
| "epoch": 0.6548259777538572, | |
| "grad_norm": 8.434385299682617, | |
| "learning_rate": 4.3464298528884104e-05, | |
| "loss": 2.1388, | |
| "step": 3650 | |
| }, | |
| { | |
| "epoch": 0.6566200215285253, | |
| "grad_norm": 9.658421516418457, | |
| "learning_rate": 4.344635809113742e-05, | |
| "loss": 2.1284, | |
| "step": 3660 | |
| }, | |
| { | |
| "epoch": 0.6584140653031934, | |
| "grad_norm": 9.099438667297363, | |
| "learning_rate": 4.3428417653390745e-05, | |
| "loss": 1.9191, | |
| "step": 3670 | |
| }, | |
| { | |
| "epoch": 0.6602081090778615, | |
| "grad_norm": 5.330417633056641, | |
| "learning_rate": 4.341047721564406e-05, | |
| "loss": 1.7558, | |
| "step": 3680 | |
| }, | |
| { | |
| "epoch": 0.6620021528525296, | |
| "grad_norm": 8.256141662597656, | |
| "learning_rate": 4.339253677789738e-05, | |
| "loss": 1.904, | |
| "step": 3690 | |
| }, | |
| { | |
| "epoch": 0.6637961966271977, | |
| "grad_norm": 7.978524208068848, | |
| "learning_rate": 4.3374596340150704e-05, | |
| "loss": 1.9706, | |
| "step": 3700 | |
| }, | |
| { | |
| "epoch": 0.6655902404018658, | |
| "grad_norm": 7.254574298858643, | |
| "learning_rate": 4.335665590240402e-05, | |
| "loss": 1.8038, | |
| "step": 3710 | |
| }, | |
| { | |
| "epoch": 0.667384284176534, | |
| "grad_norm": 6.119344711303711, | |
| "learning_rate": 4.3338715464657345e-05, | |
| "loss": 2.0935, | |
| "step": 3720 | |
| }, | |
| { | |
| "epoch": 0.669178327951202, | |
| "grad_norm": 10.237481117248535, | |
| "learning_rate": 4.332077502691066e-05, | |
| "loss": 1.9197, | |
| "step": 3730 | |
| }, | |
| { | |
| "epoch": 0.6709723717258701, | |
| "grad_norm": 8.238425254821777, | |
| "learning_rate": 4.330283458916398e-05, | |
| "loss": 1.8335, | |
| "step": 3740 | |
| }, | |
| { | |
| "epoch": 0.6727664155005382, | |
| "grad_norm": 10.77346420288086, | |
| "learning_rate": 4.32848941514173e-05, | |
| "loss": 1.9849, | |
| "step": 3750 | |
| }, | |
| { | |
| "epoch": 0.6745604592752064, | |
| "grad_norm": 5.878365993499756, | |
| "learning_rate": 4.3266953713670614e-05, | |
| "loss": 1.8184, | |
| "step": 3760 | |
| }, | |
| { | |
| "epoch": 0.6763545030498744, | |
| "grad_norm": 5.962438583374023, | |
| "learning_rate": 4.324901327592393e-05, | |
| "loss": 1.942, | |
| "step": 3770 | |
| }, | |
| { | |
| "epoch": 0.6781485468245425, | |
| "grad_norm": 7.118070602416992, | |
| "learning_rate": 4.3231072838177255e-05, | |
| "loss": 1.8888, | |
| "step": 3780 | |
| }, | |
| { | |
| "epoch": 0.6799425905992106, | |
| "grad_norm": 7.19279146194458, | |
| "learning_rate": 4.321313240043057e-05, | |
| "loss": 1.9875, | |
| "step": 3790 | |
| }, | |
| { | |
| "epoch": 0.6817366343738788, | |
| "grad_norm": 9.004045486450195, | |
| "learning_rate": 4.319519196268389e-05, | |
| "loss": 1.8713, | |
| "step": 3800 | |
| }, | |
| { | |
| "epoch": 0.6835306781485468, | |
| "grad_norm": 6.644644737243652, | |
| "learning_rate": 4.3177251524937214e-05, | |
| "loss": 1.8883, | |
| "step": 3810 | |
| }, | |
| { | |
| "epoch": 0.6853247219232149, | |
| "grad_norm": 7.53091287612915, | |
| "learning_rate": 4.315931108719053e-05, | |
| "loss": 1.9133, | |
| "step": 3820 | |
| }, | |
| { | |
| "epoch": 0.687118765697883, | |
| "grad_norm": 7.283283710479736, | |
| "learning_rate": 4.314137064944385e-05, | |
| "loss": 1.8786, | |
| "step": 3830 | |
| }, | |
| { | |
| "epoch": 0.6889128094725512, | |
| "grad_norm": 8.955952644348145, | |
| "learning_rate": 4.3123430211697166e-05, | |
| "loss": 1.9249, | |
| "step": 3840 | |
| }, | |
| { | |
| "epoch": 0.6907068532472193, | |
| "grad_norm": 6.595334053039551, | |
| "learning_rate": 4.310548977395048e-05, | |
| "loss": 1.8995, | |
| "step": 3850 | |
| }, | |
| { | |
| "epoch": 0.6925008970218873, | |
| "grad_norm": 6.214903831481934, | |
| "learning_rate": 4.30875493362038e-05, | |
| "loss": 1.8988, | |
| "step": 3860 | |
| }, | |
| { | |
| "epoch": 0.6942949407965554, | |
| "grad_norm": 5.9941182136535645, | |
| "learning_rate": 4.3069608898457124e-05, | |
| "loss": 1.8008, | |
| "step": 3870 | |
| }, | |
| { | |
| "epoch": 0.6960889845712236, | |
| "grad_norm": 9.003954887390137, | |
| "learning_rate": 4.305166846071044e-05, | |
| "loss": 2.0354, | |
| "step": 3880 | |
| }, | |
| { | |
| "epoch": 0.6978830283458917, | |
| "grad_norm": 7.329159259796143, | |
| "learning_rate": 4.303372802296376e-05, | |
| "loss": 2.0086, | |
| "step": 3890 | |
| }, | |
| { | |
| "epoch": 0.6996770721205597, | |
| "grad_norm": 7.872637748718262, | |
| "learning_rate": 4.301578758521708e-05, | |
| "loss": 1.9595, | |
| "step": 3900 | |
| }, | |
| { | |
| "epoch": 0.7014711158952278, | |
| "grad_norm": 10.439992904663086, | |
| "learning_rate": 4.29978471474704e-05, | |
| "loss": 1.9187, | |
| "step": 3910 | |
| }, | |
| { | |
| "epoch": 0.703265159669896, | |
| "grad_norm": 10.078546524047852, | |
| "learning_rate": 4.2979906709723724e-05, | |
| "loss": 1.9979, | |
| "step": 3920 | |
| }, | |
| { | |
| "epoch": 0.7050592034445641, | |
| "grad_norm": 8.9290771484375, | |
| "learning_rate": 4.296196627197704e-05, | |
| "loss": 1.9665, | |
| "step": 3930 | |
| }, | |
| { | |
| "epoch": 0.7068532472192322, | |
| "grad_norm": 8.043295860290527, | |
| "learning_rate": 4.294402583423036e-05, | |
| "loss": 2.0803, | |
| "step": 3940 | |
| }, | |
| { | |
| "epoch": 0.7086472909939002, | |
| "grad_norm": 7.653200149536133, | |
| "learning_rate": 4.2926085396483676e-05, | |
| "loss": 2.1244, | |
| "step": 3950 | |
| }, | |
| { | |
| "epoch": 0.7104413347685683, | |
| "grad_norm": 12.027050971984863, | |
| "learning_rate": 4.290814495873699e-05, | |
| "loss": 2.0046, | |
| "step": 3960 | |
| }, | |
| { | |
| "epoch": 0.7122353785432365, | |
| "grad_norm": 6.169637680053711, | |
| "learning_rate": 4.289020452099031e-05, | |
| "loss": 1.9957, | |
| "step": 3970 | |
| }, | |
| { | |
| "epoch": 0.7140294223179046, | |
| "grad_norm": 8.492105484008789, | |
| "learning_rate": 4.2872264083243634e-05, | |
| "loss": 1.9959, | |
| "step": 3980 | |
| }, | |
| { | |
| "epoch": 0.7158234660925726, | |
| "grad_norm": 7.080846309661865, | |
| "learning_rate": 4.285432364549695e-05, | |
| "loss": 1.9098, | |
| "step": 3990 | |
| }, | |
| { | |
| "epoch": 0.7176175098672407, | |
| "grad_norm": 7.8670454025268555, | |
| "learning_rate": 4.283638320775027e-05, | |
| "loss": 1.8557, | |
| "step": 4000 | |
| }, | |
| { | |
| "epoch": 0.7194115536419089, | |
| "grad_norm": 8.910687446594238, | |
| "learning_rate": 4.281844277000359e-05, | |
| "loss": 2.0108, | |
| "step": 4010 | |
| }, | |
| { | |
| "epoch": 0.721205597416577, | |
| "grad_norm": 7.866197109222412, | |
| "learning_rate": 4.280050233225691e-05, | |
| "loss": 1.9711, | |
| "step": 4020 | |
| }, | |
| { | |
| "epoch": 0.722999641191245, | |
| "grad_norm": 7.0763044357299805, | |
| "learning_rate": 4.278256189451023e-05, | |
| "loss": 1.9565, | |
| "step": 4030 | |
| }, | |
| { | |
| "epoch": 0.7247936849659131, | |
| "grad_norm": 10.510825157165527, | |
| "learning_rate": 4.276462145676355e-05, | |
| "loss": 1.9513, | |
| "step": 4040 | |
| }, | |
| { | |
| "epoch": 0.7265877287405813, | |
| "grad_norm": 6.813848495483398, | |
| "learning_rate": 4.274668101901687e-05, | |
| "loss": 1.9841, | |
| "step": 4050 | |
| }, | |
| { | |
| "epoch": 0.7283817725152494, | |
| "grad_norm": 8.661791801452637, | |
| "learning_rate": 4.272874058127018e-05, | |
| "loss": 1.9382, | |
| "step": 4060 | |
| }, | |
| { | |
| "epoch": 0.7301758162899175, | |
| "grad_norm": 8.576088905334473, | |
| "learning_rate": 4.27108001435235e-05, | |
| "loss": 1.7463, | |
| "step": 4070 | |
| }, | |
| { | |
| "epoch": 0.7319698600645855, | |
| "grad_norm": 9.310657501220703, | |
| "learning_rate": 4.269285970577682e-05, | |
| "loss": 1.9832, | |
| "step": 4080 | |
| }, | |
| { | |
| "epoch": 0.7337639038392537, | |
| "grad_norm": 8.533422470092773, | |
| "learning_rate": 4.2674919268030145e-05, | |
| "loss": 2.0184, | |
| "step": 4090 | |
| }, | |
| { | |
| "epoch": 0.7355579476139218, | |
| "grad_norm": 6.781728267669678, | |
| "learning_rate": 4.265697883028346e-05, | |
| "loss": 1.9384, | |
| "step": 4100 | |
| }, | |
| { | |
| "epoch": 0.7373519913885899, | |
| "grad_norm": 7.908256530761719, | |
| "learning_rate": 4.263903839253678e-05, | |
| "loss": 1.9602, | |
| "step": 4110 | |
| }, | |
| { | |
| "epoch": 0.7391460351632579, | |
| "grad_norm": 6.28724479675293, | |
| "learning_rate": 4.26210979547901e-05, | |
| "loss": 2.0066, | |
| "step": 4120 | |
| }, | |
| { | |
| "epoch": 0.7409400789379261, | |
| "grad_norm": 6.198331832885742, | |
| "learning_rate": 4.260315751704342e-05, | |
| "loss": 2.132, | |
| "step": 4130 | |
| }, | |
| { | |
| "epoch": 0.7427341227125942, | |
| "grad_norm": 7.816977024078369, | |
| "learning_rate": 4.258521707929674e-05, | |
| "loss": 1.9536, | |
| "step": 4140 | |
| }, | |
| { | |
| "epoch": 0.7445281664872623, | |
| "grad_norm": 7.433613300323486, | |
| "learning_rate": 4.2567276641550055e-05, | |
| "loss": 1.885, | |
| "step": 4150 | |
| }, | |
| { | |
| "epoch": 0.7463222102619304, | |
| "grad_norm": 6.046330451965332, | |
| "learning_rate": 4.254933620380337e-05, | |
| "loss": 1.7975, | |
| "step": 4160 | |
| }, | |
| { | |
| "epoch": 0.7481162540365985, | |
| "grad_norm": 8.465211868286133, | |
| "learning_rate": 4.253139576605669e-05, | |
| "loss": 2.0454, | |
| "step": 4170 | |
| }, | |
| { | |
| "epoch": 0.7499102978112666, | |
| "grad_norm": 5.859003067016602, | |
| "learning_rate": 4.2513455328310013e-05, | |
| "loss": 2.0586, | |
| "step": 4180 | |
| }, | |
| { | |
| "epoch": 0.7517043415859347, | |
| "grad_norm": 8.045632362365723, | |
| "learning_rate": 4.249551489056333e-05, | |
| "loss": 1.9465, | |
| "step": 4190 | |
| }, | |
| { | |
| "epoch": 0.7534983853606028, | |
| "grad_norm": 6.814916133880615, | |
| "learning_rate": 4.247757445281665e-05, | |
| "loss": 1.8779, | |
| "step": 4200 | |
| }, | |
| { | |
| "epoch": 0.755292429135271, | |
| "grad_norm": 7.628875732421875, | |
| "learning_rate": 4.245963401506997e-05, | |
| "loss": 2.1836, | |
| "step": 4210 | |
| }, | |
| { | |
| "epoch": 0.757086472909939, | |
| "grad_norm": 6.975657939910889, | |
| "learning_rate": 4.244169357732329e-05, | |
| "loss": 1.7787, | |
| "step": 4220 | |
| }, | |
| { | |
| "epoch": 0.7588805166846071, | |
| "grad_norm": 7.706836700439453, | |
| "learning_rate": 4.2423753139576606e-05, | |
| "loss": 1.9749, | |
| "step": 4230 | |
| }, | |
| { | |
| "epoch": 0.7606745604592752, | |
| "grad_norm": 8.721719741821289, | |
| "learning_rate": 4.240581270182993e-05, | |
| "loss": 1.9422, | |
| "step": 4240 | |
| }, | |
| { | |
| "epoch": 0.7624686042339434, | |
| "grad_norm": 8.833395957946777, | |
| "learning_rate": 4.238787226408325e-05, | |
| "loss": 2.0886, | |
| "step": 4250 | |
| }, | |
| { | |
| "epoch": 0.7642626480086114, | |
| "grad_norm": 8.308511734008789, | |
| "learning_rate": 4.2369931826336565e-05, | |
| "loss": 1.9319, | |
| "step": 4260 | |
| }, | |
| { | |
| "epoch": 0.7660566917832795, | |
| "grad_norm": 7.8894476890563965, | |
| "learning_rate": 4.235199138858988e-05, | |
| "loss": 1.9378, | |
| "step": 4270 | |
| }, | |
| { | |
| "epoch": 0.7678507355579476, | |
| "grad_norm": 6.138456344604492, | |
| "learning_rate": 4.23340509508432e-05, | |
| "loss": 1.9009, | |
| "step": 4280 | |
| }, | |
| { | |
| "epoch": 0.7696447793326158, | |
| "grad_norm": 7.507815837860107, | |
| "learning_rate": 4.2316110513096524e-05, | |
| "loss": 1.9461, | |
| "step": 4290 | |
| }, | |
| { | |
| "epoch": 0.7714388231072838, | |
| "grad_norm": 7.388694763183594, | |
| "learning_rate": 4.229817007534984e-05, | |
| "loss": 1.7823, | |
| "step": 4300 | |
| }, | |
| { | |
| "epoch": 0.7732328668819519, | |
| "grad_norm": 7.483008861541748, | |
| "learning_rate": 4.228022963760316e-05, | |
| "loss": 2.0558, | |
| "step": 4310 | |
| }, | |
| { | |
| "epoch": 0.77502691065662, | |
| "grad_norm": 8.476717948913574, | |
| "learning_rate": 4.226228919985648e-05, | |
| "loss": 1.97, | |
| "step": 4320 | |
| }, | |
| { | |
| "epoch": 0.7768209544312881, | |
| "grad_norm": 8.137911796569824, | |
| "learning_rate": 4.22443487621098e-05, | |
| "loss": 2.0543, | |
| "step": 4330 | |
| }, | |
| { | |
| "epoch": 0.7786149982059563, | |
| "grad_norm": 7.722476482391357, | |
| "learning_rate": 4.222640832436312e-05, | |
| "loss": 1.8836, | |
| "step": 4340 | |
| }, | |
| { | |
| "epoch": 0.7804090419806243, | |
| "grad_norm": 7.449862003326416, | |
| "learning_rate": 4.220846788661644e-05, | |
| "loss": 1.9243, | |
| "step": 4350 | |
| }, | |
| { | |
| "epoch": 0.7822030857552924, | |
| "grad_norm": 6.4095563888549805, | |
| "learning_rate": 4.219052744886975e-05, | |
| "loss": 1.8377, | |
| "step": 4360 | |
| }, | |
| { | |
| "epoch": 0.7839971295299605, | |
| "grad_norm": 6.866125106811523, | |
| "learning_rate": 4.217258701112307e-05, | |
| "loss": 1.8246, | |
| "step": 4370 | |
| }, | |
| { | |
| "epoch": 0.7857911733046287, | |
| "grad_norm": 9.195448875427246, | |
| "learning_rate": 4.215464657337639e-05, | |
| "loss": 1.8019, | |
| "step": 4380 | |
| }, | |
| { | |
| "epoch": 0.7875852170792967, | |
| "grad_norm": 6.7557196617126465, | |
| "learning_rate": 4.213670613562971e-05, | |
| "loss": 1.8733, | |
| "step": 4390 | |
| }, | |
| { | |
| "epoch": 0.7893792608539648, | |
| "grad_norm": 7.832233905792236, | |
| "learning_rate": 4.211876569788303e-05, | |
| "loss": 1.727, | |
| "step": 4400 | |
| }, | |
| { | |
| "epoch": 0.7911733046286329, | |
| "grad_norm": 6.849626541137695, | |
| "learning_rate": 4.210082526013635e-05, | |
| "loss": 1.8051, | |
| "step": 4410 | |
| }, | |
| { | |
| "epoch": 0.7929673484033011, | |
| "grad_norm": 7.668883323669434, | |
| "learning_rate": 4.208288482238967e-05, | |
| "loss": 1.9021, | |
| "step": 4420 | |
| }, | |
| { | |
| "epoch": 0.7947613921779692, | |
| "grad_norm": 6.98665714263916, | |
| "learning_rate": 4.2064944384642986e-05, | |
| "loss": 1.9314, | |
| "step": 4430 | |
| }, | |
| { | |
| "epoch": 0.7965554359526372, | |
| "grad_norm": 7.1887383460998535, | |
| "learning_rate": 4.204700394689631e-05, | |
| "loss": 1.8046, | |
| "step": 4440 | |
| }, | |
| { | |
| "epoch": 0.7983494797273053, | |
| "grad_norm": 8.068037033081055, | |
| "learning_rate": 4.202906350914963e-05, | |
| "loss": 2.0018, | |
| "step": 4450 | |
| }, | |
| { | |
| "epoch": 0.8001435235019735, | |
| "grad_norm": 8.662615776062012, | |
| "learning_rate": 4.2011123071402944e-05, | |
| "loss": 1.9545, | |
| "step": 4460 | |
| }, | |
| { | |
| "epoch": 0.8019375672766416, | |
| "grad_norm": 6.354881286621094, | |
| "learning_rate": 4.199318263365626e-05, | |
| "loss": 1.699, | |
| "step": 4470 | |
| }, | |
| { | |
| "epoch": 0.8037316110513096, | |
| "grad_norm": 7.5929059982299805, | |
| "learning_rate": 4.197524219590958e-05, | |
| "loss": 1.7685, | |
| "step": 4480 | |
| }, | |
| { | |
| "epoch": 0.8055256548259777, | |
| "grad_norm": 7.677204608917236, | |
| "learning_rate": 4.19573017581629e-05, | |
| "loss": 1.876, | |
| "step": 4490 | |
| }, | |
| { | |
| "epoch": 0.8073196986006459, | |
| "grad_norm": 6.630999565124512, | |
| "learning_rate": 4.193936132041622e-05, | |
| "loss": 1.6986, | |
| "step": 4500 | |
| }, | |
| { | |
| "epoch": 0.809113742375314, | |
| "grad_norm": 6.430192947387695, | |
| "learning_rate": 4.192142088266954e-05, | |
| "loss": 1.9533, | |
| "step": 4510 | |
| }, | |
| { | |
| "epoch": 0.810907786149982, | |
| "grad_norm": 8.051733016967773, | |
| "learning_rate": 4.190348044492286e-05, | |
| "loss": 1.8555, | |
| "step": 4520 | |
| }, | |
| { | |
| "epoch": 0.8127018299246501, | |
| "grad_norm": 10.02955150604248, | |
| "learning_rate": 4.188554000717618e-05, | |
| "loss": 1.8035, | |
| "step": 4530 | |
| }, | |
| { | |
| "epoch": 0.8144958736993183, | |
| "grad_norm": 9.238576889038086, | |
| "learning_rate": 4.1867599569429496e-05, | |
| "loss": 1.9818, | |
| "step": 4540 | |
| }, | |
| { | |
| "epoch": 0.8162899174739864, | |
| "grad_norm": 6.678407192230225, | |
| "learning_rate": 4.184965913168282e-05, | |
| "loss": 1.808, | |
| "step": 4550 | |
| }, | |
| { | |
| "epoch": 0.8180839612486545, | |
| "grad_norm": 8.583653450012207, | |
| "learning_rate": 4.183171869393614e-05, | |
| "loss": 1.7423, | |
| "step": 4560 | |
| }, | |
| { | |
| "epoch": 0.8198780050233225, | |
| "grad_norm": 7.882835865020752, | |
| "learning_rate": 4.1813778256189454e-05, | |
| "loss": 1.9654, | |
| "step": 4570 | |
| }, | |
| { | |
| "epoch": 0.8216720487979907, | |
| "grad_norm": 7.367639541625977, | |
| "learning_rate": 4.179583781844277e-05, | |
| "loss": 1.8759, | |
| "step": 4580 | |
| }, | |
| { | |
| "epoch": 0.8234660925726588, | |
| "grad_norm": 5.808956146240234, | |
| "learning_rate": 4.177789738069609e-05, | |
| "loss": 1.8092, | |
| "step": 4590 | |
| }, | |
| { | |
| "epoch": 0.8252601363473269, | |
| "grad_norm": 8.538715362548828, | |
| "learning_rate": 4.1759956942949406e-05, | |
| "loss": 1.9468, | |
| "step": 4600 | |
| }, | |
| { | |
| "epoch": 0.8270541801219949, | |
| "grad_norm": 7.508995532989502, | |
| "learning_rate": 4.174201650520273e-05, | |
| "loss": 1.9887, | |
| "step": 4610 | |
| }, | |
| { | |
| "epoch": 0.8288482238966631, | |
| "grad_norm": 7.2825446128845215, | |
| "learning_rate": 4.172407606745605e-05, | |
| "loss": 1.9767, | |
| "step": 4620 | |
| }, | |
| { | |
| "epoch": 0.8306422676713312, | |
| "grad_norm": 7.998370170593262, | |
| "learning_rate": 4.1706135629709365e-05, | |
| "loss": 1.9921, | |
| "step": 4630 | |
| }, | |
| { | |
| "epoch": 0.8324363114459993, | |
| "grad_norm": 6.514996528625488, | |
| "learning_rate": 4.168819519196269e-05, | |
| "loss": 1.8236, | |
| "step": 4640 | |
| }, | |
| { | |
| "epoch": 0.8342303552206674, | |
| "grad_norm": 11.413960456848145, | |
| "learning_rate": 4.1670254754216006e-05, | |
| "loss": 1.9107, | |
| "step": 4650 | |
| }, | |
| { | |
| "epoch": 0.8360243989953355, | |
| "grad_norm": 10.405426025390625, | |
| "learning_rate": 4.165231431646932e-05, | |
| "loss": 1.936, | |
| "step": 4660 | |
| }, | |
| { | |
| "epoch": 0.8378184427700036, | |
| "grad_norm": 7.796232223510742, | |
| "learning_rate": 4.163437387872264e-05, | |
| "loss": 1.9083, | |
| "step": 4670 | |
| }, | |
| { | |
| "epoch": 0.8396124865446717, | |
| "grad_norm": 7.413230895996094, | |
| "learning_rate": 4.161643344097596e-05, | |
| "loss": 1.9956, | |
| "step": 4680 | |
| }, | |
| { | |
| "epoch": 0.8414065303193398, | |
| "grad_norm": 6.458949565887451, | |
| "learning_rate": 4.159849300322928e-05, | |
| "loss": 1.8525, | |
| "step": 4690 | |
| }, | |
| { | |
| "epoch": 0.843200574094008, | |
| "grad_norm": 7.839207172393799, | |
| "learning_rate": 4.15805525654826e-05, | |
| "loss": 1.9213, | |
| "step": 4700 | |
| }, | |
| { | |
| "epoch": 0.844994617868676, | |
| "grad_norm": 7.596676349639893, | |
| "learning_rate": 4.1562612127735916e-05, | |
| "loss": 1.7207, | |
| "step": 4710 | |
| }, | |
| { | |
| "epoch": 0.8467886616433441, | |
| "grad_norm": 8.215608596801758, | |
| "learning_rate": 4.154467168998924e-05, | |
| "loss": 1.8861, | |
| "step": 4720 | |
| }, | |
| { | |
| "epoch": 0.8485827054180122, | |
| "grad_norm": 8.735016822814941, | |
| "learning_rate": 4.152673125224256e-05, | |
| "loss": 1.7922, | |
| "step": 4730 | |
| }, | |
| { | |
| "epoch": 0.8503767491926802, | |
| "grad_norm": 7.5839667320251465, | |
| "learning_rate": 4.1508790814495875e-05, | |
| "loss": 1.8327, | |
| "step": 4740 | |
| }, | |
| { | |
| "epoch": 0.8521707929673484, | |
| "grad_norm": 8.134754180908203, | |
| "learning_rate": 4.14908503767492e-05, | |
| "loss": 2.0809, | |
| "step": 4750 | |
| }, | |
| { | |
| "epoch": 0.8539648367420165, | |
| "grad_norm": 6.481571674346924, | |
| "learning_rate": 4.1472909939002516e-05, | |
| "loss": 1.8645, | |
| "step": 4760 | |
| }, | |
| { | |
| "epoch": 0.8557588805166846, | |
| "grad_norm": 6.660789489746094, | |
| "learning_rate": 4.145496950125583e-05, | |
| "loss": 1.8249, | |
| "step": 4770 | |
| }, | |
| { | |
| "epoch": 0.8575529242913527, | |
| "grad_norm": 9.439289093017578, | |
| "learning_rate": 4.143702906350915e-05, | |
| "loss": 1.9101, | |
| "step": 4780 | |
| }, | |
| { | |
| "epoch": 0.8593469680660208, | |
| "grad_norm": 7.637185096740723, | |
| "learning_rate": 4.141908862576247e-05, | |
| "loss": 1.8607, | |
| "step": 4790 | |
| }, | |
| { | |
| "epoch": 0.8611410118406889, | |
| "grad_norm": 6.203429698944092, | |
| "learning_rate": 4.1401148188015785e-05, | |
| "loss": 1.6808, | |
| "step": 4800 | |
| }, | |
| { | |
| "epoch": 0.862935055615357, | |
| "grad_norm": 7.630656719207764, | |
| "learning_rate": 4.138320775026911e-05, | |
| "loss": 1.8628, | |
| "step": 4810 | |
| }, | |
| { | |
| "epoch": 0.8647290993900251, | |
| "grad_norm": 7.22261905670166, | |
| "learning_rate": 4.1365267312522426e-05, | |
| "loss": 1.8176, | |
| "step": 4820 | |
| }, | |
| { | |
| "epoch": 0.8665231431646933, | |
| "grad_norm": 7.170011520385742, | |
| "learning_rate": 4.134732687477575e-05, | |
| "loss": 1.9082, | |
| "step": 4830 | |
| }, | |
| { | |
| "epoch": 0.8683171869393613, | |
| "grad_norm": 6.344324588775635, | |
| "learning_rate": 4.132938643702907e-05, | |
| "loss": 1.8772, | |
| "step": 4840 | |
| }, | |
| { | |
| "epoch": 0.8701112307140294, | |
| "grad_norm": 8.296944618225098, | |
| "learning_rate": 4.1311445999282385e-05, | |
| "loss": 1.7995, | |
| "step": 4850 | |
| }, | |
| { | |
| "epoch": 0.8719052744886975, | |
| "grad_norm": 10.2393159866333, | |
| "learning_rate": 4.129350556153571e-05, | |
| "loss": 1.7471, | |
| "step": 4860 | |
| }, | |
| { | |
| "epoch": 0.8736993182633657, | |
| "grad_norm": 6.657433032989502, | |
| "learning_rate": 4.127556512378902e-05, | |
| "loss": 1.8175, | |
| "step": 4870 | |
| }, | |
| { | |
| "epoch": 0.8754933620380337, | |
| "grad_norm": 9.101529121398926, | |
| "learning_rate": 4.125762468604234e-05, | |
| "loss": 1.8132, | |
| "step": 4880 | |
| }, | |
| { | |
| "epoch": 0.8772874058127018, | |
| "grad_norm": 8.015061378479004, | |
| "learning_rate": 4.123968424829566e-05, | |
| "loss": 1.7798, | |
| "step": 4890 | |
| }, | |
| { | |
| "epoch": 0.8790814495873699, | |
| "grad_norm": 8.774176597595215, | |
| "learning_rate": 4.122174381054898e-05, | |
| "loss": 1.823, | |
| "step": 4900 | |
| }, | |
| { | |
| "epoch": 0.8808754933620381, | |
| "grad_norm": 8.144107818603516, | |
| "learning_rate": 4.1203803372802295e-05, | |
| "loss": 1.8932, | |
| "step": 4910 | |
| }, | |
| { | |
| "epoch": 0.8826695371367062, | |
| "grad_norm": 9.238556861877441, | |
| "learning_rate": 4.118586293505562e-05, | |
| "loss": 1.9033, | |
| "step": 4920 | |
| }, | |
| { | |
| "epoch": 0.8844635809113742, | |
| "grad_norm": 7.814840316772461, | |
| "learning_rate": 4.1167922497308937e-05, | |
| "loss": 1.9437, | |
| "step": 4930 | |
| }, | |
| { | |
| "epoch": 0.8862576246860423, | |
| "grad_norm": 8.720809936523438, | |
| "learning_rate": 4.1149982059562254e-05, | |
| "loss": 2.0195, | |
| "step": 4940 | |
| }, | |
| { | |
| "epoch": 0.8880516684607105, | |
| "grad_norm": 8.207976341247559, | |
| "learning_rate": 4.113204162181558e-05, | |
| "loss": 1.6992, | |
| "step": 4950 | |
| }, | |
| { | |
| "epoch": 0.8898457122353786, | |
| "grad_norm": 6.2852301597595215, | |
| "learning_rate": 4.1114101184068895e-05, | |
| "loss": 1.9237, | |
| "step": 4960 | |
| }, | |
| { | |
| "epoch": 0.8916397560100466, | |
| "grad_norm": 7.93939208984375, | |
| "learning_rate": 4.109616074632221e-05, | |
| "loss": 1.8576, | |
| "step": 4970 | |
| }, | |
| { | |
| "epoch": 0.8934337997847147, | |
| "grad_norm": 7.219597339630127, | |
| "learning_rate": 4.107822030857553e-05, | |
| "loss": 1.9087, | |
| "step": 4980 | |
| }, | |
| { | |
| "epoch": 0.8952278435593829, | |
| "grad_norm": 6.388714790344238, | |
| "learning_rate": 4.106027987082885e-05, | |
| "loss": 1.8785, | |
| "step": 4990 | |
| }, | |
| { | |
| "epoch": 0.897021887334051, | |
| "grad_norm": 7.048055648803711, | |
| "learning_rate": 4.1042339433082164e-05, | |
| "loss": 1.7598, | |
| "step": 5000 | |
| }, | |
| { | |
| "epoch": 0.898815931108719, | |
| "grad_norm": 9.254620552062988, | |
| "learning_rate": 4.102439899533549e-05, | |
| "loss": 1.8903, | |
| "step": 5010 | |
| }, | |
| { | |
| "epoch": 0.9006099748833871, | |
| "grad_norm": 7.499938488006592, | |
| "learning_rate": 4.1006458557588805e-05, | |
| "loss": 2.0262, | |
| "step": 5020 | |
| }, | |
| { | |
| "epoch": 0.9024040186580553, | |
| "grad_norm": 7.229835033416748, | |
| "learning_rate": 4.098851811984213e-05, | |
| "loss": 1.7852, | |
| "step": 5030 | |
| }, | |
| { | |
| "epoch": 0.9041980624327234, | |
| "grad_norm": 7.887816429138184, | |
| "learning_rate": 4.097057768209545e-05, | |
| "loss": 1.8154, | |
| "step": 5040 | |
| }, | |
| { | |
| "epoch": 0.9059921062073915, | |
| "grad_norm": 11.909371376037598, | |
| "learning_rate": 4.0952637244348764e-05, | |
| "loss": 1.8699, | |
| "step": 5050 | |
| }, | |
| { | |
| "epoch": 0.9077861499820595, | |
| "grad_norm": 9.439387321472168, | |
| "learning_rate": 4.093469680660209e-05, | |
| "loss": 2.0234, | |
| "step": 5060 | |
| }, | |
| { | |
| "epoch": 0.9095801937567277, | |
| "grad_norm": 7.028390884399414, | |
| "learning_rate": 4.0916756368855405e-05, | |
| "loss": 1.8577, | |
| "step": 5070 | |
| }, | |
| { | |
| "epoch": 0.9113742375313958, | |
| "grad_norm": 8.708728790283203, | |
| "learning_rate": 4.089881593110872e-05, | |
| "loss": 1.8517, | |
| "step": 5080 | |
| }, | |
| { | |
| "epoch": 0.9131682813060639, | |
| "grad_norm": 7.153110027313232, | |
| "learning_rate": 4.088087549336204e-05, | |
| "loss": 1.8174, | |
| "step": 5090 | |
| }, | |
| { | |
| "epoch": 0.9149623250807319, | |
| "grad_norm": 10.017348289489746, | |
| "learning_rate": 4.086293505561536e-05, | |
| "loss": 1.7079, | |
| "step": 5100 | |
| }, | |
| { | |
| "epoch": 0.9167563688554, | |
| "grad_norm": 8.1173734664917, | |
| "learning_rate": 4.0844994617868674e-05, | |
| "loss": 1.914, | |
| "step": 5110 | |
| }, | |
| { | |
| "epoch": 0.9185504126300682, | |
| "grad_norm": 7.267770290374756, | |
| "learning_rate": 4.0827054180122e-05, | |
| "loss": 1.8668, | |
| "step": 5120 | |
| }, | |
| { | |
| "epoch": 0.9203444564047363, | |
| "grad_norm": 8.86645793914795, | |
| "learning_rate": 4.0809113742375316e-05, | |
| "loss": 1.8357, | |
| "step": 5130 | |
| }, | |
| { | |
| "epoch": 0.9221385001794044, | |
| "grad_norm": 8.062966346740723, | |
| "learning_rate": 4.079117330462863e-05, | |
| "loss": 1.8604, | |
| "step": 5140 | |
| }, | |
| { | |
| "epoch": 0.9239325439540724, | |
| "grad_norm": 7.72880220413208, | |
| "learning_rate": 4.077323286688196e-05, | |
| "loss": 1.9482, | |
| "step": 5150 | |
| }, | |
| { | |
| "epoch": 0.9257265877287406, | |
| "grad_norm": 8.267526626586914, | |
| "learning_rate": 4.0755292429135274e-05, | |
| "loss": 1.6839, | |
| "step": 5160 | |
| }, | |
| { | |
| "epoch": 0.9275206315034087, | |
| "grad_norm": 10.039454460144043, | |
| "learning_rate": 4.073735199138859e-05, | |
| "loss": 1.9365, | |
| "step": 5170 | |
| }, | |
| { | |
| "epoch": 0.9293146752780768, | |
| "grad_norm": 9.875499725341797, | |
| "learning_rate": 4.071941155364191e-05, | |
| "loss": 1.8115, | |
| "step": 5180 | |
| }, | |
| { | |
| "epoch": 0.9311087190527448, | |
| "grad_norm": 7.694703102111816, | |
| "learning_rate": 4.0701471115895226e-05, | |
| "loss": 1.8054, | |
| "step": 5190 | |
| }, | |
| { | |
| "epoch": 0.932902762827413, | |
| "grad_norm": 6.072929382324219, | |
| "learning_rate": 4.068353067814855e-05, | |
| "loss": 1.9556, | |
| "step": 5200 | |
| }, | |
| { | |
| "epoch": 0.9346968066020811, | |
| "grad_norm": 9.701952934265137, | |
| "learning_rate": 4.066559024040187e-05, | |
| "loss": 1.9576, | |
| "step": 5210 | |
| }, | |
| { | |
| "epoch": 0.9364908503767492, | |
| "grad_norm": 8.353795051574707, | |
| "learning_rate": 4.0647649802655185e-05, | |
| "loss": 1.8579, | |
| "step": 5220 | |
| }, | |
| { | |
| "epoch": 0.9382848941514172, | |
| "grad_norm": 7.89420747756958, | |
| "learning_rate": 4.062970936490851e-05, | |
| "loss": 1.8866, | |
| "step": 5230 | |
| }, | |
| { | |
| "epoch": 0.9400789379260854, | |
| "grad_norm": 5.647555351257324, | |
| "learning_rate": 4.0611768927161826e-05, | |
| "loss": 1.742, | |
| "step": 5240 | |
| }, | |
| { | |
| "epoch": 0.9418729817007535, | |
| "grad_norm": 8.360129356384277, | |
| "learning_rate": 4.059382848941514e-05, | |
| "loss": 1.9297, | |
| "step": 5250 | |
| }, | |
| { | |
| "epoch": 0.9436670254754216, | |
| "grad_norm": 10.252833366394043, | |
| "learning_rate": 4.057588805166847e-05, | |
| "loss": 1.9036, | |
| "step": 5260 | |
| }, | |
| { | |
| "epoch": 0.9454610692500897, | |
| "grad_norm": 8.29806900024414, | |
| "learning_rate": 4.0557947613921784e-05, | |
| "loss": 1.7497, | |
| "step": 5270 | |
| }, | |
| { | |
| "epoch": 0.9472551130247578, | |
| "grad_norm": 6.64280891418457, | |
| "learning_rate": 4.05400071761751e-05, | |
| "loss": 1.9239, | |
| "step": 5280 | |
| }, | |
| { | |
| "epoch": 0.9490491567994259, | |
| "grad_norm": 7.992321014404297, | |
| "learning_rate": 4.052206673842842e-05, | |
| "loss": 1.8058, | |
| "step": 5290 | |
| }, | |
| { | |
| "epoch": 0.950843200574094, | |
| "grad_norm": 9.094325065612793, | |
| "learning_rate": 4.0504126300681736e-05, | |
| "loss": 1.9108, | |
| "step": 5300 | |
| }, | |
| { | |
| "epoch": 0.9526372443487621, | |
| "grad_norm": 8.64013385772705, | |
| "learning_rate": 4.048618586293505e-05, | |
| "loss": 2.1198, | |
| "step": 5310 | |
| }, | |
| { | |
| "epoch": 0.9544312881234303, | |
| "grad_norm": 6.910890579223633, | |
| "learning_rate": 4.046824542518838e-05, | |
| "loss": 1.9502, | |
| "step": 5320 | |
| }, | |
| { | |
| "epoch": 0.9562253318980983, | |
| "grad_norm": 7.383171081542969, | |
| "learning_rate": 4.0450304987441695e-05, | |
| "loss": 1.7905, | |
| "step": 5330 | |
| }, | |
| { | |
| "epoch": 0.9580193756727664, | |
| "grad_norm": 8.704651832580566, | |
| "learning_rate": 4.043236454969501e-05, | |
| "loss": 1.9428, | |
| "step": 5340 | |
| }, | |
| { | |
| "epoch": 0.9598134194474345, | |
| "grad_norm": 8.296703338623047, | |
| "learning_rate": 4.0414424111948336e-05, | |
| "loss": 1.8346, | |
| "step": 5350 | |
| }, | |
| { | |
| "epoch": 0.9616074632221027, | |
| "grad_norm": 7.613495349884033, | |
| "learning_rate": 4.039648367420165e-05, | |
| "loss": 1.9988, | |
| "step": 5360 | |
| }, | |
| { | |
| "epoch": 0.9634015069967707, | |
| "grad_norm": 8.145048141479492, | |
| "learning_rate": 4.037854323645497e-05, | |
| "loss": 1.8056, | |
| "step": 5370 | |
| }, | |
| { | |
| "epoch": 0.9651955507714388, | |
| "grad_norm": 9.955933570861816, | |
| "learning_rate": 4.0360602798708295e-05, | |
| "loss": 1.87, | |
| "step": 5380 | |
| }, | |
| { | |
| "epoch": 0.9669895945461069, | |
| "grad_norm": 5.713760852813721, | |
| "learning_rate": 4.0342662360961605e-05, | |
| "loss": 1.8019, | |
| "step": 5390 | |
| }, | |
| { | |
| "epoch": 0.9687836383207751, | |
| "grad_norm": 9.013385772705078, | |
| "learning_rate": 4.032472192321493e-05, | |
| "loss": 1.922, | |
| "step": 5400 | |
| }, | |
| { | |
| "epoch": 0.9705776820954432, | |
| "grad_norm": 8.361518859863281, | |
| "learning_rate": 4.0306781485468246e-05, | |
| "loss": 1.8847, | |
| "step": 5410 | |
| }, | |
| { | |
| "epoch": 0.9723717258701112, | |
| "grad_norm": 8.547319412231445, | |
| "learning_rate": 4.0288841047721564e-05, | |
| "loss": 1.9232, | |
| "step": 5420 | |
| }, | |
| { | |
| "epoch": 0.9741657696447793, | |
| "grad_norm": 8.168745994567871, | |
| "learning_rate": 4.027090060997489e-05, | |
| "loss": 1.834, | |
| "step": 5430 | |
| }, | |
| { | |
| "epoch": 0.9759598134194475, | |
| "grad_norm": 7.9649810791015625, | |
| "learning_rate": 4.0252960172228205e-05, | |
| "loss": 1.8909, | |
| "step": 5440 | |
| }, | |
| { | |
| "epoch": 0.9777538571941156, | |
| "grad_norm": 7.361401557922363, | |
| "learning_rate": 4.023501973448152e-05, | |
| "loss": 1.8859, | |
| "step": 5450 | |
| }, | |
| { | |
| "epoch": 0.9795479009687836, | |
| "grad_norm": 6.616429328918457, | |
| "learning_rate": 4.0217079296734846e-05, | |
| "loss": 1.8936, | |
| "step": 5460 | |
| }, | |
| { | |
| "epoch": 0.9813419447434517, | |
| "grad_norm": 8.482680320739746, | |
| "learning_rate": 4.0199138858988163e-05, | |
| "loss": 1.8714, | |
| "step": 5470 | |
| }, | |
| { | |
| "epoch": 0.9831359885181199, | |
| "grad_norm": 7.886808395385742, | |
| "learning_rate": 4.018119842124148e-05, | |
| "loss": 1.8772, | |
| "step": 5480 | |
| }, | |
| { | |
| "epoch": 0.984930032292788, | |
| "grad_norm": 8.63215446472168, | |
| "learning_rate": 4.01632579834948e-05, | |
| "loss": 1.8233, | |
| "step": 5490 | |
| }, | |
| { | |
| "epoch": 0.986724076067456, | |
| "grad_norm": 7.060615539550781, | |
| "learning_rate": 4.0145317545748115e-05, | |
| "loss": 1.7991, | |
| "step": 5500 | |
| }, | |
| { | |
| "epoch": 0.9885181198421241, | |
| "grad_norm": 7.455167293548584, | |
| "learning_rate": 4.012737710800143e-05, | |
| "loss": 1.8871, | |
| "step": 5510 | |
| }, | |
| { | |
| "epoch": 0.9903121636167922, | |
| "grad_norm": 8.120213508605957, | |
| "learning_rate": 4.0109436670254756e-05, | |
| "loss": 1.7909, | |
| "step": 5520 | |
| }, | |
| { | |
| "epoch": 0.9921062073914604, | |
| "grad_norm": 8.847984313964844, | |
| "learning_rate": 4.0091496232508074e-05, | |
| "loss": 1.9595, | |
| "step": 5530 | |
| }, | |
| { | |
| "epoch": 0.9939002511661285, | |
| "grad_norm": 7.193493366241455, | |
| "learning_rate": 4.007355579476139e-05, | |
| "loss": 1.7494, | |
| "step": 5540 | |
| }, | |
| { | |
| "epoch": 0.9956942949407965, | |
| "grad_norm": 9.286087989807129, | |
| "learning_rate": 4.0055615357014715e-05, | |
| "loss": 1.7659, | |
| "step": 5550 | |
| }, | |
| { | |
| "epoch": 0.9974883387154646, | |
| "grad_norm": 7.888460159301758, | |
| "learning_rate": 4.003767491926803e-05, | |
| "loss": 1.9127, | |
| "step": 5560 | |
| }, | |
| { | |
| "epoch": 0.9992823824901328, | |
| "grad_norm": 9.469123840332031, | |
| "learning_rate": 4.0019734481521356e-05, | |
| "loss": 1.9231, | |
| "step": 5570 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "eval_loss": 1.932178020477295, | |
| "eval_runtime": 189.8033, | |
| "eval_samples_per_second": 13.05, | |
| "eval_steps_per_second": 13.05, | |
| "step": 5574 | |
| }, | |
| { | |
| "epoch": 1.0010764262648009, | |
| "grad_norm": 8.350410461425781, | |
| "learning_rate": 4.0001794043774674e-05, | |
| "loss": 1.593, | |
| "step": 5580 | |
| }, | |
| { | |
| "epoch": 1.002870470039469, | |
| "grad_norm": 6.830000877380371, | |
| "learning_rate": 3.998385360602799e-05, | |
| "loss": 1.5654, | |
| "step": 5590 | |
| }, | |
| { | |
| "epoch": 1.004664513814137, | |
| "grad_norm": 7.733310222625732, | |
| "learning_rate": 3.996591316828131e-05, | |
| "loss": 1.3989, | |
| "step": 5600 | |
| }, | |
| { | |
| "epoch": 1.0064585575888052, | |
| "grad_norm": 8.72770881652832, | |
| "learning_rate": 3.9947972730534625e-05, | |
| "loss": 1.3938, | |
| "step": 5610 | |
| }, | |
| { | |
| "epoch": 1.0082526013634732, | |
| "grad_norm": 8.793525695800781, | |
| "learning_rate": 3.993003229278794e-05, | |
| "loss": 1.3272, | |
| "step": 5620 | |
| }, | |
| { | |
| "epoch": 1.0100466451381414, | |
| "grad_norm": 7.241087436676025, | |
| "learning_rate": 3.991209185504127e-05, | |
| "loss": 1.2756, | |
| "step": 5630 | |
| }, | |
| { | |
| "epoch": 1.0118406889128095, | |
| "grad_norm": 5.840522289276123, | |
| "learning_rate": 3.9894151417294584e-05, | |
| "loss": 1.3777, | |
| "step": 5640 | |
| }, | |
| { | |
| "epoch": 1.0136347326874775, | |
| "grad_norm": 8.860857009887695, | |
| "learning_rate": 3.98762109795479e-05, | |
| "loss": 1.3392, | |
| "step": 5650 | |
| }, | |
| { | |
| "epoch": 1.0154287764621457, | |
| "grad_norm": 7.003204822540283, | |
| "learning_rate": 3.9858270541801225e-05, | |
| "loss": 1.4117, | |
| "step": 5660 | |
| }, | |
| { | |
| "epoch": 1.0172228202368139, | |
| "grad_norm": 10.400496482849121, | |
| "learning_rate": 3.984033010405454e-05, | |
| "loss": 1.3564, | |
| "step": 5670 | |
| }, | |
| { | |
| "epoch": 1.0190168640114818, | |
| "grad_norm": 11.60659122467041, | |
| "learning_rate": 3.982238966630786e-05, | |
| "loss": 1.4216, | |
| "step": 5680 | |
| }, | |
| { | |
| "epoch": 1.02081090778615, | |
| "grad_norm": 8.097295761108398, | |
| "learning_rate": 3.980444922856118e-05, | |
| "loss": 1.4146, | |
| "step": 5690 | |
| }, | |
| { | |
| "epoch": 1.022604951560818, | |
| "grad_norm": 6.870365142822266, | |
| "learning_rate": 3.9786508790814494e-05, | |
| "loss": 1.4028, | |
| "step": 5700 | |
| }, | |
| { | |
| "epoch": 1.0243989953354862, | |
| "grad_norm": 8.893730163574219, | |
| "learning_rate": 3.976856835306781e-05, | |
| "loss": 1.4853, | |
| "step": 5710 | |
| }, | |
| { | |
| "epoch": 1.0261930391101544, | |
| "grad_norm": 9.703378677368164, | |
| "learning_rate": 3.9750627915321136e-05, | |
| "loss": 1.3958, | |
| "step": 5720 | |
| }, | |
| { | |
| "epoch": 1.0279870828848223, | |
| "grad_norm": 7.569676399230957, | |
| "learning_rate": 3.973268747757445e-05, | |
| "loss": 1.3828, | |
| "step": 5730 | |
| }, | |
| { | |
| "epoch": 1.0297811266594905, | |
| "grad_norm": 9.702757835388184, | |
| "learning_rate": 3.971474703982777e-05, | |
| "loss": 1.4878, | |
| "step": 5740 | |
| }, | |
| { | |
| "epoch": 1.0315751704341587, | |
| "grad_norm": 8.123019218444824, | |
| "learning_rate": 3.9696806602081094e-05, | |
| "loss": 1.3594, | |
| "step": 5750 | |
| }, | |
| { | |
| "epoch": 1.0333692142088267, | |
| "grad_norm": 7.301413059234619, | |
| "learning_rate": 3.967886616433441e-05, | |
| "loss": 1.4338, | |
| "step": 5760 | |
| }, | |
| { | |
| "epoch": 1.0351632579834948, | |
| "grad_norm": 9.144363403320312, | |
| "learning_rate": 3.9660925726587735e-05, | |
| "loss": 1.3546, | |
| "step": 5770 | |
| }, | |
| { | |
| "epoch": 1.0369573017581628, | |
| "grad_norm": 10.08015251159668, | |
| "learning_rate": 3.964298528884105e-05, | |
| "loss": 1.3771, | |
| "step": 5780 | |
| }, | |
| { | |
| "epoch": 1.038751345532831, | |
| "grad_norm": 7.706661701202393, | |
| "learning_rate": 3.962504485109437e-05, | |
| "loss": 1.3893, | |
| "step": 5790 | |
| }, | |
| { | |
| "epoch": 1.0405453893074992, | |
| "grad_norm": 7.710210800170898, | |
| "learning_rate": 3.960710441334769e-05, | |
| "loss": 1.4166, | |
| "step": 5800 | |
| }, | |
| { | |
| "epoch": 1.0423394330821671, | |
| "grad_norm": 9.725653648376465, | |
| "learning_rate": 3.9589163975601004e-05, | |
| "loss": 1.487, | |
| "step": 5810 | |
| }, | |
| { | |
| "epoch": 1.0441334768568353, | |
| "grad_norm": 6.656285762786865, | |
| "learning_rate": 3.957122353785432e-05, | |
| "loss": 1.2852, | |
| "step": 5820 | |
| }, | |
| { | |
| "epoch": 1.0459275206315035, | |
| "grad_norm": 11.030705451965332, | |
| "learning_rate": 3.9553283100107646e-05, | |
| "loss": 1.3853, | |
| "step": 5830 | |
| }, | |
| { | |
| "epoch": 1.0477215644061715, | |
| "grad_norm": 6.811320781707764, | |
| "learning_rate": 3.953534266236096e-05, | |
| "loss": 1.4783, | |
| "step": 5840 | |
| }, | |
| { | |
| "epoch": 1.0495156081808397, | |
| "grad_norm": 7.825105667114258, | |
| "learning_rate": 3.951740222461428e-05, | |
| "loss": 1.3688, | |
| "step": 5850 | |
| }, | |
| { | |
| "epoch": 1.0513096519555076, | |
| "grad_norm": 8.75365161895752, | |
| "learning_rate": 3.9499461786867604e-05, | |
| "loss": 1.3969, | |
| "step": 5860 | |
| }, | |
| { | |
| "epoch": 1.0531036957301758, | |
| "grad_norm": 9.158076286315918, | |
| "learning_rate": 3.948152134912092e-05, | |
| "loss": 1.4, | |
| "step": 5870 | |
| }, | |
| { | |
| "epoch": 1.054897739504844, | |
| "grad_norm": 9.204401016235352, | |
| "learning_rate": 3.946358091137424e-05, | |
| "loss": 1.3364, | |
| "step": 5880 | |
| }, | |
| { | |
| "epoch": 1.056691783279512, | |
| "grad_norm": 6.7577223777771, | |
| "learning_rate": 3.944564047362756e-05, | |
| "loss": 1.34, | |
| "step": 5890 | |
| }, | |
| { | |
| "epoch": 1.0584858270541802, | |
| "grad_norm": 7.4541497230529785, | |
| "learning_rate": 3.942770003588088e-05, | |
| "loss": 1.339, | |
| "step": 5900 | |
| }, | |
| { | |
| "epoch": 1.0602798708288481, | |
| "grad_norm": 8.368121147155762, | |
| "learning_rate": 3.940975959813419e-05, | |
| "loss": 1.453, | |
| "step": 5910 | |
| }, | |
| { | |
| "epoch": 1.0620739146035163, | |
| "grad_norm": 9.812310218811035, | |
| "learning_rate": 3.9391819160387515e-05, | |
| "loss": 1.4421, | |
| "step": 5920 | |
| }, | |
| { | |
| "epoch": 1.0638679583781845, | |
| "grad_norm": 8.019757270812988, | |
| "learning_rate": 3.937387872264083e-05, | |
| "loss": 1.3697, | |
| "step": 5930 | |
| }, | |
| { | |
| "epoch": 1.0656620021528525, | |
| "grad_norm": 7.330229759216309, | |
| "learning_rate": 3.9355938284894156e-05, | |
| "loss": 1.3469, | |
| "step": 5940 | |
| }, | |
| { | |
| "epoch": 1.0674560459275206, | |
| "grad_norm": 9.923416137695312, | |
| "learning_rate": 3.933799784714747e-05, | |
| "loss": 1.3891, | |
| "step": 5950 | |
| }, | |
| { | |
| "epoch": 1.0692500897021888, | |
| "grad_norm": 7.279324531555176, | |
| "learning_rate": 3.932005740940079e-05, | |
| "loss": 1.4288, | |
| "step": 5960 | |
| }, | |
| { | |
| "epoch": 1.0710441334768568, | |
| "grad_norm": 7.16546106338501, | |
| "learning_rate": 3.9302116971654114e-05, | |
| "loss": 1.347, | |
| "step": 5970 | |
| }, | |
| { | |
| "epoch": 1.072838177251525, | |
| "grad_norm": 8.2477445602417, | |
| "learning_rate": 3.928417653390743e-05, | |
| "loss": 1.3416, | |
| "step": 5980 | |
| }, | |
| { | |
| "epoch": 1.074632221026193, | |
| "grad_norm": 8.947925567626953, | |
| "learning_rate": 3.926623609616075e-05, | |
| "loss": 1.325, | |
| "step": 5990 | |
| }, | |
| { | |
| "epoch": 1.0764262648008611, | |
| "grad_norm": 10.749951362609863, | |
| "learning_rate": 3.9248295658414066e-05, | |
| "loss": 1.4012, | |
| "step": 6000 | |
| }, | |
| { | |
| "epoch": 1.0782203085755293, | |
| "grad_norm": 8.726066589355469, | |
| "learning_rate": 3.9230355220667383e-05, | |
| "loss": 1.3915, | |
| "step": 6010 | |
| }, | |
| { | |
| "epoch": 1.0800143523501973, | |
| "grad_norm": 8.84666919708252, | |
| "learning_rate": 3.92124147829207e-05, | |
| "loss": 1.4247, | |
| "step": 6020 | |
| }, | |
| { | |
| "epoch": 1.0818083961248655, | |
| "grad_norm": 8.546545028686523, | |
| "learning_rate": 3.9194474345174025e-05, | |
| "loss": 1.3545, | |
| "step": 6030 | |
| }, | |
| { | |
| "epoch": 1.0836024398995336, | |
| "grad_norm": 7.942462921142578, | |
| "learning_rate": 3.917653390742734e-05, | |
| "loss": 1.3572, | |
| "step": 6040 | |
| }, | |
| { | |
| "epoch": 1.0853964836742016, | |
| "grad_norm": 7.295432090759277, | |
| "learning_rate": 3.915859346968066e-05, | |
| "loss": 1.3773, | |
| "step": 6050 | |
| }, | |
| { | |
| "epoch": 1.0871905274488698, | |
| "grad_norm": 5.990417003631592, | |
| "learning_rate": 3.914065303193398e-05, | |
| "loss": 1.3617, | |
| "step": 6060 | |
| }, | |
| { | |
| "epoch": 1.0889845712235378, | |
| "grad_norm": 7.127756595611572, | |
| "learning_rate": 3.91227125941873e-05, | |
| "loss": 1.3374, | |
| "step": 6070 | |
| }, | |
| { | |
| "epoch": 1.090778614998206, | |
| "grad_norm": 9.005511283874512, | |
| "learning_rate": 3.910477215644062e-05, | |
| "loss": 1.4517, | |
| "step": 6080 | |
| }, | |
| { | |
| "epoch": 1.0925726587728741, | |
| "grad_norm": 7.71065616607666, | |
| "learning_rate": 3.908683171869394e-05, | |
| "loss": 1.4037, | |
| "step": 6090 | |
| }, | |
| { | |
| "epoch": 1.094366702547542, | |
| "grad_norm": 9.480076789855957, | |
| "learning_rate": 3.906889128094726e-05, | |
| "loss": 1.4425, | |
| "step": 6100 | |
| }, | |
| { | |
| "epoch": 1.0961607463222103, | |
| "grad_norm": 6.972677707672119, | |
| "learning_rate": 3.9050950843200576e-05, | |
| "loss": 1.356, | |
| "step": 6110 | |
| }, | |
| { | |
| "epoch": 1.0979547900968785, | |
| "grad_norm": 7.207665920257568, | |
| "learning_rate": 3.9033010405453894e-05, | |
| "loss": 1.3262, | |
| "step": 6120 | |
| }, | |
| { | |
| "epoch": 1.0997488338715464, | |
| "grad_norm": 6.460121154785156, | |
| "learning_rate": 3.901506996770721e-05, | |
| "loss": 1.3096, | |
| "step": 6130 | |
| }, | |
| { | |
| "epoch": 1.1015428776462146, | |
| "grad_norm": 10.60547924041748, | |
| "learning_rate": 3.8997129529960535e-05, | |
| "loss": 1.4342, | |
| "step": 6140 | |
| }, | |
| { | |
| "epoch": 1.1033369214208826, | |
| "grad_norm": 7.947498798370361, | |
| "learning_rate": 3.897918909221385e-05, | |
| "loss": 1.5028, | |
| "step": 6150 | |
| }, | |
| { | |
| "epoch": 1.1051309651955508, | |
| "grad_norm": 6.16427755355835, | |
| "learning_rate": 3.896124865446717e-05, | |
| "loss": 1.4129, | |
| "step": 6160 | |
| }, | |
| { | |
| "epoch": 1.106925008970219, | |
| "grad_norm": 6.936514377593994, | |
| "learning_rate": 3.8943308216720493e-05, | |
| "loss": 1.3718, | |
| "step": 6170 | |
| }, | |
| { | |
| "epoch": 1.108719052744887, | |
| "grad_norm": 6.0827555656433105, | |
| "learning_rate": 3.892536777897381e-05, | |
| "loss": 1.4196, | |
| "step": 6180 | |
| }, | |
| { | |
| "epoch": 1.110513096519555, | |
| "grad_norm": 9.733860969543457, | |
| "learning_rate": 3.890742734122713e-05, | |
| "loss": 1.302, | |
| "step": 6190 | |
| }, | |
| { | |
| "epoch": 1.1123071402942233, | |
| "grad_norm": 7.239533424377441, | |
| "learning_rate": 3.888948690348045e-05, | |
| "loss": 1.4005, | |
| "step": 6200 | |
| }, | |
| { | |
| "epoch": 1.1141011840688912, | |
| "grad_norm": 8.426368713378906, | |
| "learning_rate": 3.887154646573376e-05, | |
| "loss": 1.3118, | |
| "step": 6210 | |
| }, | |
| { | |
| "epoch": 1.1158952278435594, | |
| "grad_norm": 7.965389251708984, | |
| "learning_rate": 3.885360602798708e-05, | |
| "loss": 1.4111, | |
| "step": 6220 | |
| }, | |
| { | |
| "epoch": 1.1176892716182274, | |
| "grad_norm": 8.81957721710205, | |
| "learning_rate": 3.8835665590240404e-05, | |
| "loss": 1.4134, | |
| "step": 6230 | |
| }, | |
| { | |
| "epoch": 1.1194833153928956, | |
| "grad_norm": 7.341325283050537, | |
| "learning_rate": 3.881772515249372e-05, | |
| "loss": 1.2605, | |
| "step": 6240 | |
| }, | |
| { | |
| "epoch": 1.1212773591675638, | |
| "grad_norm": 8.882126808166504, | |
| "learning_rate": 3.879978471474704e-05, | |
| "loss": 1.4756, | |
| "step": 6250 | |
| }, | |
| { | |
| "epoch": 1.1230714029422317, | |
| "grad_norm": 7.364952564239502, | |
| "learning_rate": 3.878184427700036e-05, | |
| "loss": 1.3999, | |
| "step": 6260 | |
| }, | |
| { | |
| "epoch": 1.1248654467169, | |
| "grad_norm": 8.056137084960938, | |
| "learning_rate": 3.876390383925368e-05, | |
| "loss": 1.3775, | |
| "step": 6270 | |
| }, | |
| { | |
| "epoch": 1.1266594904915679, | |
| "grad_norm": 8.733875274658203, | |
| "learning_rate": 3.8745963401507e-05, | |
| "loss": 1.378, | |
| "step": 6280 | |
| }, | |
| { | |
| "epoch": 1.128453534266236, | |
| "grad_norm": 7.141792297363281, | |
| "learning_rate": 3.872802296376032e-05, | |
| "loss": 1.3789, | |
| "step": 6290 | |
| }, | |
| { | |
| "epoch": 1.1302475780409043, | |
| "grad_norm": 6.646508693695068, | |
| "learning_rate": 3.871008252601364e-05, | |
| "loss": 1.3432, | |
| "step": 6300 | |
| }, | |
| { | |
| "epoch": 1.1320416218155722, | |
| "grad_norm": 8.025551795959473, | |
| "learning_rate": 3.8692142088266955e-05, | |
| "loss": 1.4156, | |
| "step": 6310 | |
| }, | |
| { | |
| "epoch": 1.1338356655902404, | |
| "grad_norm": 9.390379905700684, | |
| "learning_rate": 3.867420165052027e-05, | |
| "loss": 1.3846, | |
| "step": 6320 | |
| }, | |
| { | |
| "epoch": 1.1356297093649086, | |
| "grad_norm": 7.891889572143555, | |
| "learning_rate": 3.865626121277359e-05, | |
| "loss": 1.4263, | |
| "step": 6330 | |
| }, | |
| { | |
| "epoch": 1.1374237531395766, | |
| "grad_norm": 7.289933681488037, | |
| "learning_rate": 3.8638320775026914e-05, | |
| "loss": 1.4822, | |
| "step": 6340 | |
| }, | |
| { | |
| "epoch": 1.1392177969142447, | |
| "grad_norm": 9.825151443481445, | |
| "learning_rate": 3.862038033728023e-05, | |
| "loss": 1.4937, | |
| "step": 6350 | |
| }, | |
| { | |
| "epoch": 1.141011840688913, | |
| "grad_norm": 7.730086803436279, | |
| "learning_rate": 3.860243989953355e-05, | |
| "loss": 1.3898, | |
| "step": 6360 | |
| }, | |
| { | |
| "epoch": 1.142805884463581, | |
| "grad_norm": 8.31853199005127, | |
| "learning_rate": 3.858449946178687e-05, | |
| "loss": 1.4197, | |
| "step": 6370 | |
| }, | |
| { | |
| "epoch": 1.144599928238249, | |
| "grad_norm": 9.047391891479492, | |
| "learning_rate": 3.856655902404019e-05, | |
| "loss": 1.3876, | |
| "step": 6380 | |
| }, | |
| { | |
| "epoch": 1.146393972012917, | |
| "grad_norm": 7.734961032867432, | |
| "learning_rate": 3.854861858629351e-05, | |
| "loss": 1.3464, | |
| "step": 6390 | |
| }, | |
| { | |
| "epoch": 1.1481880157875852, | |
| "grad_norm": 6.981812953948975, | |
| "learning_rate": 3.853067814854683e-05, | |
| "loss": 1.4332, | |
| "step": 6400 | |
| }, | |
| { | |
| "epoch": 1.1499820595622534, | |
| "grad_norm": 11.00357437133789, | |
| "learning_rate": 3.851273771080015e-05, | |
| "loss": 1.4755, | |
| "step": 6410 | |
| }, | |
| { | |
| "epoch": 1.1517761033369214, | |
| "grad_norm": 11.093791007995605, | |
| "learning_rate": 3.8494797273053466e-05, | |
| "loss": 1.3979, | |
| "step": 6420 | |
| }, | |
| { | |
| "epoch": 1.1535701471115896, | |
| "grad_norm": 6.121286392211914, | |
| "learning_rate": 3.847685683530678e-05, | |
| "loss": 1.3293, | |
| "step": 6430 | |
| }, | |
| { | |
| "epoch": 1.1553641908862575, | |
| "grad_norm": 8.417799949645996, | |
| "learning_rate": 3.84589163975601e-05, | |
| "loss": 1.4656, | |
| "step": 6440 | |
| }, | |
| { | |
| "epoch": 1.1571582346609257, | |
| "grad_norm": 7.4152655601501465, | |
| "learning_rate": 3.844097595981342e-05, | |
| "loss": 1.3806, | |
| "step": 6450 | |
| }, | |
| { | |
| "epoch": 1.158952278435594, | |
| "grad_norm": 6.638498783111572, | |
| "learning_rate": 3.842303552206674e-05, | |
| "loss": 1.4396, | |
| "step": 6460 | |
| }, | |
| { | |
| "epoch": 1.1607463222102619, | |
| "grad_norm": 12.287036895751953, | |
| "learning_rate": 3.840509508432006e-05, | |
| "loss": 1.3652, | |
| "step": 6470 | |
| }, | |
| { | |
| "epoch": 1.16254036598493, | |
| "grad_norm": 7.202840805053711, | |
| "learning_rate": 3.8387154646573376e-05, | |
| "loss": 1.5139, | |
| "step": 6480 | |
| }, | |
| { | |
| "epoch": 1.164334409759598, | |
| "grad_norm": 9.261022567749023, | |
| "learning_rate": 3.83692142088267e-05, | |
| "loss": 1.4731, | |
| "step": 6490 | |
| }, | |
| { | |
| "epoch": 1.1661284535342662, | |
| "grad_norm": 7.603264331817627, | |
| "learning_rate": 3.835127377108002e-05, | |
| "loss": 1.446, | |
| "step": 6500 | |
| }, | |
| { | |
| "epoch": 1.1679224973089344, | |
| "grad_norm": 7.141061305999756, | |
| "learning_rate": 3.8333333333333334e-05, | |
| "loss": 1.4075, | |
| "step": 6510 | |
| }, | |
| { | |
| "epoch": 1.1697165410836023, | |
| "grad_norm": 5.73789119720459, | |
| "learning_rate": 3.831539289558665e-05, | |
| "loss": 1.3154, | |
| "step": 6520 | |
| }, | |
| { | |
| "epoch": 1.1715105848582705, | |
| "grad_norm": 6.780825138092041, | |
| "learning_rate": 3.829745245783997e-05, | |
| "loss": 1.3756, | |
| "step": 6530 | |
| }, | |
| { | |
| "epoch": 1.1733046286329387, | |
| "grad_norm": 6.844207286834717, | |
| "learning_rate": 3.827951202009329e-05, | |
| "loss": 1.4014, | |
| "step": 6540 | |
| }, | |
| { | |
| "epoch": 1.1750986724076067, | |
| "grad_norm": 5.899712085723877, | |
| "learning_rate": 3.826157158234661e-05, | |
| "loss": 1.43, | |
| "step": 6550 | |
| }, | |
| { | |
| "epoch": 1.1768927161822749, | |
| "grad_norm": 7.629055976867676, | |
| "learning_rate": 3.824363114459993e-05, | |
| "loss": 1.369, | |
| "step": 6560 | |
| }, | |
| { | |
| "epoch": 1.178686759956943, | |
| "grad_norm": 7.492854118347168, | |
| "learning_rate": 3.822569070685325e-05, | |
| "loss": 1.3977, | |
| "step": 6570 | |
| }, | |
| { | |
| "epoch": 1.180480803731611, | |
| "grad_norm": 6.248719215393066, | |
| "learning_rate": 3.820775026910657e-05, | |
| "loss": 1.3657, | |
| "step": 6580 | |
| }, | |
| { | |
| "epoch": 1.1822748475062792, | |
| "grad_norm": 7.622773170471191, | |
| "learning_rate": 3.8189809831359886e-05, | |
| "loss": 1.3337, | |
| "step": 6590 | |
| }, | |
| { | |
| "epoch": 1.1840688912809472, | |
| "grad_norm": 6.013118267059326, | |
| "learning_rate": 3.817186939361321e-05, | |
| "loss": 1.3953, | |
| "step": 6600 | |
| }, | |
| { | |
| "epoch": 1.1858629350556154, | |
| "grad_norm": 7.73339319229126, | |
| "learning_rate": 3.815392895586653e-05, | |
| "loss": 1.389, | |
| "step": 6610 | |
| }, | |
| { | |
| "epoch": 1.1876569788302835, | |
| "grad_norm": 7.737553119659424, | |
| "learning_rate": 3.8135988518119845e-05, | |
| "loss": 1.4428, | |
| "step": 6620 | |
| }, | |
| { | |
| "epoch": 1.1894510226049515, | |
| "grad_norm": 8.525728225708008, | |
| "learning_rate": 3.811804808037316e-05, | |
| "loss": 1.4168, | |
| "step": 6630 | |
| }, | |
| { | |
| "epoch": 1.1912450663796197, | |
| "grad_norm": 8.121135711669922, | |
| "learning_rate": 3.810010764262648e-05, | |
| "loss": 1.4458, | |
| "step": 6640 | |
| }, | |
| { | |
| "epoch": 1.1930391101542877, | |
| "grad_norm": 8.677117347717285, | |
| "learning_rate": 3.8082167204879796e-05, | |
| "loss": 1.4039, | |
| "step": 6650 | |
| }, | |
| { | |
| "epoch": 1.1948331539289558, | |
| "grad_norm": 7.736855506896973, | |
| "learning_rate": 3.806422676713312e-05, | |
| "loss": 1.5071, | |
| "step": 6660 | |
| }, | |
| { | |
| "epoch": 1.196627197703624, | |
| "grad_norm": 8.960105895996094, | |
| "learning_rate": 3.804628632938644e-05, | |
| "loss": 1.5036, | |
| "step": 6670 | |
| }, | |
| { | |
| "epoch": 1.198421241478292, | |
| "grad_norm": 8.783327102661133, | |
| "learning_rate": 3.802834589163976e-05, | |
| "loss": 1.4161, | |
| "step": 6680 | |
| }, | |
| { | |
| "epoch": 1.2002152852529602, | |
| "grad_norm": 7.308254241943359, | |
| "learning_rate": 3.801040545389308e-05, | |
| "loss": 1.4173, | |
| "step": 6690 | |
| }, | |
| { | |
| "epoch": 1.2020093290276284, | |
| "grad_norm": 7.417787075042725, | |
| "learning_rate": 3.7992465016146396e-05, | |
| "loss": 1.3161, | |
| "step": 6700 | |
| }, | |
| { | |
| "epoch": 1.2038033728022963, | |
| "grad_norm": 7.6823320388793945, | |
| "learning_rate": 3.797452457839972e-05, | |
| "loss": 1.419, | |
| "step": 6710 | |
| }, | |
| { | |
| "epoch": 1.2055974165769645, | |
| "grad_norm": 9.797237396240234, | |
| "learning_rate": 3.795658414065304e-05, | |
| "loss": 1.3969, | |
| "step": 6720 | |
| }, | |
| { | |
| "epoch": 1.2073914603516327, | |
| "grad_norm": 7.217626094818115, | |
| "learning_rate": 3.793864370290635e-05, | |
| "loss": 1.3485, | |
| "step": 6730 | |
| }, | |
| { | |
| "epoch": 1.2091855041263007, | |
| "grad_norm": 7.798620223999023, | |
| "learning_rate": 3.792070326515967e-05, | |
| "loss": 1.2904, | |
| "step": 6740 | |
| }, | |
| { | |
| "epoch": 1.2109795479009688, | |
| "grad_norm": 9.841931343078613, | |
| "learning_rate": 3.790276282741299e-05, | |
| "loss": 1.4078, | |
| "step": 6750 | |
| }, | |
| { | |
| "epoch": 1.2127735916756368, | |
| "grad_norm": 8.47020435333252, | |
| "learning_rate": 3.788482238966631e-05, | |
| "loss": 1.4216, | |
| "step": 6760 | |
| }, | |
| { | |
| "epoch": 1.214567635450305, | |
| "grad_norm": 7.135184288024902, | |
| "learning_rate": 3.786688195191963e-05, | |
| "loss": 1.4159, | |
| "step": 6770 | |
| }, | |
| { | |
| "epoch": 1.2163616792249732, | |
| "grad_norm": 7.083540916442871, | |
| "learning_rate": 3.784894151417295e-05, | |
| "loss": 1.4004, | |
| "step": 6780 | |
| }, | |
| { | |
| "epoch": 1.2181557229996411, | |
| "grad_norm": 9.321927070617676, | |
| "learning_rate": 3.7831001076426265e-05, | |
| "loss": 1.4452, | |
| "step": 6790 | |
| }, | |
| { | |
| "epoch": 1.2199497667743093, | |
| "grad_norm": 8.969941139221191, | |
| "learning_rate": 3.781306063867959e-05, | |
| "loss": 1.4588, | |
| "step": 6800 | |
| }, | |
| { | |
| "epoch": 1.2217438105489773, | |
| "grad_norm": 7.412861347198486, | |
| "learning_rate": 3.7795120200932906e-05, | |
| "loss": 1.3526, | |
| "step": 6810 | |
| }, | |
| { | |
| "epoch": 1.2235378543236455, | |
| "grad_norm": 7.843968868255615, | |
| "learning_rate": 3.7777179763186224e-05, | |
| "loss": 1.3309, | |
| "step": 6820 | |
| }, | |
| { | |
| "epoch": 1.2253318980983137, | |
| "grad_norm": 6.551264762878418, | |
| "learning_rate": 3.775923932543954e-05, | |
| "loss": 1.4204, | |
| "step": 6830 | |
| }, | |
| { | |
| "epoch": 1.2271259418729816, | |
| "grad_norm": 8.688389778137207, | |
| "learning_rate": 3.774129888769286e-05, | |
| "loss": 1.4399, | |
| "step": 6840 | |
| }, | |
| { | |
| "epoch": 1.2289199856476498, | |
| "grad_norm": 8.688478469848633, | |
| "learning_rate": 3.7723358449946175e-05, | |
| "loss": 1.3618, | |
| "step": 6850 | |
| }, | |
| { | |
| "epoch": 1.230714029422318, | |
| "grad_norm": 8.20807933807373, | |
| "learning_rate": 3.77054180121995e-05, | |
| "loss": 1.4266, | |
| "step": 6860 | |
| }, | |
| { | |
| "epoch": 1.232508073196986, | |
| "grad_norm": 8.579695701599121, | |
| "learning_rate": 3.768747757445282e-05, | |
| "loss": 1.4258, | |
| "step": 6870 | |
| }, | |
| { | |
| "epoch": 1.2343021169716542, | |
| "grad_norm": 6.802687168121338, | |
| "learning_rate": 3.766953713670614e-05, | |
| "loss": 1.324, | |
| "step": 6880 | |
| }, | |
| { | |
| "epoch": 1.2360961607463221, | |
| "grad_norm": 6.273397445678711, | |
| "learning_rate": 3.765159669895946e-05, | |
| "loss": 1.375, | |
| "step": 6890 | |
| }, | |
| { | |
| "epoch": 1.2378902045209903, | |
| "grad_norm": 7.909563064575195, | |
| "learning_rate": 3.7633656261212775e-05, | |
| "loss": 1.297, | |
| "step": 6900 | |
| }, | |
| { | |
| "epoch": 1.2396842482956585, | |
| "grad_norm": 6.895613193511963, | |
| "learning_rate": 3.76157158234661e-05, | |
| "loss": 1.4579, | |
| "step": 6910 | |
| }, | |
| { | |
| "epoch": 1.2414782920703265, | |
| "grad_norm": 7.742472171783447, | |
| "learning_rate": 3.759777538571942e-05, | |
| "loss": 1.4445, | |
| "step": 6920 | |
| }, | |
| { | |
| "epoch": 1.2432723358449946, | |
| "grad_norm": 8.930936813354492, | |
| "learning_rate": 3.7579834947972734e-05, | |
| "loss": 1.2682, | |
| "step": 6930 | |
| }, | |
| { | |
| "epoch": 1.2450663796196628, | |
| "grad_norm": 6.448217391967773, | |
| "learning_rate": 3.756189451022605e-05, | |
| "loss": 1.4701, | |
| "step": 6940 | |
| }, | |
| { | |
| "epoch": 1.2468604233943308, | |
| "grad_norm": 6.52114200592041, | |
| "learning_rate": 3.754395407247937e-05, | |
| "loss": 1.4056, | |
| "step": 6950 | |
| }, | |
| { | |
| "epoch": 1.248654467168999, | |
| "grad_norm": 6.87349271774292, | |
| "learning_rate": 3.7526013634732686e-05, | |
| "loss": 1.3001, | |
| "step": 6960 | |
| }, | |
| { | |
| "epoch": 1.250448510943667, | |
| "grad_norm": 7.917197227478027, | |
| "learning_rate": 3.750807319698601e-05, | |
| "loss": 1.3976, | |
| "step": 6970 | |
| }, | |
| { | |
| "epoch": 1.2522425547183351, | |
| "grad_norm": 7.251535892486572, | |
| "learning_rate": 3.749013275923933e-05, | |
| "loss": 1.3153, | |
| "step": 6980 | |
| }, | |
| { | |
| "epoch": 1.2540365984930033, | |
| "grad_norm": 7.524764537811279, | |
| "learning_rate": 3.7472192321492644e-05, | |
| "loss": 1.4582, | |
| "step": 6990 | |
| }, | |
| { | |
| "epoch": 1.2558306422676713, | |
| "grad_norm": 8.992074012756348, | |
| "learning_rate": 3.745425188374597e-05, | |
| "loss": 1.3374, | |
| "step": 7000 | |
| }, | |
| { | |
| "epoch": 1.2576246860423395, | |
| "grad_norm": 10.817506790161133, | |
| "learning_rate": 3.7436311445999286e-05, | |
| "loss": 1.3457, | |
| "step": 7010 | |
| }, | |
| { | |
| "epoch": 1.2594187298170074, | |
| "grad_norm": 7.901443004608154, | |
| "learning_rate": 3.74183710082526e-05, | |
| "loss": 1.4439, | |
| "step": 7020 | |
| }, | |
| { | |
| "epoch": 1.2612127735916756, | |
| "grad_norm": 7.716637134552002, | |
| "learning_rate": 3.740043057050592e-05, | |
| "loss": 1.382, | |
| "step": 7030 | |
| }, | |
| { | |
| "epoch": 1.2630068173663438, | |
| "grad_norm": 7.97553014755249, | |
| "learning_rate": 3.738249013275924e-05, | |
| "loss": 1.3515, | |
| "step": 7040 | |
| }, | |
| { | |
| "epoch": 1.264800861141012, | |
| "grad_norm": 9.756081581115723, | |
| "learning_rate": 3.736454969501256e-05, | |
| "loss": 1.3815, | |
| "step": 7050 | |
| }, | |
| { | |
| "epoch": 1.26659490491568, | |
| "grad_norm": 7.838413238525391, | |
| "learning_rate": 3.734660925726588e-05, | |
| "loss": 1.4392, | |
| "step": 7060 | |
| }, | |
| { | |
| "epoch": 1.268388948690348, | |
| "grad_norm": 6.7297468185424805, | |
| "learning_rate": 3.7328668819519196e-05, | |
| "loss": 1.3719, | |
| "step": 7070 | |
| }, | |
| { | |
| "epoch": 1.270182992465016, | |
| "grad_norm": 7.776455879211426, | |
| "learning_rate": 3.731072838177252e-05, | |
| "loss": 1.3124, | |
| "step": 7080 | |
| }, | |
| { | |
| "epoch": 1.2719770362396843, | |
| "grad_norm": 8.554944038391113, | |
| "learning_rate": 3.729278794402584e-05, | |
| "loss": 1.3695, | |
| "step": 7090 | |
| }, | |
| { | |
| "epoch": 1.2737710800143525, | |
| "grad_norm": 6.043045997619629, | |
| "learning_rate": 3.7274847506279154e-05, | |
| "loss": 1.4149, | |
| "step": 7100 | |
| }, | |
| { | |
| "epoch": 1.2755651237890204, | |
| "grad_norm": 8.197552680969238, | |
| "learning_rate": 3.725690706853248e-05, | |
| "loss": 1.3095, | |
| "step": 7110 | |
| }, | |
| { | |
| "epoch": 1.2773591675636886, | |
| "grad_norm": 8.089428901672363, | |
| "learning_rate": 3.7238966630785796e-05, | |
| "loss": 1.4164, | |
| "step": 7120 | |
| }, | |
| { | |
| "epoch": 1.2791532113383566, | |
| "grad_norm": 12.543819427490234, | |
| "learning_rate": 3.722102619303911e-05, | |
| "loss": 1.2828, | |
| "step": 7130 | |
| }, | |
| { | |
| "epoch": 1.2809472551130248, | |
| "grad_norm": 7.321937561035156, | |
| "learning_rate": 3.720308575529243e-05, | |
| "loss": 1.2754, | |
| "step": 7140 | |
| }, | |
| { | |
| "epoch": 1.282741298887693, | |
| "grad_norm": 8.445917129516602, | |
| "learning_rate": 3.718514531754575e-05, | |
| "loss": 1.4625, | |
| "step": 7150 | |
| }, | |
| { | |
| "epoch": 1.284535342662361, | |
| "grad_norm": 5.982776165008545, | |
| "learning_rate": 3.7167204879799065e-05, | |
| "loss": 1.4169, | |
| "step": 7160 | |
| }, | |
| { | |
| "epoch": 1.286329386437029, | |
| "grad_norm": 7.333141803741455, | |
| "learning_rate": 3.714926444205239e-05, | |
| "loss": 1.466, | |
| "step": 7170 | |
| }, | |
| { | |
| "epoch": 1.288123430211697, | |
| "grad_norm": 7.591367244720459, | |
| "learning_rate": 3.7131324004305706e-05, | |
| "loss": 1.4827, | |
| "step": 7180 | |
| }, | |
| { | |
| "epoch": 1.2899174739863652, | |
| "grad_norm": 8.127729415893555, | |
| "learning_rate": 3.711338356655902e-05, | |
| "loss": 1.4232, | |
| "step": 7190 | |
| }, | |
| { | |
| "epoch": 1.2917115177610334, | |
| "grad_norm": 6.65064001083374, | |
| "learning_rate": 3.709544312881235e-05, | |
| "loss": 1.5094, | |
| "step": 7200 | |
| }, | |
| { | |
| "epoch": 1.2935055615357014, | |
| "grad_norm": 6.2712202072143555, | |
| "learning_rate": 3.7077502691065665e-05, | |
| "loss": 1.3632, | |
| "step": 7210 | |
| }, | |
| { | |
| "epoch": 1.2952996053103696, | |
| "grad_norm": 5.723544120788574, | |
| "learning_rate": 3.705956225331898e-05, | |
| "loss": 1.3911, | |
| "step": 7220 | |
| }, | |
| { | |
| "epoch": 1.2970936490850375, | |
| "grad_norm": 8.133112907409668, | |
| "learning_rate": 3.7041621815572306e-05, | |
| "loss": 1.3334, | |
| "step": 7230 | |
| }, | |
| { | |
| "epoch": 1.2988876928597057, | |
| "grad_norm": 9.744547843933105, | |
| "learning_rate": 3.702368137782562e-05, | |
| "loss": 1.4248, | |
| "step": 7240 | |
| }, | |
| { | |
| "epoch": 1.300681736634374, | |
| "grad_norm": 8.395071983337402, | |
| "learning_rate": 3.700574094007894e-05, | |
| "loss": 1.5036, | |
| "step": 7250 | |
| }, | |
| { | |
| "epoch": 1.302475780409042, | |
| "grad_norm": 8.343621253967285, | |
| "learning_rate": 3.698780050233226e-05, | |
| "loss": 1.3782, | |
| "step": 7260 | |
| }, | |
| { | |
| "epoch": 1.30426982418371, | |
| "grad_norm": 10.779861450195312, | |
| "learning_rate": 3.6969860064585575e-05, | |
| "loss": 1.4092, | |
| "step": 7270 | |
| }, | |
| { | |
| "epoch": 1.3060638679583783, | |
| "grad_norm": 12.210502624511719, | |
| "learning_rate": 3.69519196268389e-05, | |
| "loss": 1.3272, | |
| "step": 7280 | |
| }, | |
| { | |
| "epoch": 1.3078579117330462, | |
| "grad_norm": 8.902063369750977, | |
| "learning_rate": 3.6933979189092216e-05, | |
| "loss": 1.5054, | |
| "step": 7290 | |
| }, | |
| { | |
| "epoch": 1.3096519555077144, | |
| "grad_norm": 7.104801177978516, | |
| "learning_rate": 3.6916038751345533e-05, | |
| "loss": 1.4203, | |
| "step": 7300 | |
| }, | |
| { | |
| "epoch": 1.3114459992823826, | |
| "grad_norm": 5.748100757598877, | |
| "learning_rate": 3.689809831359886e-05, | |
| "loss": 1.4724, | |
| "step": 7310 | |
| }, | |
| { | |
| "epoch": 1.3132400430570506, | |
| "grad_norm": 8.453279495239258, | |
| "learning_rate": 3.6880157875852175e-05, | |
| "loss": 1.3665, | |
| "step": 7320 | |
| }, | |
| { | |
| "epoch": 1.3150340868317187, | |
| "grad_norm": 7.534119606018066, | |
| "learning_rate": 3.686221743810549e-05, | |
| "loss": 1.3748, | |
| "step": 7330 | |
| }, | |
| { | |
| "epoch": 1.3168281306063867, | |
| "grad_norm": 7.105281829833984, | |
| "learning_rate": 3.684427700035881e-05, | |
| "loss": 1.4103, | |
| "step": 7340 | |
| }, | |
| { | |
| "epoch": 1.318622174381055, | |
| "grad_norm": 7.355068683624268, | |
| "learning_rate": 3.6826336562612127e-05, | |
| "loss": 1.4492, | |
| "step": 7350 | |
| }, | |
| { | |
| "epoch": 1.320416218155723, | |
| "grad_norm": 6.437581539154053, | |
| "learning_rate": 3.6808396124865444e-05, | |
| "loss": 1.4176, | |
| "step": 7360 | |
| }, | |
| { | |
| "epoch": 1.322210261930391, | |
| "grad_norm": 8.563718795776367, | |
| "learning_rate": 3.679045568711877e-05, | |
| "loss": 1.3382, | |
| "step": 7370 | |
| }, | |
| { | |
| "epoch": 1.3240043057050592, | |
| "grad_norm": 7.735002040863037, | |
| "learning_rate": 3.6772515249372085e-05, | |
| "loss": 1.429, | |
| "step": 7380 | |
| }, | |
| { | |
| "epoch": 1.3257983494797272, | |
| "grad_norm": 7.190489768981934, | |
| "learning_rate": 3.67545748116254e-05, | |
| "loss": 1.3862, | |
| "step": 7390 | |
| }, | |
| { | |
| "epoch": 1.3275923932543954, | |
| "grad_norm": 8.7313814163208, | |
| "learning_rate": 3.6736634373878726e-05, | |
| "loss": 1.4154, | |
| "step": 7400 | |
| }, | |
| { | |
| "epoch": 1.3293864370290636, | |
| "grad_norm": 7.9539313316345215, | |
| "learning_rate": 3.6718693936132044e-05, | |
| "loss": 1.4132, | |
| "step": 7410 | |
| }, | |
| { | |
| "epoch": 1.3311804808037317, | |
| "grad_norm": 7.147149562835693, | |
| "learning_rate": 3.670075349838536e-05, | |
| "loss": 1.4062, | |
| "step": 7420 | |
| }, | |
| { | |
| "epoch": 1.3329745245783997, | |
| "grad_norm": 10.268470764160156, | |
| "learning_rate": 3.6682813060638685e-05, | |
| "loss": 1.4003, | |
| "step": 7430 | |
| }, | |
| { | |
| "epoch": 1.334768568353068, | |
| "grad_norm": 8.228311538696289, | |
| "learning_rate": 3.6664872622892e-05, | |
| "loss": 1.4234, | |
| "step": 7440 | |
| }, | |
| { | |
| "epoch": 1.3365626121277359, | |
| "grad_norm": 7.816580772399902, | |
| "learning_rate": 3.664693218514532e-05, | |
| "loss": 1.3835, | |
| "step": 7450 | |
| }, | |
| { | |
| "epoch": 1.338356655902404, | |
| "grad_norm": 5.961045742034912, | |
| "learning_rate": 3.662899174739864e-05, | |
| "loss": 1.4774, | |
| "step": 7460 | |
| }, | |
| { | |
| "epoch": 1.3401506996770722, | |
| "grad_norm": 9.59859848022461, | |
| "learning_rate": 3.6611051309651954e-05, | |
| "loss": 1.4122, | |
| "step": 7470 | |
| }, | |
| { | |
| "epoch": 1.3419447434517402, | |
| "grad_norm": 7.651191234588623, | |
| "learning_rate": 3.659311087190528e-05, | |
| "loss": 1.4664, | |
| "step": 7480 | |
| }, | |
| { | |
| "epoch": 1.3437387872264084, | |
| "grad_norm": 6.948186874389648, | |
| "learning_rate": 3.6575170434158595e-05, | |
| "loss": 1.3762, | |
| "step": 7490 | |
| }, | |
| { | |
| "epoch": 1.3455328310010763, | |
| "grad_norm": 5.879620552062988, | |
| "learning_rate": 3.655722999641191e-05, | |
| "loss": 1.3472, | |
| "step": 7500 | |
| }, | |
| { | |
| "epoch": 1.3473268747757445, | |
| "grad_norm": 10.246485710144043, | |
| "learning_rate": 3.6539289558665237e-05, | |
| "loss": 1.4756, | |
| "step": 7510 | |
| }, | |
| { | |
| "epoch": 1.3491209185504127, | |
| "grad_norm": 6.890244007110596, | |
| "learning_rate": 3.6521349120918554e-05, | |
| "loss": 1.3634, | |
| "step": 7520 | |
| }, | |
| { | |
| "epoch": 1.3509149623250807, | |
| "grad_norm": 9.019742012023926, | |
| "learning_rate": 3.650340868317187e-05, | |
| "loss": 1.5307, | |
| "step": 7530 | |
| }, | |
| { | |
| "epoch": 1.3527090060997489, | |
| "grad_norm": 7.515359401702881, | |
| "learning_rate": 3.648546824542519e-05, | |
| "loss": 1.2479, | |
| "step": 7540 | |
| }, | |
| { | |
| "epoch": 1.3545030498744168, | |
| "grad_norm": 7.437852382659912, | |
| "learning_rate": 3.6467527807678506e-05, | |
| "loss": 1.372, | |
| "step": 7550 | |
| }, | |
| { | |
| "epoch": 1.356297093649085, | |
| "grad_norm": 7.529526233673096, | |
| "learning_rate": 3.644958736993182e-05, | |
| "loss": 1.3672, | |
| "step": 7560 | |
| }, | |
| { | |
| "epoch": 1.3580911374237532, | |
| "grad_norm": 9.4348726272583, | |
| "learning_rate": 3.643164693218515e-05, | |
| "loss": 1.438, | |
| "step": 7570 | |
| }, | |
| { | |
| "epoch": 1.3598851811984212, | |
| "grad_norm": 8.870586395263672, | |
| "learning_rate": 3.6413706494438464e-05, | |
| "loss": 1.4271, | |
| "step": 7580 | |
| }, | |
| { | |
| "epoch": 1.3616792249730894, | |
| "grad_norm": 7.35816764831543, | |
| "learning_rate": 3.639576605669178e-05, | |
| "loss": 1.4001, | |
| "step": 7590 | |
| }, | |
| { | |
| "epoch": 1.3634732687477573, | |
| "grad_norm": 7.633657455444336, | |
| "learning_rate": 3.6377825618945105e-05, | |
| "loss": 1.4443, | |
| "step": 7600 | |
| }, | |
| { | |
| "epoch": 1.3652673125224255, | |
| "grad_norm": 8.26876163482666, | |
| "learning_rate": 3.635988518119842e-05, | |
| "loss": 1.3539, | |
| "step": 7610 | |
| }, | |
| { | |
| "epoch": 1.3670613562970937, | |
| "grad_norm": 9.011847496032715, | |
| "learning_rate": 3.634194474345175e-05, | |
| "loss": 1.3701, | |
| "step": 7620 | |
| }, | |
| { | |
| "epoch": 1.3688554000717619, | |
| "grad_norm": 8.344417572021484, | |
| "learning_rate": 3.6324004305705064e-05, | |
| "loss": 1.4872, | |
| "step": 7630 | |
| }, | |
| { | |
| "epoch": 1.3706494438464298, | |
| "grad_norm": 6.184938907623291, | |
| "learning_rate": 3.630606386795838e-05, | |
| "loss": 1.284, | |
| "step": 7640 | |
| }, | |
| { | |
| "epoch": 1.372443487621098, | |
| "grad_norm": 8.20331859588623, | |
| "learning_rate": 3.62881234302117e-05, | |
| "loss": 1.363, | |
| "step": 7650 | |
| }, | |
| { | |
| "epoch": 1.374237531395766, | |
| "grad_norm": 6.358704090118408, | |
| "learning_rate": 3.6270182992465016e-05, | |
| "loss": 1.3962, | |
| "step": 7660 | |
| }, | |
| { | |
| "epoch": 1.3760315751704342, | |
| "grad_norm": 7.734986305236816, | |
| "learning_rate": 3.625224255471833e-05, | |
| "loss": 1.3673, | |
| "step": 7670 | |
| }, | |
| { | |
| "epoch": 1.3778256189451024, | |
| "grad_norm": 8.597131729125977, | |
| "learning_rate": 3.623430211697166e-05, | |
| "loss": 1.372, | |
| "step": 7680 | |
| }, | |
| { | |
| "epoch": 1.3796196627197703, | |
| "grad_norm": 9.663629531860352, | |
| "learning_rate": 3.6216361679224974e-05, | |
| "loss": 1.3264, | |
| "step": 7690 | |
| }, | |
| { | |
| "epoch": 1.3814137064944385, | |
| "grad_norm": 8.822710990905762, | |
| "learning_rate": 3.619842124147829e-05, | |
| "loss": 1.4173, | |
| "step": 7700 | |
| }, | |
| { | |
| "epoch": 1.3832077502691065, | |
| "grad_norm": 6.7463459968566895, | |
| "learning_rate": 3.6180480803731616e-05, | |
| "loss": 1.3586, | |
| "step": 7710 | |
| }, | |
| { | |
| "epoch": 1.3850017940437747, | |
| "grad_norm": 7.816486835479736, | |
| "learning_rate": 3.616254036598493e-05, | |
| "loss": 1.4229, | |
| "step": 7720 | |
| }, | |
| { | |
| "epoch": 1.3867958378184428, | |
| "grad_norm": 8.482382774353027, | |
| "learning_rate": 3.614459992823825e-05, | |
| "loss": 1.4222, | |
| "step": 7730 | |
| }, | |
| { | |
| "epoch": 1.3885898815931108, | |
| "grad_norm": 8.289495468139648, | |
| "learning_rate": 3.6126659490491574e-05, | |
| "loss": 1.387, | |
| "step": 7740 | |
| }, | |
| { | |
| "epoch": 1.390383925367779, | |
| "grad_norm": 7.847174167633057, | |
| "learning_rate": 3.610871905274489e-05, | |
| "loss": 1.3878, | |
| "step": 7750 | |
| }, | |
| { | |
| "epoch": 1.392177969142447, | |
| "grad_norm": 7.200404167175293, | |
| "learning_rate": 3.60907786149982e-05, | |
| "loss": 1.4336, | |
| "step": 7760 | |
| }, | |
| { | |
| "epoch": 1.3939720129171151, | |
| "grad_norm": 7.738436698913574, | |
| "learning_rate": 3.6072838177251526e-05, | |
| "loss": 1.3772, | |
| "step": 7770 | |
| }, | |
| { | |
| "epoch": 1.3957660566917833, | |
| "grad_norm": 6.875908374786377, | |
| "learning_rate": 3.605489773950484e-05, | |
| "loss": 1.4554, | |
| "step": 7780 | |
| }, | |
| { | |
| "epoch": 1.3975601004664515, | |
| "grad_norm": 9.192317008972168, | |
| "learning_rate": 3.603695730175816e-05, | |
| "loss": 1.3069, | |
| "step": 7790 | |
| }, | |
| { | |
| "epoch": 1.3993541442411195, | |
| "grad_norm": 10.130300521850586, | |
| "learning_rate": 3.6019016864011484e-05, | |
| "loss": 1.3736, | |
| "step": 7800 | |
| }, | |
| { | |
| "epoch": 1.4011481880157877, | |
| "grad_norm": 7.170609951019287, | |
| "learning_rate": 3.60010764262648e-05, | |
| "loss": 1.3729, | |
| "step": 7810 | |
| }, | |
| { | |
| "epoch": 1.4029422317904556, | |
| "grad_norm": 9.054718971252441, | |
| "learning_rate": 3.5983135988518126e-05, | |
| "loss": 1.43, | |
| "step": 7820 | |
| }, | |
| { | |
| "epoch": 1.4047362755651238, | |
| "grad_norm": 7.545653820037842, | |
| "learning_rate": 3.596519555077144e-05, | |
| "loss": 1.3767, | |
| "step": 7830 | |
| }, | |
| { | |
| "epoch": 1.406530319339792, | |
| "grad_norm": 8.128164291381836, | |
| "learning_rate": 3.594725511302476e-05, | |
| "loss": 1.3917, | |
| "step": 7840 | |
| }, | |
| { | |
| "epoch": 1.40832436311446, | |
| "grad_norm": 8.18182373046875, | |
| "learning_rate": 3.592931467527808e-05, | |
| "loss": 1.4083, | |
| "step": 7850 | |
| }, | |
| { | |
| "epoch": 1.4101184068891282, | |
| "grad_norm": 9.797805786132812, | |
| "learning_rate": 3.5911374237531395e-05, | |
| "loss": 1.4863, | |
| "step": 7860 | |
| }, | |
| { | |
| "epoch": 1.4119124506637961, | |
| "grad_norm": 25.17400550842285, | |
| "learning_rate": 3.589343379978471e-05, | |
| "loss": 1.5191, | |
| "step": 7870 | |
| }, | |
| { | |
| "epoch": 1.4137064944384643, | |
| "grad_norm": 7.167262077331543, | |
| "learning_rate": 3.5875493362038036e-05, | |
| "loss": 1.4151, | |
| "step": 7880 | |
| }, | |
| { | |
| "epoch": 1.4155005382131325, | |
| "grad_norm": 6.82859992980957, | |
| "learning_rate": 3.585755292429135e-05, | |
| "loss": 1.374, | |
| "step": 7890 | |
| }, | |
| { | |
| "epoch": 1.4172945819878005, | |
| "grad_norm": 8.16007137298584, | |
| "learning_rate": 3.583961248654467e-05, | |
| "loss": 1.3523, | |
| "step": 7900 | |
| }, | |
| { | |
| "epoch": 1.4190886257624686, | |
| "grad_norm": 7.259387969970703, | |
| "learning_rate": 3.5821672048797995e-05, | |
| "loss": 1.3811, | |
| "step": 7910 | |
| }, | |
| { | |
| "epoch": 1.4208826695371366, | |
| "grad_norm": 7.798694610595703, | |
| "learning_rate": 3.580373161105131e-05, | |
| "loss": 1.4331, | |
| "step": 7920 | |
| }, | |
| { | |
| "epoch": 1.4226767133118048, | |
| "grad_norm": 6.832756519317627, | |
| "learning_rate": 3.578579117330463e-05, | |
| "loss": 1.3798, | |
| "step": 7930 | |
| }, | |
| { | |
| "epoch": 1.424470757086473, | |
| "grad_norm": 8.465885162353516, | |
| "learning_rate": 3.576785073555795e-05, | |
| "loss": 1.368, | |
| "step": 7940 | |
| }, | |
| { | |
| "epoch": 1.426264800861141, | |
| "grad_norm": 7.031029224395752, | |
| "learning_rate": 3.574991029781127e-05, | |
| "loss": 1.2938, | |
| "step": 7950 | |
| }, | |
| { | |
| "epoch": 1.4280588446358091, | |
| "grad_norm": 8.129319190979004, | |
| "learning_rate": 3.573196986006459e-05, | |
| "loss": 1.3336, | |
| "step": 7960 | |
| }, | |
| { | |
| "epoch": 1.429852888410477, | |
| "grad_norm": 7.408348560333252, | |
| "learning_rate": 3.5714029422317905e-05, | |
| "loss": 1.2998, | |
| "step": 7970 | |
| }, | |
| { | |
| "epoch": 1.4316469321851453, | |
| "grad_norm": 7.85107421875, | |
| "learning_rate": 3.569608898457122e-05, | |
| "loss": 1.483, | |
| "step": 7980 | |
| }, | |
| { | |
| "epoch": 1.4334409759598135, | |
| "grad_norm": 7.82799768447876, | |
| "learning_rate": 3.5678148546824546e-05, | |
| "loss": 1.4505, | |
| "step": 7990 | |
| }, | |
| { | |
| "epoch": 1.4352350197344816, | |
| "grad_norm": 10.371716499328613, | |
| "learning_rate": 3.5660208109077864e-05, | |
| "loss": 1.3282, | |
| "step": 8000 | |
| }, | |
| { | |
| "epoch": 1.4370290635091496, | |
| "grad_norm": 9.943699836730957, | |
| "learning_rate": 3.564226767133118e-05, | |
| "loss": 1.4015, | |
| "step": 8010 | |
| }, | |
| { | |
| "epoch": 1.4388231072838178, | |
| "grad_norm": 6.916280746459961, | |
| "learning_rate": 3.5624327233584505e-05, | |
| "loss": 1.4301, | |
| "step": 8020 | |
| }, | |
| { | |
| "epoch": 1.4406171510584858, | |
| "grad_norm": 8.76120662689209, | |
| "learning_rate": 3.560638679583782e-05, | |
| "loss": 1.386, | |
| "step": 8030 | |
| }, | |
| { | |
| "epoch": 1.442411194833154, | |
| "grad_norm": 8.706786155700684, | |
| "learning_rate": 3.558844635809114e-05, | |
| "loss": 1.4428, | |
| "step": 8040 | |
| }, | |
| { | |
| "epoch": 1.4442052386078221, | |
| "grad_norm": 6.031268119812012, | |
| "learning_rate": 3.557050592034446e-05, | |
| "loss": 1.347, | |
| "step": 8050 | |
| }, | |
| { | |
| "epoch": 1.44599928238249, | |
| "grad_norm": 9.217594146728516, | |
| "learning_rate": 3.5552565482597774e-05, | |
| "loss": 1.5703, | |
| "step": 8060 | |
| }, | |
| { | |
| "epoch": 1.4477933261571583, | |
| "grad_norm": 8.151002883911133, | |
| "learning_rate": 3.553462504485109e-05, | |
| "loss": 1.3317, | |
| "step": 8070 | |
| }, | |
| { | |
| "epoch": 1.4495873699318262, | |
| "grad_norm": 8.76799488067627, | |
| "learning_rate": 3.5516684607104415e-05, | |
| "loss": 1.4876, | |
| "step": 8080 | |
| }, | |
| { | |
| "epoch": 1.4513814137064944, | |
| "grad_norm": 11.447850227355957, | |
| "learning_rate": 3.549874416935773e-05, | |
| "loss": 1.3937, | |
| "step": 8090 | |
| }, | |
| { | |
| "epoch": 1.4531754574811626, | |
| "grad_norm": 7.755031585693359, | |
| "learning_rate": 3.548080373161105e-05, | |
| "loss": 1.4311, | |
| "step": 8100 | |
| }, | |
| { | |
| "epoch": 1.4549695012558306, | |
| "grad_norm": 5.995707988739014, | |
| "learning_rate": 3.5462863293864374e-05, | |
| "loss": 1.4221, | |
| "step": 8110 | |
| }, | |
| { | |
| "epoch": 1.4567635450304988, | |
| "grad_norm": 6.535067558288574, | |
| "learning_rate": 3.544492285611769e-05, | |
| "loss": 1.3953, | |
| "step": 8120 | |
| }, | |
| { | |
| "epoch": 1.4585575888051667, | |
| "grad_norm": 14.317142486572266, | |
| "learning_rate": 3.542698241837101e-05, | |
| "loss": 1.4777, | |
| "step": 8130 | |
| }, | |
| { | |
| "epoch": 1.460351632579835, | |
| "grad_norm": 9.180553436279297, | |
| "learning_rate": 3.540904198062433e-05, | |
| "loss": 1.3552, | |
| "step": 8140 | |
| }, | |
| { | |
| "epoch": 1.462145676354503, | |
| "grad_norm": 7.935247898101807, | |
| "learning_rate": 3.539110154287765e-05, | |
| "loss": 1.296, | |
| "step": 8150 | |
| }, | |
| { | |
| "epoch": 1.4639397201291713, | |
| "grad_norm": 9.270010948181152, | |
| "learning_rate": 3.537316110513097e-05, | |
| "loss": 1.3484, | |
| "step": 8160 | |
| }, | |
| { | |
| "epoch": 1.4657337639038392, | |
| "grad_norm": 7.344752311706543, | |
| "learning_rate": 3.5355220667384284e-05, | |
| "loss": 1.3878, | |
| "step": 8170 | |
| }, | |
| { | |
| "epoch": 1.4675278076785074, | |
| "grad_norm": 5.794569492340088, | |
| "learning_rate": 3.53372802296376e-05, | |
| "loss": 1.2688, | |
| "step": 8180 | |
| }, | |
| { | |
| "epoch": 1.4693218514531754, | |
| "grad_norm": 8.966668128967285, | |
| "learning_rate": 3.5319339791890925e-05, | |
| "loss": 1.3682, | |
| "step": 8190 | |
| }, | |
| { | |
| "epoch": 1.4711158952278436, | |
| "grad_norm": 8.947368621826172, | |
| "learning_rate": 3.530139935414424e-05, | |
| "loss": 1.3869, | |
| "step": 8200 | |
| }, | |
| { | |
| "epoch": 1.4729099390025118, | |
| "grad_norm": 8.610750198364258, | |
| "learning_rate": 3.528345891639756e-05, | |
| "loss": 1.3788, | |
| "step": 8210 | |
| }, | |
| { | |
| "epoch": 1.4747039827771797, | |
| "grad_norm": 8.098158836364746, | |
| "learning_rate": 3.5265518478650884e-05, | |
| "loss": 1.364, | |
| "step": 8220 | |
| }, | |
| { | |
| "epoch": 1.476498026551848, | |
| "grad_norm": 5.487945079803467, | |
| "learning_rate": 3.52475780409042e-05, | |
| "loss": 1.4057, | |
| "step": 8230 | |
| }, | |
| { | |
| "epoch": 1.4782920703265159, | |
| "grad_norm": 7.017474174499512, | |
| "learning_rate": 3.522963760315752e-05, | |
| "loss": 1.4042, | |
| "step": 8240 | |
| }, | |
| { | |
| "epoch": 1.480086114101184, | |
| "grad_norm": 7.3450469970703125, | |
| "learning_rate": 3.521169716541084e-05, | |
| "loss": 1.3999, | |
| "step": 8250 | |
| }, | |
| { | |
| "epoch": 1.4818801578758523, | |
| "grad_norm": 7.210866928100586, | |
| "learning_rate": 3.519375672766416e-05, | |
| "loss": 1.3868, | |
| "step": 8260 | |
| }, | |
| { | |
| "epoch": 1.4836742016505202, | |
| "grad_norm": 8.641252517700195, | |
| "learning_rate": 3.517581628991748e-05, | |
| "loss": 1.374, | |
| "step": 8270 | |
| }, | |
| { | |
| "epoch": 1.4854682454251884, | |
| "grad_norm": 8.86153507232666, | |
| "learning_rate": 3.5157875852170794e-05, | |
| "loss": 1.4113, | |
| "step": 8280 | |
| }, | |
| { | |
| "epoch": 1.4872622891998564, | |
| "grad_norm": 8.298069953918457, | |
| "learning_rate": 3.513993541442411e-05, | |
| "loss": 1.3787, | |
| "step": 8290 | |
| }, | |
| { | |
| "epoch": 1.4890563329745246, | |
| "grad_norm": 7.584449291229248, | |
| "learning_rate": 3.512199497667743e-05, | |
| "loss": 1.4623, | |
| "step": 8300 | |
| }, | |
| { | |
| "epoch": 1.4908503767491927, | |
| "grad_norm": 9.79563045501709, | |
| "learning_rate": 3.510405453893075e-05, | |
| "loss": 1.3634, | |
| "step": 8310 | |
| }, | |
| { | |
| "epoch": 1.4926444205238607, | |
| "grad_norm": 7.416035175323486, | |
| "learning_rate": 3.508611410118407e-05, | |
| "loss": 1.3367, | |
| "step": 8320 | |
| }, | |
| { | |
| "epoch": 1.494438464298529, | |
| "grad_norm": 7.025708198547363, | |
| "learning_rate": 3.506817366343739e-05, | |
| "loss": 1.4087, | |
| "step": 8330 | |
| }, | |
| { | |
| "epoch": 1.4962325080731969, | |
| "grad_norm": 6.349498271942139, | |
| "learning_rate": 3.505023322569071e-05, | |
| "loss": 1.2794, | |
| "step": 8340 | |
| }, | |
| { | |
| "epoch": 1.498026551847865, | |
| "grad_norm": 9.222763061523438, | |
| "learning_rate": 3.503229278794403e-05, | |
| "loss": 1.4334, | |
| "step": 8350 | |
| }, | |
| { | |
| "epoch": 1.4998205956225332, | |
| "grad_norm": 7.805530548095703, | |
| "learning_rate": 3.5014352350197346e-05, | |
| "loss": 1.5309, | |
| "step": 8360 | |
| }, | |
| { | |
| "epoch": 1.5016146393972014, | |
| "grad_norm": 7.701581954956055, | |
| "learning_rate": 3.499641191245066e-05, | |
| "loss": 1.4599, | |
| "step": 8370 | |
| }, | |
| { | |
| "epoch": 1.5034086831718694, | |
| "grad_norm": 7.224992275238037, | |
| "learning_rate": 3.497847147470398e-05, | |
| "loss": 1.3609, | |
| "step": 8380 | |
| }, | |
| { | |
| "epoch": 1.5052027269465373, | |
| "grad_norm": 6.719163417816162, | |
| "learning_rate": 3.4960531036957304e-05, | |
| "loss": 1.3586, | |
| "step": 8390 | |
| }, | |
| { | |
| "epoch": 1.5069967707212055, | |
| "grad_norm": 6.938652038574219, | |
| "learning_rate": 3.494259059921062e-05, | |
| "loss": 1.455, | |
| "step": 8400 | |
| }, | |
| { | |
| "epoch": 1.5087908144958737, | |
| "grad_norm": 7.92690896987915, | |
| "learning_rate": 3.492465016146394e-05, | |
| "loss": 1.4174, | |
| "step": 8410 | |
| }, | |
| { | |
| "epoch": 1.510584858270542, | |
| "grad_norm": 6.6979660987854, | |
| "learning_rate": 3.490670972371726e-05, | |
| "loss": 1.337, | |
| "step": 8420 | |
| }, | |
| { | |
| "epoch": 1.5123789020452099, | |
| "grad_norm": 7.451391220092773, | |
| "learning_rate": 3.488876928597058e-05, | |
| "loss": 1.3879, | |
| "step": 8430 | |
| }, | |
| { | |
| "epoch": 1.514172945819878, | |
| "grad_norm": 8.262466430664062, | |
| "learning_rate": 3.48708288482239e-05, | |
| "loss": 1.3828, | |
| "step": 8440 | |
| }, | |
| { | |
| "epoch": 1.515966989594546, | |
| "grad_norm": 8.4102201461792, | |
| "learning_rate": 3.485288841047722e-05, | |
| "loss": 1.4623, | |
| "step": 8450 | |
| }, | |
| { | |
| "epoch": 1.5177610333692142, | |
| "grad_norm": 7.731327533721924, | |
| "learning_rate": 3.483494797273054e-05, | |
| "loss": 1.381, | |
| "step": 8460 | |
| }, | |
| { | |
| "epoch": 1.5195550771438824, | |
| "grad_norm": 8.887019157409668, | |
| "learning_rate": 3.4817007534983856e-05, | |
| "loss": 1.3625, | |
| "step": 8470 | |
| }, | |
| { | |
| "epoch": 1.5213491209185506, | |
| "grad_norm": 7.205885410308838, | |
| "learning_rate": 3.479906709723717e-05, | |
| "loss": 1.3577, | |
| "step": 8480 | |
| }, | |
| { | |
| "epoch": 1.5231431646932185, | |
| "grad_norm": 12.000414848327637, | |
| "learning_rate": 3.478112665949049e-05, | |
| "loss": 1.4075, | |
| "step": 8490 | |
| }, | |
| { | |
| "epoch": 1.5249372084678865, | |
| "grad_norm": 7.290775775909424, | |
| "learning_rate": 3.476318622174381e-05, | |
| "loss": 1.3658, | |
| "step": 8500 | |
| }, | |
| { | |
| "epoch": 1.5267312522425547, | |
| "grad_norm": 7.6026225090026855, | |
| "learning_rate": 3.474524578399713e-05, | |
| "loss": 1.3091, | |
| "step": 8510 | |
| }, | |
| { | |
| "epoch": 1.5285252960172229, | |
| "grad_norm": 8.19789981842041, | |
| "learning_rate": 3.472730534625045e-05, | |
| "loss": 1.4242, | |
| "step": 8520 | |
| }, | |
| { | |
| "epoch": 1.530319339791891, | |
| "grad_norm": 9.370061874389648, | |
| "learning_rate": 3.4709364908503766e-05, | |
| "loss": 1.4437, | |
| "step": 8530 | |
| }, | |
| { | |
| "epoch": 1.532113383566559, | |
| "grad_norm": 7.536168575286865, | |
| "learning_rate": 3.469142447075709e-05, | |
| "loss": 1.3764, | |
| "step": 8540 | |
| }, | |
| { | |
| "epoch": 1.533907427341227, | |
| "grad_norm": 8.509814262390137, | |
| "learning_rate": 3.467348403301041e-05, | |
| "loss": 1.3138, | |
| "step": 8550 | |
| }, | |
| { | |
| "epoch": 1.5357014711158952, | |
| "grad_norm": 7.023370265960693, | |
| "learning_rate": 3.465554359526373e-05, | |
| "loss": 1.43, | |
| "step": 8560 | |
| }, | |
| { | |
| "epoch": 1.5374955148905634, | |
| "grad_norm": 8.391500473022461, | |
| "learning_rate": 3.463760315751705e-05, | |
| "loss": 1.3419, | |
| "step": 8570 | |
| }, | |
| { | |
| "epoch": 1.5392895586652315, | |
| "grad_norm": 8.216898918151855, | |
| "learning_rate": 3.461966271977036e-05, | |
| "loss": 1.3849, | |
| "step": 8580 | |
| }, | |
| { | |
| "epoch": 1.5410836024398995, | |
| "grad_norm": 7.629865646362305, | |
| "learning_rate": 3.4601722282023683e-05, | |
| "loss": 1.4518, | |
| "step": 8590 | |
| }, | |
| { | |
| "epoch": 1.5428776462145677, | |
| "grad_norm": 7.480790138244629, | |
| "learning_rate": 3.4583781844277e-05, | |
| "loss": 1.3378, | |
| "step": 8600 | |
| }, | |
| { | |
| "epoch": 1.5446716899892357, | |
| "grad_norm": 7.571674346923828, | |
| "learning_rate": 3.456584140653032e-05, | |
| "loss": 1.4035, | |
| "step": 8610 | |
| }, | |
| { | |
| "epoch": 1.5464657337639038, | |
| "grad_norm": 8.145289421081543, | |
| "learning_rate": 3.454790096878364e-05, | |
| "loss": 1.3878, | |
| "step": 8620 | |
| }, | |
| { | |
| "epoch": 1.548259777538572, | |
| "grad_norm": 8.560420036315918, | |
| "learning_rate": 3.452996053103696e-05, | |
| "loss": 1.4033, | |
| "step": 8630 | |
| }, | |
| { | |
| "epoch": 1.55005382131324, | |
| "grad_norm": 8.526228904724121, | |
| "learning_rate": 3.4512020093290277e-05, | |
| "loss": 1.3257, | |
| "step": 8640 | |
| }, | |
| { | |
| "epoch": 1.5518478650879082, | |
| "grad_norm": 7.641489505767822, | |
| "learning_rate": 3.44940796555436e-05, | |
| "loss": 1.37, | |
| "step": 8650 | |
| }, | |
| { | |
| "epoch": 1.5536419088625761, | |
| "grad_norm": 7.322695732116699, | |
| "learning_rate": 3.447613921779692e-05, | |
| "loss": 1.3798, | |
| "step": 8660 | |
| }, | |
| { | |
| "epoch": 1.5554359526372443, | |
| "grad_norm": 9.153738975524902, | |
| "learning_rate": 3.4458198780050235e-05, | |
| "loss": 1.5305, | |
| "step": 8670 | |
| }, | |
| { | |
| "epoch": 1.5572299964119125, | |
| "grad_norm": 6.289071559906006, | |
| "learning_rate": 3.444025834230355e-05, | |
| "loss": 1.213, | |
| "step": 8680 | |
| }, | |
| { | |
| "epoch": 1.5590240401865807, | |
| "grad_norm": 8.083537101745605, | |
| "learning_rate": 3.442231790455687e-05, | |
| "loss": 1.3258, | |
| "step": 8690 | |
| }, | |
| { | |
| "epoch": 1.5608180839612487, | |
| "grad_norm": 8.730308532714844, | |
| "learning_rate": 3.440437746681019e-05, | |
| "loss": 1.4032, | |
| "step": 8700 | |
| }, | |
| { | |
| "epoch": 1.5626121277359166, | |
| "grad_norm": 8.361651420593262, | |
| "learning_rate": 3.438643702906351e-05, | |
| "loss": 1.3918, | |
| "step": 8710 | |
| }, | |
| { | |
| "epoch": 1.5644061715105848, | |
| "grad_norm": 7.762195110321045, | |
| "learning_rate": 3.436849659131683e-05, | |
| "loss": 1.4657, | |
| "step": 8720 | |
| }, | |
| { | |
| "epoch": 1.566200215285253, | |
| "grad_norm": 8.610489845275879, | |
| "learning_rate": 3.435055615357015e-05, | |
| "loss": 1.4472, | |
| "step": 8730 | |
| }, | |
| { | |
| "epoch": 1.5679942590599212, | |
| "grad_norm": 6.9795098304748535, | |
| "learning_rate": 3.433261571582347e-05, | |
| "loss": 1.5167, | |
| "step": 8740 | |
| }, | |
| { | |
| "epoch": 1.5697883028345891, | |
| "grad_norm": 6.0824408531188965, | |
| "learning_rate": 3.431467527807679e-05, | |
| "loss": 1.446, | |
| "step": 8750 | |
| }, | |
| { | |
| "epoch": 1.571582346609257, | |
| "grad_norm": 6.787125587463379, | |
| "learning_rate": 3.429673484033011e-05, | |
| "loss": 1.5047, | |
| "step": 8760 | |
| }, | |
| { | |
| "epoch": 1.5733763903839253, | |
| "grad_norm": 8.646109580993652, | |
| "learning_rate": 3.427879440258343e-05, | |
| "loss": 1.3271, | |
| "step": 8770 | |
| }, | |
| { | |
| "epoch": 1.5751704341585935, | |
| "grad_norm": 7.65358304977417, | |
| "learning_rate": 3.4260853964836745e-05, | |
| "loss": 1.3963, | |
| "step": 8780 | |
| }, | |
| { | |
| "epoch": 1.5769644779332617, | |
| "grad_norm": 9.546274185180664, | |
| "learning_rate": 3.424291352709006e-05, | |
| "loss": 1.4044, | |
| "step": 8790 | |
| }, | |
| { | |
| "epoch": 1.5787585217079296, | |
| "grad_norm": 8.250378608703613, | |
| "learning_rate": 3.422497308934338e-05, | |
| "loss": 1.3717, | |
| "step": 8800 | |
| }, | |
| { | |
| "epoch": 1.5805525654825978, | |
| "grad_norm": 8.04243278503418, | |
| "learning_rate": 3.42070326515967e-05, | |
| "loss": 1.3954, | |
| "step": 8810 | |
| }, | |
| { | |
| "epoch": 1.5823466092572658, | |
| "grad_norm": 7.013770580291748, | |
| "learning_rate": 3.418909221385002e-05, | |
| "loss": 1.3597, | |
| "step": 8820 | |
| }, | |
| { | |
| "epoch": 1.584140653031934, | |
| "grad_norm": 7.129620552062988, | |
| "learning_rate": 3.417115177610334e-05, | |
| "loss": 1.3492, | |
| "step": 8830 | |
| }, | |
| { | |
| "epoch": 1.5859346968066022, | |
| "grad_norm": 7.23296594619751, | |
| "learning_rate": 3.4153211338356656e-05, | |
| "loss": 1.4067, | |
| "step": 8840 | |
| }, | |
| { | |
| "epoch": 1.5877287405812703, | |
| "grad_norm": 6.118582248687744, | |
| "learning_rate": 3.413527090060998e-05, | |
| "loss": 1.3626, | |
| "step": 8850 | |
| }, | |
| { | |
| "epoch": 1.5895227843559383, | |
| "grad_norm": 8.783863067626953, | |
| "learning_rate": 3.41173304628633e-05, | |
| "loss": 1.4064, | |
| "step": 8860 | |
| }, | |
| { | |
| "epoch": 1.5913168281306063, | |
| "grad_norm": 7.83473014831543, | |
| "learning_rate": 3.4099390025116614e-05, | |
| "loss": 1.391, | |
| "step": 8870 | |
| }, | |
| { | |
| "epoch": 1.5931108719052745, | |
| "grad_norm": 7.351261138916016, | |
| "learning_rate": 3.408144958736993e-05, | |
| "loss": 1.3933, | |
| "step": 8880 | |
| }, | |
| { | |
| "epoch": 1.5949049156799426, | |
| "grad_norm": 7.231532096862793, | |
| "learning_rate": 3.406350914962325e-05, | |
| "loss": 1.4398, | |
| "step": 8890 | |
| }, | |
| { | |
| "epoch": 1.5966989594546108, | |
| "grad_norm": 7.532142639160156, | |
| "learning_rate": 3.4045568711876566e-05, | |
| "loss": 1.3316, | |
| "step": 8900 | |
| }, | |
| { | |
| "epoch": 1.5984930032292788, | |
| "grad_norm": 6.363405227661133, | |
| "learning_rate": 3.402762827412989e-05, | |
| "loss": 1.4548, | |
| "step": 8910 | |
| }, | |
| { | |
| "epoch": 1.6002870470039467, | |
| "grad_norm": 8.271924018859863, | |
| "learning_rate": 3.400968783638321e-05, | |
| "loss": 1.2696, | |
| "step": 8920 | |
| }, | |
| { | |
| "epoch": 1.602081090778615, | |
| "grad_norm": 6.662013053894043, | |
| "learning_rate": 3.399174739863653e-05, | |
| "loss": 1.3041, | |
| "step": 8930 | |
| }, | |
| { | |
| "epoch": 1.6038751345532831, | |
| "grad_norm": 9.983985900878906, | |
| "learning_rate": 3.397380696088985e-05, | |
| "loss": 1.3968, | |
| "step": 8940 | |
| }, | |
| { | |
| "epoch": 1.6056691783279513, | |
| "grad_norm": 7.235292911529541, | |
| "learning_rate": 3.3955866523143166e-05, | |
| "loss": 1.3696, | |
| "step": 8950 | |
| }, | |
| { | |
| "epoch": 1.6074632221026193, | |
| "grad_norm": 10.884222984313965, | |
| "learning_rate": 3.393792608539649e-05, | |
| "loss": 1.5237, | |
| "step": 8960 | |
| }, | |
| { | |
| "epoch": 1.6092572658772875, | |
| "grad_norm": 7.906916618347168, | |
| "learning_rate": 3.391998564764981e-05, | |
| "loss": 1.5189, | |
| "step": 8970 | |
| }, | |
| { | |
| "epoch": 1.6110513096519554, | |
| "grad_norm": 6.324611186981201, | |
| "learning_rate": 3.3902045209903124e-05, | |
| "loss": 1.4566, | |
| "step": 8980 | |
| }, | |
| { | |
| "epoch": 1.6128453534266236, | |
| "grad_norm": 9.540352821350098, | |
| "learning_rate": 3.388410477215644e-05, | |
| "loss": 1.3418, | |
| "step": 8990 | |
| }, | |
| { | |
| "epoch": 1.6146393972012918, | |
| "grad_norm": 9.794340133666992, | |
| "learning_rate": 3.386616433440976e-05, | |
| "loss": 1.532, | |
| "step": 9000 | |
| }, | |
| { | |
| "epoch": 1.61643344097596, | |
| "grad_norm": 7.887220859527588, | |
| "learning_rate": 3.3848223896663076e-05, | |
| "loss": 1.3238, | |
| "step": 9010 | |
| }, | |
| { | |
| "epoch": 1.618227484750628, | |
| "grad_norm": 8.027101516723633, | |
| "learning_rate": 3.38302834589164e-05, | |
| "loss": 1.255, | |
| "step": 9020 | |
| }, | |
| { | |
| "epoch": 1.620021528525296, | |
| "grad_norm": 13.341578483581543, | |
| "learning_rate": 3.381234302116972e-05, | |
| "loss": 1.4349, | |
| "step": 9030 | |
| }, | |
| { | |
| "epoch": 1.621815572299964, | |
| "grad_norm": 8.621784210205078, | |
| "learning_rate": 3.3794402583423035e-05, | |
| "loss": 1.3003, | |
| "step": 9040 | |
| }, | |
| { | |
| "epoch": 1.6236096160746323, | |
| "grad_norm": 6.994039058685303, | |
| "learning_rate": 3.377646214567636e-05, | |
| "loss": 1.4052, | |
| "step": 9050 | |
| }, | |
| { | |
| "epoch": 1.6254036598493005, | |
| "grad_norm": 7.07314920425415, | |
| "learning_rate": 3.3758521707929676e-05, | |
| "loss": 1.4283, | |
| "step": 9060 | |
| }, | |
| { | |
| "epoch": 1.6271977036239684, | |
| "grad_norm": 6.580167293548584, | |
| "learning_rate": 3.374058127018299e-05, | |
| "loss": 1.3234, | |
| "step": 9070 | |
| }, | |
| { | |
| "epoch": 1.6289917473986364, | |
| "grad_norm": 9.062819480895996, | |
| "learning_rate": 3.372264083243632e-05, | |
| "loss": 1.4362, | |
| "step": 9080 | |
| }, | |
| { | |
| "epoch": 1.6307857911733046, | |
| "grad_norm": 7.770894527435303, | |
| "learning_rate": 3.3704700394689634e-05, | |
| "loss": 1.3414, | |
| "step": 9090 | |
| }, | |
| { | |
| "epoch": 1.6325798349479728, | |
| "grad_norm": 6.877737522125244, | |
| "learning_rate": 3.368675995694295e-05, | |
| "loss": 1.2824, | |
| "step": 9100 | |
| }, | |
| { | |
| "epoch": 1.634373878722641, | |
| "grad_norm": 8.166614532470703, | |
| "learning_rate": 3.366881951919627e-05, | |
| "loss": 1.4358, | |
| "step": 9110 | |
| }, | |
| { | |
| "epoch": 1.636167922497309, | |
| "grad_norm": 8.125406265258789, | |
| "learning_rate": 3.3650879081449586e-05, | |
| "loss": 1.4451, | |
| "step": 9120 | |
| }, | |
| { | |
| "epoch": 1.6379619662719769, | |
| "grad_norm": 6.3429412841796875, | |
| "learning_rate": 3.363293864370291e-05, | |
| "loss": 1.3547, | |
| "step": 9130 | |
| }, | |
| { | |
| "epoch": 1.639756010046645, | |
| "grad_norm": 9.225153923034668, | |
| "learning_rate": 3.361499820595623e-05, | |
| "loss": 1.3582, | |
| "step": 9140 | |
| }, | |
| { | |
| "epoch": 1.6415500538213132, | |
| "grad_norm": 6.7825117111206055, | |
| "learning_rate": 3.3597057768209545e-05, | |
| "loss": 1.3817, | |
| "step": 9150 | |
| }, | |
| { | |
| "epoch": 1.6433440975959814, | |
| "grad_norm": 7.816966533660889, | |
| "learning_rate": 3.357911733046287e-05, | |
| "loss": 1.2981, | |
| "step": 9160 | |
| }, | |
| { | |
| "epoch": 1.6451381413706494, | |
| "grad_norm": 8.143369674682617, | |
| "learning_rate": 3.3561176892716186e-05, | |
| "loss": 1.3185, | |
| "step": 9170 | |
| }, | |
| { | |
| "epoch": 1.6469321851453176, | |
| "grad_norm": 9.256525993347168, | |
| "learning_rate": 3.35432364549695e-05, | |
| "loss": 1.4458, | |
| "step": 9180 | |
| }, | |
| { | |
| "epoch": 1.6487262289199855, | |
| "grad_norm": 8.836395263671875, | |
| "learning_rate": 3.352529601722282e-05, | |
| "loss": 1.3648, | |
| "step": 9190 | |
| }, | |
| { | |
| "epoch": 1.6505202726946537, | |
| "grad_norm": 10.375576972961426, | |
| "learning_rate": 3.350735557947614e-05, | |
| "loss": 1.4796, | |
| "step": 9200 | |
| }, | |
| { | |
| "epoch": 1.652314316469322, | |
| "grad_norm": 8.008833885192871, | |
| "learning_rate": 3.3489415141729455e-05, | |
| "loss": 1.3293, | |
| "step": 9210 | |
| }, | |
| { | |
| "epoch": 1.65410836024399, | |
| "grad_norm": 7.188244819641113, | |
| "learning_rate": 3.347147470398278e-05, | |
| "loss": 1.4948, | |
| "step": 9220 | |
| }, | |
| { | |
| "epoch": 1.655902404018658, | |
| "grad_norm": 8.808504104614258, | |
| "learning_rate": 3.3453534266236096e-05, | |
| "loss": 1.4974, | |
| "step": 9230 | |
| }, | |
| { | |
| "epoch": 1.657696447793326, | |
| "grad_norm": 9.407508850097656, | |
| "learning_rate": 3.3435593828489414e-05, | |
| "loss": 1.4261, | |
| "step": 9240 | |
| }, | |
| { | |
| "epoch": 1.6594904915679942, | |
| "grad_norm": 7.234744548797607, | |
| "learning_rate": 3.341765339074274e-05, | |
| "loss": 1.3784, | |
| "step": 9250 | |
| }, | |
| { | |
| "epoch": 1.6612845353426624, | |
| "grad_norm": 10.013113021850586, | |
| "learning_rate": 3.3399712952996055e-05, | |
| "loss": 1.4982, | |
| "step": 9260 | |
| }, | |
| { | |
| "epoch": 1.6630785791173306, | |
| "grad_norm": 7.584069728851318, | |
| "learning_rate": 3.338177251524937e-05, | |
| "loss": 1.3648, | |
| "step": 9270 | |
| }, | |
| { | |
| "epoch": 1.6648726228919986, | |
| "grad_norm": 8.876629829406738, | |
| "learning_rate": 3.3363832077502696e-05, | |
| "loss": 1.416, | |
| "step": 9280 | |
| }, | |
| { | |
| "epoch": 1.6666666666666665, | |
| "grad_norm": 6.557656288146973, | |
| "learning_rate": 3.3345891639756014e-05, | |
| "loss": 1.37, | |
| "step": 9290 | |
| }, | |
| { | |
| "epoch": 1.6684607104413347, | |
| "grad_norm": 6.436371803283691, | |
| "learning_rate": 3.332795120200933e-05, | |
| "loss": 1.3965, | |
| "step": 9300 | |
| }, | |
| { | |
| "epoch": 1.670254754216003, | |
| "grad_norm": 11.4998197555542, | |
| "learning_rate": 3.331001076426265e-05, | |
| "loss": 1.5301, | |
| "step": 9310 | |
| }, | |
| { | |
| "epoch": 1.672048797990671, | |
| "grad_norm": 8.139901161193848, | |
| "learning_rate": 3.3292070326515965e-05, | |
| "loss": 1.4604, | |
| "step": 9320 | |
| }, | |
| { | |
| "epoch": 1.673842841765339, | |
| "grad_norm": 6.9658074378967285, | |
| "learning_rate": 3.327412988876929e-05, | |
| "loss": 1.3549, | |
| "step": 9330 | |
| }, | |
| { | |
| "epoch": 1.6756368855400072, | |
| "grad_norm": 7.35159158706665, | |
| "learning_rate": 3.3256189451022607e-05, | |
| "loss": 1.3491, | |
| "step": 9340 | |
| }, | |
| { | |
| "epoch": 1.6774309293146752, | |
| "grad_norm": 8.391031265258789, | |
| "learning_rate": 3.3238249013275924e-05, | |
| "loss": 1.4108, | |
| "step": 9350 | |
| }, | |
| { | |
| "epoch": 1.6792249730893434, | |
| "grad_norm": 9.44320011138916, | |
| "learning_rate": 3.322030857552925e-05, | |
| "loss": 1.46, | |
| "step": 9360 | |
| }, | |
| { | |
| "epoch": 1.6810190168640116, | |
| "grad_norm": 7.805198669433594, | |
| "learning_rate": 3.3202368137782565e-05, | |
| "loss": 1.4888, | |
| "step": 9370 | |
| }, | |
| { | |
| "epoch": 1.6828130606386797, | |
| "grad_norm": 8.305425643920898, | |
| "learning_rate": 3.318442770003588e-05, | |
| "loss": 1.4868, | |
| "step": 9380 | |
| }, | |
| { | |
| "epoch": 1.6846071044133477, | |
| "grad_norm": 7.926264762878418, | |
| "learning_rate": 3.3166487262289206e-05, | |
| "loss": 1.4545, | |
| "step": 9390 | |
| }, | |
| { | |
| "epoch": 1.6864011481880157, | |
| "grad_norm": 7.380917549133301, | |
| "learning_rate": 3.314854682454252e-05, | |
| "loss": 1.349, | |
| "step": 9400 | |
| }, | |
| { | |
| "epoch": 1.6881951919626839, | |
| "grad_norm": 8.860836029052734, | |
| "learning_rate": 3.3130606386795834e-05, | |
| "loss": 1.3751, | |
| "step": 9410 | |
| }, | |
| { | |
| "epoch": 1.689989235737352, | |
| "grad_norm": 9.027103424072266, | |
| "learning_rate": 3.311266594904916e-05, | |
| "loss": 1.4403, | |
| "step": 9420 | |
| }, | |
| { | |
| "epoch": 1.6917832795120202, | |
| "grad_norm": 7.279397010803223, | |
| "learning_rate": 3.3094725511302475e-05, | |
| "loss": 1.3976, | |
| "step": 9430 | |
| }, | |
| { | |
| "epoch": 1.6935773232866882, | |
| "grad_norm": 8.56206226348877, | |
| "learning_rate": 3.307678507355579e-05, | |
| "loss": 1.3561, | |
| "step": 9440 | |
| }, | |
| { | |
| "epoch": 1.6953713670613562, | |
| "grad_norm": 7.4955925941467285, | |
| "learning_rate": 3.305884463580912e-05, | |
| "loss": 1.4076, | |
| "step": 9450 | |
| }, | |
| { | |
| "epoch": 1.6971654108360243, | |
| "grad_norm": 7.064661026000977, | |
| "learning_rate": 3.3040904198062434e-05, | |
| "loss": 1.3777, | |
| "step": 9460 | |
| }, | |
| { | |
| "epoch": 1.6989594546106925, | |
| "grad_norm": 6.131346702575684, | |
| "learning_rate": 3.302296376031576e-05, | |
| "loss": 1.3927, | |
| "step": 9470 | |
| }, | |
| { | |
| "epoch": 1.7007534983853607, | |
| "grad_norm": 7.0382866859436035, | |
| "learning_rate": 3.3005023322569075e-05, | |
| "loss": 1.3617, | |
| "step": 9480 | |
| }, | |
| { | |
| "epoch": 1.7025475421600287, | |
| "grad_norm": 6.852908134460449, | |
| "learning_rate": 3.298708288482239e-05, | |
| "loss": 1.3312, | |
| "step": 9490 | |
| }, | |
| { | |
| "epoch": 1.7043415859346966, | |
| "grad_norm": 8.102025985717773, | |
| "learning_rate": 3.296914244707571e-05, | |
| "loss": 1.4041, | |
| "step": 9500 | |
| }, | |
| { | |
| "epoch": 1.7061356297093648, | |
| "grad_norm": 6.2295403480529785, | |
| "learning_rate": 3.295120200932903e-05, | |
| "loss": 1.4131, | |
| "step": 9510 | |
| }, | |
| { | |
| "epoch": 1.707929673484033, | |
| "grad_norm": 6.514602184295654, | |
| "learning_rate": 3.2933261571582344e-05, | |
| "loss": 1.3411, | |
| "step": 9520 | |
| }, | |
| { | |
| "epoch": 1.7097237172587012, | |
| "grad_norm": 8.146480560302734, | |
| "learning_rate": 3.291532113383567e-05, | |
| "loss": 1.4027, | |
| "step": 9530 | |
| }, | |
| { | |
| "epoch": 1.7115177610333692, | |
| "grad_norm": 7.84113883972168, | |
| "learning_rate": 3.2897380696088986e-05, | |
| "loss": 1.4448, | |
| "step": 9540 | |
| }, | |
| { | |
| "epoch": 1.7133118048080374, | |
| "grad_norm": 7.640854835510254, | |
| "learning_rate": 3.28794402583423e-05, | |
| "loss": 1.4225, | |
| "step": 9550 | |
| }, | |
| { | |
| "epoch": 1.7151058485827053, | |
| "grad_norm": 8.230134963989258, | |
| "learning_rate": 3.286149982059563e-05, | |
| "loss": 1.4214, | |
| "step": 9560 | |
| }, | |
| { | |
| "epoch": 1.7168998923573735, | |
| "grad_norm": 9.04704761505127, | |
| "learning_rate": 3.2843559382848944e-05, | |
| "loss": 1.3879, | |
| "step": 9570 | |
| }, | |
| { | |
| "epoch": 1.7186939361320417, | |
| "grad_norm": 7.6292619705200195, | |
| "learning_rate": 3.282561894510226e-05, | |
| "loss": 1.4369, | |
| "step": 9580 | |
| }, | |
| { | |
| "epoch": 1.7204879799067099, | |
| "grad_norm": 7.765014171600342, | |
| "learning_rate": 3.2807678507355585e-05, | |
| "loss": 1.4207, | |
| "step": 9590 | |
| }, | |
| { | |
| "epoch": 1.7222820236813778, | |
| "grad_norm": 8.163656234741211, | |
| "learning_rate": 3.27897380696089e-05, | |
| "loss": 1.5078, | |
| "step": 9600 | |
| }, | |
| { | |
| "epoch": 1.7240760674560458, | |
| "grad_norm": 9.697617530822754, | |
| "learning_rate": 3.277179763186222e-05, | |
| "loss": 1.4383, | |
| "step": 9610 | |
| }, | |
| { | |
| "epoch": 1.725870111230714, | |
| "grad_norm": 6.583641052246094, | |
| "learning_rate": 3.275385719411554e-05, | |
| "loss": 1.4345, | |
| "step": 9620 | |
| }, | |
| { | |
| "epoch": 1.7276641550053822, | |
| "grad_norm": 7.1716437339782715, | |
| "learning_rate": 3.2735916756368855e-05, | |
| "loss": 1.4338, | |
| "step": 9630 | |
| }, | |
| { | |
| "epoch": 1.7294581987800504, | |
| "grad_norm": 8.799662590026855, | |
| "learning_rate": 3.271797631862217e-05, | |
| "loss": 1.4069, | |
| "step": 9640 | |
| }, | |
| { | |
| "epoch": 1.7312522425547183, | |
| "grad_norm": 8.040757179260254, | |
| "learning_rate": 3.2700035880875496e-05, | |
| "loss": 1.5181, | |
| "step": 9650 | |
| }, | |
| { | |
| "epoch": 1.7330462863293863, | |
| "grad_norm": 8.507600784301758, | |
| "learning_rate": 3.268209544312881e-05, | |
| "loss": 1.4489, | |
| "step": 9660 | |
| }, | |
| { | |
| "epoch": 1.7348403301040545, | |
| "grad_norm": 7.433869361877441, | |
| "learning_rate": 3.266415500538214e-05, | |
| "loss": 1.3749, | |
| "step": 9670 | |
| }, | |
| { | |
| "epoch": 1.7366343738787227, | |
| "grad_norm": 9.173955917358398, | |
| "learning_rate": 3.2646214567635454e-05, | |
| "loss": 1.325, | |
| "step": 9680 | |
| }, | |
| { | |
| "epoch": 1.7384284176533908, | |
| "grad_norm": 8.32906436920166, | |
| "learning_rate": 3.262827412988877e-05, | |
| "loss": 1.4502, | |
| "step": 9690 | |
| }, | |
| { | |
| "epoch": 1.7402224614280588, | |
| "grad_norm": 7.358117580413818, | |
| "learning_rate": 3.261033369214209e-05, | |
| "loss": 1.3309, | |
| "step": 9700 | |
| }, | |
| { | |
| "epoch": 1.742016505202727, | |
| "grad_norm": 6.05413293838501, | |
| "learning_rate": 3.2592393254395406e-05, | |
| "loss": 1.4092, | |
| "step": 9710 | |
| }, | |
| { | |
| "epoch": 1.743810548977395, | |
| "grad_norm": 6.9378132820129395, | |
| "learning_rate": 3.2574452816648723e-05, | |
| "loss": 1.3743, | |
| "step": 9720 | |
| }, | |
| { | |
| "epoch": 1.7456045927520631, | |
| "grad_norm": 8.749573707580566, | |
| "learning_rate": 3.255830642267672e-05, | |
| "loss": 1.3122, | |
| "step": 9730 | |
| }, | |
| { | |
| "epoch": 1.7473986365267313, | |
| "grad_norm": 10.618117332458496, | |
| "learning_rate": 3.2540365984930035e-05, | |
| "loss": 1.4237, | |
| "step": 9740 | |
| }, | |
| { | |
| "epoch": 1.7491926803013995, | |
| "grad_norm": 7.997283935546875, | |
| "learning_rate": 3.252242554718335e-05, | |
| "loss": 1.3925, | |
| "step": 9750 | |
| }, | |
| { | |
| "epoch": 1.7509867240760675, | |
| "grad_norm": 8.978703498840332, | |
| "learning_rate": 3.250448510943667e-05, | |
| "loss": 1.3118, | |
| "step": 9760 | |
| }, | |
| { | |
| "epoch": 1.7527807678507354, | |
| "grad_norm": 7.240395545959473, | |
| "learning_rate": 3.248654467168999e-05, | |
| "loss": 1.3387, | |
| "step": 9770 | |
| }, | |
| { | |
| "epoch": 1.7545748116254036, | |
| "grad_norm": 8.210497856140137, | |
| "learning_rate": 3.246860423394331e-05, | |
| "loss": 1.3689, | |
| "step": 9780 | |
| }, | |
| { | |
| "epoch": 1.7563688554000718, | |
| "grad_norm": 7.8586344718933105, | |
| "learning_rate": 3.245066379619663e-05, | |
| "loss": 1.383, | |
| "step": 9790 | |
| }, | |
| { | |
| "epoch": 1.75816289917474, | |
| "grad_norm": 10.08283805847168, | |
| "learning_rate": 3.2432723358449945e-05, | |
| "loss": 1.4394, | |
| "step": 9800 | |
| }, | |
| { | |
| "epoch": 1.759956942949408, | |
| "grad_norm": 9.116758346557617, | |
| "learning_rate": 3.241478292070327e-05, | |
| "loss": 1.423, | |
| "step": 9810 | |
| }, | |
| { | |
| "epoch": 1.761750986724076, | |
| "grad_norm": 7.434940814971924, | |
| "learning_rate": 3.239684248295659e-05, | |
| "loss": 1.3744, | |
| "step": 9820 | |
| }, | |
| { | |
| "epoch": 1.7635450304987441, | |
| "grad_norm": 10.682246208190918, | |
| "learning_rate": 3.2378902045209904e-05, | |
| "loss": 1.3878, | |
| "step": 9830 | |
| }, | |
| { | |
| "epoch": 1.7653390742734123, | |
| "grad_norm": 6.566300868988037, | |
| "learning_rate": 3.236096160746323e-05, | |
| "loss": 1.431, | |
| "step": 9840 | |
| }, | |
| { | |
| "epoch": 1.7671331180480805, | |
| "grad_norm": 6.842972278594971, | |
| "learning_rate": 3.2343021169716545e-05, | |
| "loss": 1.4323, | |
| "step": 9850 | |
| }, | |
| { | |
| "epoch": 1.7689271618227485, | |
| "grad_norm": 6.9640374183654785, | |
| "learning_rate": 3.2325080731969856e-05, | |
| "loss": 1.3759, | |
| "step": 9860 | |
| }, | |
| { | |
| "epoch": 1.7707212055974164, | |
| "grad_norm": 7.9980363845825195, | |
| "learning_rate": 3.230714029422318e-05, | |
| "loss": 1.3429, | |
| "step": 9870 | |
| }, | |
| { | |
| "epoch": 1.7725152493720846, | |
| "grad_norm": 7.928410530090332, | |
| "learning_rate": 3.22891998564765e-05, | |
| "loss": 1.3738, | |
| "step": 9880 | |
| }, | |
| { | |
| "epoch": 1.7743092931467528, | |
| "grad_norm": 8.214715003967285, | |
| "learning_rate": 3.227125941872982e-05, | |
| "loss": 1.4069, | |
| "step": 9890 | |
| }, | |
| { | |
| "epoch": 1.776103336921421, | |
| "grad_norm": 7.336577415466309, | |
| "learning_rate": 3.225331898098314e-05, | |
| "loss": 1.4363, | |
| "step": 9900 | |
| }, | |
| { | |
| "epoch": 1.777897380696089, | |
| "grad_norm": 13.41515064239502, | |
| "learning_rate": 3.2235378543236456e-05, | |
| "loss": 1.4401, | |
| "step": 9910 | |
| }, | |
| { | |
| "epoch": 1.7796914244707571, | |
| "grad_norm": 8.566363334655762, | |
| "learning_rate": 3.221743810548978e-05, | |
| "loss": 1.4079, | |
| "step": 9920 | |
| }, | |
| { | |
| "epoch": 1.781485468245425, | |
| "grad_norm": 8.366653442382812, | |
| "learning_rate": 3.21994976677431e-05, | |
| "loss": 1.3635, | |
| "step": 9930 | |
| }, | |
| { | |
| "epoch": 1.7832795120200933, | |
| "grad_norm": 8.311139106750488, | |
| "learning_rate": 3.2181557229996414e-05, | |
| "loss": 1.4176, | |
| "step": 9940 | |
| }, | |
| { | |
| "epoch": 1.7850735557947615, | |
| "grad_norm": 9.073042869567871, | |
| "learning_rate": 3.216361679224973e-05, | |
| "loss": 1.4217, | |
| "step": 9950 | |
| }, | |
| { | |
| "epoch": 1.7868675995694296, | |
| "grad_norm": 8.806724548339844, | |
| "learning_rate": 3.214567635450305e-05, | |
| "loss": 1.3746, | |
| "step": 9960 | |
| }, | |
| { | |
| "epoch": 1.7886616433440976, | |
| "grad_norm": 6.92349910736084, | |
| "learning_rate": 3.2127735916756366e-05, | |
| "loss": 1.3341, | |
| "step": 9970 | |
| }, | |
| { | |
| "epoch": 1.7904556871187656, | |
| "grad_norm": 6.854549884796143, | |
| "learning_rate": 3.210979547900969e-05, | |
| "loss": 1.3557, | |
| "step": 9980 | |
| }, | |
| { | |
| "epoch": 1.7922497308934338, | |
| "grad_norm": 6.593012809753418, | |
| "learning_rate": 3.209185504126301e-05, | |
| "loss": 1.4896, | |
| "step": 9990 | |
| }, | |
| { | |
| "epoch": 1.794043774668102, | |
| "grad_norm": 7.10095739364624, | |
| "learning_rate": 3.2073914603516324e-05, | |
| "loss": 1.3983, | |
| "step": 10000 | |
| }, | |
| { | |
| "epoch": 1.7958378184427701, | |
| "grad_norm": 7.834686756134033, | |
| "learning_rate": 3.205597416576965e-05, | |
| "loss": 1.3975, | |
| "step": 10010 | |
| }, | |
| { | |
| "epoch": 1.797631862217438, | |
| "grad_norm": 6.41478157043457, | |
| "learning_rate": 3.2038033728022966e-05, | |
| "loss": 1.3854, | |
| "step": 10020 | |
| }, | |
| { | |
| "epoch": 1.799425905992106, | |
| "grad_norm": 6.298712253570557, | |
| "learning_rate": 3.202009329027628e-05, | |
| "loss": 1.3197, | |
| "step": 10030 | |
| }, | |
| { | |
| "epoch": 1.8012199497667742, | |
| "grad_norm": 8.800332069396973, | |
| "learning_rate": 3.200215285252961e-05, | |
| "loss": 1.3579, | |
| "step": 10040 | |
| }, | |
| { | |
| "epoch": 1.8030139935414424, | |
| "grad_norm": 7.364461898803711, | |
| "learning_rate": 3.1984212414782924e-05, | |
| "loss": 1.3631, | |
| "step": 10050 | |
| }, | |
| { | |
| "epoch": 1.8048080373161106, | |
| "grad_norm": 6.772685527801514, | |
| "learning_rate": 3.196627197703624e-05, | |
| "loss": 1.2572, | |
| "step": 10060 | |
| }, | |
| { | |
| "epoch": 1.8066020810907786, | |
| "grad_norm": 7.126546382904053, | |
| "learning_rate": 3.194833153928956e-05, | |
| "loss": 1.3785, | |
| "step": 10070 | |
| }, | |
| { | |
| "epoch": 1.8083961248654468, | |
| "grad_norm": 8.27735424041748, | |
| "learning_rate": 3.1930391101542876e-05, | |
| "loss": 1.3255, | |
| "step": 10080 | |
| }, | |
| { | |
| "epoch": 1.8101901686401147, | |
| "grad_norm": 9.396734237670898, | |
| "learning_rate": 3.19124506637962e-05, | |
| "loss": 1.3753, | |
| "step": 10090 | |
| }, | |
| { | |
| "epoch": 1.811984212414783, | |
| "grad_norm": 6.9930267333984375, | |
| "learning_rate": 3.189451022604952e-05, | |
| "loss": 1.4313, | |
| "step": 10100 | |
| }, | |
| { | |
| "epoch": 1.813778256189451, | |
| "grad_norm": 8.459685325622559, | |
| "learning_rate": 3.1876569788302835e-05, | |
| "loss": 1.3857, | |
| "step": 10110 | |
| }, | |
| { | |
| "epoch": 1.8155722999641193, | |
| "grad_norm": 9.824789047241211, | |
| "learning_rate": 3.185862935055616e-05, | |
| "loss": 1.4138, | |
| "step": 10120 | |
| }, | |
| { | |
| "epoch": 1.8173663437387872, | |
| "grad_norm": 6.947724342346191, | |
| "learning_rate": 3.1840688912809476e-05, | |
| "loss": 1.3951, | |
| "step": 10130 | |
| }, | |
| { | |
| "epoch": 1.8191603875134552, | |
| "grad_norm": 7.63868522644043, | |
| "learning_rate": 3.182274847506279e-05, | |
| "loss": 1.3397, | |
| "step": 10140 | |
| }, | |
| { | |
| "epoch": 1.8209544312881234, | |
| "grad_norm": 7.462377071380615, | |
| "learning_rate": 3.180480803731612e-05, | |
| "loss": 1.3776, | |
| "step": 10150 | |
| }, | |
| { | |
| "epoch": 1.8227484750627916, | |
| "grad_norm": 8.488241195678711, | |
| "learning_rate": 3.178686759956943e-05, | |
| "loss": 1.3902, | |
| "step": 10160 | |
| }, | |
| { | |
| "epoch": 1.8245425188374598, | |
| "grad_norm": 7.8031158447265625, | |
| "learning_rate": 3.1768927161822745e-05, | |
| "loss": 1.4022, | |
| "step": 10170 | |
| }, | |
| { | |
| "epoch": 1.8263365626121277, | |
| "grad_norm": 8.310538291931152, | |
| "learning_rate": 3.175098672407607e-05, | |
| "loss": 1.4432, | |
| "step": 10180 | |
| }, | |
| { | |
| "epoch": 1.8281306063867957, | |
| "grad_norm": 5.958616733551025, | |
| "learning_rate": 3.1733046286329386e-05, | |
| "loss": 1.4089, | |
| "step": 10190 | |
| }, | |
| { | |
| "epoch": 1.8299246501614639, | |
| "grad_norm": 8.99071216583252, | |
| "learning_rate": 3.1715105848582703e-05, | |
| "loss": 1.4212, | |
| "step": 10200 | |
| }, | |
| { | |
| "epoch": 1.831718693936132, | |
| "grad_norm": 9.389041900634766, | |
| "learning_rate": 3.169716541083603e-05, | |
| "loss": 1.3511, | |
| "step": 10210 | |
| }, | |
| { | |
| "epoch": 1.8335127377108003, | |
| "grad_norm": 9.519119262695312, | |
| "learning_rate": 3.1679224973089345e-05, | |
| "loss": 1.4521, | |
| "step": 10220 | |
| }, | |
| { | |
| "epoch": 1.8353067814854682, | |
| "grad_norm": 8.499699592590332, | |
| "learning_rate": 3.166128453534266e-05, | |
| "loss": 1.2829, | |
| "step": 10230 | |
| }, | |
| { | |
| "epoch": 1.8371008252601362, | |
| "grad_norm": 8.038819313049316, | |
| "learning_rate": 3.1643344097595986e-05, | |
| "loss": 1.4808, | |
| "step": 10240 | |
| }, | |
| { | |
| "epoch": 1.8388948690348044, | |
| "grad_norm": 9.12056827545166, | |
| "learning_rate": 3.16254036598493e-05, | |
| "loss": 1.4188, | |
| "step": 10250 | |
| }, | |
| { | |
| "epoch": 1.8406889128094726, | |
| "grad_norm": 9.239951133728027, | |
| "learning_rate": 3.160746322210262e-05, | |
| "loss": 1.3091, | |
| "step": 10260 | |
| }, | |
| { | |
| "epoch": 1.8424829565841407, | |
| "grad_norm": 7.837065696716309, | |
| "learning_rate": 3.158952278435594e-05, | |
| "loss": 1.4372, | |
| "step": 10270 | |
| }, | |
| { | |
| "epoch": 1.8442770003588087, | |
| "grad_norm": 8.48546314239502, | |
| "learning_rate": 3.1571582346609255e-05, | |
| "loss": 1.4756, | |
| "step": 10280 | |
| }, | |
| { | |
| "epoch": 1.846071044133477, | |
| "grad_norm": 7.222843647003174, | |
| "learning_rate": 3.155364190886258e-05, | |
| "loss": 1.3421, | |
| "step": 10290 | |
| }, | |
| { | |
| "epoch": 1.8478650879081449, | |
| "grad_norm": 6.328611373901367, | |
| "learning_rate": 3.1535701471115896e-05, | |
| "loss": 1.3897, | |
| "step": 10300 | |
| }, | |
| { | |
| "epoch": 1.849659131682813, | |
| "grad_norm": 7.180152416229248, | |
| "learning_rate": 3.1517761033369214e-05, | |
| "loss": 1.4222, | |
| "step": 10310 | |
| }, | |
| { | |
| "epoch": 1.8514531754574812, | |
| "grad_norm": 7.584138870239258, | |
| "learning_rate": 3.149982059562254e-05, | |
| "loss": 1.4284, | |
| "step": 10320 | |
| }, | |
| { | |
| "epoch": 1.8532472192321494, | |
| "grad_norm": 6.921727180480957, | |
| "learning_rate": 3.1481880157875855e-05, | |
| "loss": 1.3764, | |
| "step": 10330 | |
| }, | |
| { | |
| "epoch": 1.8550412630068174, | |
| "grad_norm": 7.458225250244141, | |
| "learning_rate": 3.146393972012917e-05, | |
| "loss": 1.3253, | |
| "step": 10340 | |
| }, | |
| { | |
| "epoch": 1.8568353067814853, | |
| "grad_norm": 8.784356117248535, | |
| "learning_rate": 3.1445999282382496e-05, | |
| "loss": 1.3682, | |
| "step": 10350 | |
| }, | |
| { | |
| "epoch": 1.8586293505561535, | |
| "grad_norm": 7.8843674659729, | |
| "learning_rate": 3.1428058844635813e-05, | |
| "loss": 1.3611, | |
| "step": 10360 | |
| }, | |
| { | |
| "epoch": 1.8604233943308217, | |
| "grad_norm": 6.8401198387146, | |
| "learning_rate": 3.141011840688913e-05, | |
| "loss": 1.3852, | |
| "step": 10370 | |
| }, | |
| { | |
| "epoch": 1.86221743810549, | |
| "grad_norm": 7.674398899078369, | |
| "learning_rate": 3.139217796914245e-05, | |
| "loss": 1.3522, | |
| "step": 10380 | |
| }, | |
| { | |
| "epoch": 1.8640114818801579, | |
| "grad_norm": 6.873990535736084, | |
| "learning_rate": 3.1374237531395765e-05, | |
| "loss": 1.3986, | |
| "step": 10390 | |
| }, | |
| { | |
| "epoch": 1.8658055256548258, | |
| "grad_norm": 6.411556243896484, | |
| "learning_rate": 3.135629709364908e-05, | |
| "loss": 1.3846, | |
| "step": 10400 | |
| }, | |
| { | |
| "epoch": 1.867599569429494, | |
| "grad_norm": 7.64192533493042, | |
| "learning_rate": 3.1338356655902407e-05, | |
| "loss": 1.3235, | |
| "step": 10410 | |
| }, | |
| { | |
| "epoch": 1.8693936132041622, | |
| "grad_norm": 8.541569709777832, | |
| "learning_rate": 3.1320416218155724e-05, | |
| "loss": 1.3913, | |
| "step": 10420 | |
| }, | |
| { | |
| "epoch": 1.8711876569788304, | |
| "grad_norm": 10.887429237365723, | |
| "learning_rate": 3.130247578040904e-05, | |
| "loss": 1.4691, | |
| "step": 10430 | |
| }, | |
| { | |
| "epoch": 1.8729817007534983, | |
| "grad_norm": 5.951722145080566, | |
| "learning_rate": 3.1284535342662365e-05, | |
| "loss": 1.3866, | |
| "step": 10440 | |
| }, | |
| { | |
| "epoch": 1.8747757445281665, | |
| "grad_norm": 9.592148780822754, | |
| "learning_rate": 3.126659490491568e-05, | |
| "loss": 1.4277, | |
| "step": 10450 | |
| }, | |
| { | |
| "epoch": 1.8765697883028345, | |
| "grad_norm": 5.6519060134887695, | |
| "learning_rate": 3.1248654467169e-05, | |
| "loss": 1.3481, | |
| "step": 10460 | |
| }, | |
| { | |
| "epoch": 1.8783638320775027, | |
| "grad_norm": 6.1073408126831055, | |
| "learning_rate": 3.123071402942232e-05, | |
| "loss": 1.4777, | |
| "step": 10470 | |
| }, | |
| { | |
| "epoch": 1.8801578758521709, | |
| "grad_norm": 6.2250871658325195, | |
| "learning_rate": 3.1212773591675634e-05, | |
| "loss": 1.5386, | |
| "step": 10480 | |
| }, | |
| { | |
| "epoch": 1.881951919626839, | |
| "grad_norm": 7.318933486938477, | |
| "learning_rate": 3.119483315392896e-05, | |
| "loss": 1.3898, | |
| "step": 10490 | |
| }, | |
| { | |
| "epoch": 1.883745963401507, | |
| "grad_norm": 7.27461576461792, | |
| "learning_rate": 3.1176892716182275e-05, | |
| "loss": 1.4696, | |
| "step": 10500 | |
| }, | |
| { | |
| "epoch": 1.885540007176175, | |
| "grad_norm": 7.856091022491455, | |
| "learning_rate": 3.115895227843559e-05, | |
| "loss": 1.3571, | |
| "step": 10510 | |
| }, | |
| { | |
| "epoch": 1.8873340509508432, | |
| "grad_norm": 6.904455661773682, | |
| "learning_rate": 3.114101184068892e-05, | |
| "loss": 1.386, | |
| "step": 10520 | |
| }, | |
| { | |
| "epoch": 1.8891280947255114, | |
| "grad_norm": 7.072729587554932, | |
| "learning_rate": 3.1123071402942234e-05, | |
| "loss": 1.3334, | |
| "step": 10530 | |
| }, | |
| { | |
| "epoch": 1.8909221385001795, | |
| "grad_norm": 7.027041912078857, | |
| "learning_rate": 3.110513096519555e-05, | |
| "loss": 1.3813, | |
| "step": 10540 | |
| }, | |
| { | |
| "epoch": 1.8927161822748475, | |
| "grad_norm": 8.132376670837402, | |
| "learning_rate": 3.1087190527448875e-05, | |
| "loss": 1.3923, | |
| "step": 10550 | |
| }, | |
| { | |
| "epoch": 1.8945102260495155, | |
| "grad_norm": 7.2380146980285645, | |
| "learning_rate": 3.106925008970219e-05, | |
| "loss": 1.4301, | |
| "step": 10560 | |
| }, | |
| { | |
| "epoch": 1.8963042698241837, | |
| "grad_norm": 7.411573886871338, | |
| "learning_rate": 3.105130965195551e-05, | |
| "loss": 1.4376, | |
| "step": 10570 | |
| }, | |
| { | |
| "epoch": 1.8980983135988518, | |
| "grad_norm": 8.479894638061523, | |
| "learning_rate": 3.103336921420883e-05, | |
| "loss": 1.3932, | |
| "step": 10580 | |
| }, | |
| { | |
| "epoch": 1.89989235737352, | |
| "grad_norm": 5.64423942565918, | |
| "learning_rate": 3.1015428776462144e-05, | |
| "loss": 1.4356, | |
| "step": 10590 | |
| }, | |
| { | |
| "epoch": 1.901686401148188, | |
| "grad_norm": 7.496821403503418, | |
| "learning_rate": 3.099748833871546e-05, | |
| "loss": 1.3875, | |
| "step": 10600 | |
| }, | |
| { | |
| "epoch": 1.903480444922856, | |
| "grad_norm": 8.228056907653809, | |
| "learning_rate": 3.0979547900968786e-05, | |
| "loss": 1.4076, | |
| "step": 10610 | |
| }, | |
| { | |
| "epoch": 1.9052744886975241, | |
| "grad_norm": 6.483157634735107, | |
| "learning_rate": 3.09616074632221e-05, | |
| "loss": 1.3674, | |
| "step": 10620 | |
| }, | |
| { | |
| "epoch": 1.9070685324721923, | |
| "grad_norm": 7.570273399353027, | |
| "learning_rate": 3.094366702547543e-05, | |
| "loss": 1.3251, | |
| "step": 10630 | |
| }, | |
| { | |
| "epoch": 1.9088625762468605, | |
| "grad_norm": 9.55526065826416, | |
| "learning_rate": 3.0925726587728744e-05, | |
| "loss": 1.4677, | |
| "step": 10640 | |
| }, | |
| { | |
| "epoch": 1.9106566200215285, | |
| "grad_norm": 7.437054634094238, | |
| "learning_rate": 3.090778614998206e-05, | |
| "loss": 1.3634, | |
| "step": 10650 | |
| }, | |
| { | |
| "epoch": 1.9124506637961967, | |
| "grad_norm": 8.50203800201416, | |
| "learning_rate": 3.0889845712235385e-05, | |
| "loss": 1.406, | |
| "step": 10660 | |
| }, | |
| { | |
| "epoch": 1.9142447075708646, | |
| "grad_norm": 7.820067405700684, | |
| "learning_rate": 3.08719052744887e-05, | |
| "loss": 1.4127, | |
| "step": 10670 | |
| }, | |
| { | |
| "epoch": 1.9160387513455328, | |
| "grad_norm": 8.92634105682373, | |
| "learning_rate": 3.085396483674201e-05, | |
| "loss": 1.3228, | |
| "step": 10680 | |
| }, | |
| { | |
| "epoch": 1.917832795120201, | |
| "grad_norm": 7.616683006286621, | |
| "learning_rate": 3.083602439899534e-05, | |
| "loss": 1.4236, | |
| "step": 10690 | |
| }, | |
| { | |
| "epoch": 1.9196268388948692, | |
| "grad_norm": 7.309317588806152, | |
| "learning_rate": 3.0818083961248654e-05, | |
| "loss": 1.3839, | |
| "step": 10700 | |
| }, | |
| { | |
| "epoch": 1.9214208826695371, | |
| "grad_norm": 9.383467674255371, | |
| "learning_rate": 3.080014352350197e-05, | |
| "loss": 1.4143, | |
| "step": 10710 | |
| }, | |
| { | |
| "epoch": 1.923214926444205, | |
| "grad_norm": 7.853944778442383, | |
| "learning_rate": 3.0782203085755296e-05, | |
| "loss": 1.4278, | |
| "step": 10720 | |
| }, | |
| { | |
| "epoch": 1.9250089702188733, | |
| "grad_norm": 7.185943603515625, | |
| "learning_rate": 3.076426264800861e-05, | |
| "loss": 1.3744, | |
| "step": 10730 | |
| }, | |
| { | |
| "epoch": 1.9268030139935415, | |
| "grad_norm": 6.492997646331787, | |
| "learning_rate": 3.074632221026193e-05, | |
| "loss": 1.3643, | |
| "step": 10740 | |
| }, | |
| { | |
| "epoch": 1.9285970577682097, | |
| "grad_norm": 11.078304290771484, | |
| "learning_rate": 3.0728381772515254e-05, | |
| "loss": 1.3984, | |
| "step": 10750 | |
| }, | |
| { | |
| "epoch": 1.9303911015428776, | |
| "grad_norm": 7.419142246246338, | |
| "learning_rate": 3.071044133476857e-05, | |
| "loss": 1.4019, | |
| "step": 10760 | |
| }, | |
| { | |
| "epoch": 1.9321851453175456, | |
| "grad_norm": 6.431114673614502, | |
| "learning_rate": 3.069250089702189e-05, | |
| "loss": 1.3596, | |
| "step": 10770 | |
| }, | |
| { | |
| "epoch": 1.9339791890922138, | |
| "grad_norm": 7.534803867340088, | |
| "learning_rate": 3.0674560459275206e-05, | |
| "loss": 1.3333, | |
| "step": 10780 | |
| }, | |
| { | |
| "epoch": 1.935773232866882, | |
| "grad_norm": 9.282038688659668, | |
| "learning_rate": 3.065662002152852e-05, | |
| "loss": 1.4125, | |
| "step": 10790 | |
| }, | |
| { | |
| "epoch": 1.9375672766415502, | |
| "grad_norm": 7.056236267089844, | |
| "learning_rate": 3.063867958378184e-05, | |
| "loss": 1.4697, | |
| "step": 10800 | |
| }, | |
| { | |
| "epoch": 1.9393613204162181, | |
| "grad_norm": 7.336516380310059, | |
| "learning_rate": 3.0620739146035165e-05, | |
| "loss": 1.4819, | |
| "step": 10810 | |
| }, | |
| { | |
| "epoch": 1.9411553641908863, | |
| "grad_norm": 7.675684928894043, | |
| "learning_rate": 3.060279870828848e-05, | |
| "loss": 1.2941, | |
| "step": 10820 | |
| }, | |
| { | |
| "epoch": 1.9429494079655543, | |
| "grad_norm": 6.298435211181641, | |
| "learning_rate": 3.0584858270541806e-05, | |
| "loss": 1.3083, | |
| "step": 10830 | |
| }, | |
| { | |
| "epoch": 1.9447434517402225, | |
| "grad_norm": 8.348226547241211, | |
| "learning_rate": 3.056691783279512e-05, | |
| "loss": 1.4821, | |
| "step": 10840 | |
| }, | |
| { | |
| "epoch": 1.9465374955148906, | |
| "grad_norm": 7.188017845153809, | |
| "learning_rate": 3.054897739504844e-05, | |
| "loss": 1.4011, | |
| "step": 10850 | |
| }, | |
| { | |
| "epoch": 1.9483315392895588, | |
| "grad_norm": 7.320749759674072, | |
| "learning_rate": 3.0531036957301765e-05, | |
| "loss": 1.4383, | |
| "step": 10860 | |
| }, | |
| { | |
| "epoch": 1.9501255830642268, | |
| "grad_norm": 8.11515998840332, | |
| "learning_rate": 3.051309651955508e-05, | |
| "loss": 1.4512, | |
| "step": 10870 | |
| }, | |
| { | |
| "epoch": 1.9519196268388948, | |
| "grad_norm": 5.978071689605713, | |
| "learning_rate": 3.0495156081808396e-05, | |
| "loss": 1.3383, | |
| "step": 10880 | |
| }, | |
| { | |
| "epoch": 1.953713670613563, | |
| "grad_norm": 8.691400527954102, | |
| "learning_rate": 3.047721564406172e-05, | |
| "loss": 1.3759, | |
| "step": 10890 | |
| }, | |
| { | |
| "epoch": 1.9555077143882311, | |
| "grad_norm": 7.324152946472168, | |
| "learning_rate": 3.0459275206315037e-05, | |
| "loss": 1.4581, | |
| "step": 10900 | |
| }, | |
| { | |
| "epoch": 1.9573017581628993, | |
| "grad_norm": 7.364433288574219, | |
| "learning_rate": 3.0441334768568354e-05, | |
| "loss": 1.356, | |
| "step": 10910 | |
| }, | |
| { | |
| "epoch": 1.9590958019375673, | |
| "grad_norm": 8.051295280456543, | |
| "learning_rate": 3.0423394330821675e-05, | |
| "loss": 1.3423, | |
| "step": 10920 | |
| }, | |
| { | |
| "epoch": 1.9608898457122352, | |
| "grad_norm": 7.994751930236816, | |
| "learning_rate": 3.0405453893074992e-05, | |
| "loss": 1.4558, | |
| "step": 10930 | |
| }, | |
| { | |
| "epoch": 1.9626838894869034, | |
| "grad_norm": 7.786356449127197, | |
| "learning_rate": 3.038751345532831e-05, | |
| "loss": 1.3661, | |
| "step": 10940 | |
| }, | |
| { | |
| "epoch": 1.9644779332615716, | |
| "grad_norm": 7.44194221496582, | |
| "learning_rate": 3.0369573017581633e-05, | |
| "loss": 1.3659, | |
| "step": 10950 | |
| }, | |
| { | |
| "epoch": 1.9662719770362398, | |
| "grad_norm": 9.217162132263184, | |
| "learning_rate": 3.0351632579834947e-05, | |
| "loss": 1.4716, | |
| "step": 10960 | |
| }, | |
| { | |
| "epoch": 1.9680660208109078, | |
| "grad_norm": 7.545557022094727, | |
| "learning_rate": 3.0333692142088265e-05, | |
| "loss": 1.4073, | |
| "step": 10970 | |
| }, | |
| { | |
| "epoch": 1.969860064585576, | |
| "grad_norm": 7.098927021026611, | |
| "learning_rate": 3.031575170434159e-05, | |
| "loss": 1.3973, | |
| "step": 10980 | |
| }, | |
| { | |
| "epoch": 1.971654108360244, | |
| "grad_norm": 8.295470237731934, | |
| "learning_rate": 3.0297811266594906e-05, | |
| "loss": 1.3848, | |
| "step": 10990 | |
| }, | |
| { | |
| "epoch": 1.973448152134912, | |
| "grad_norm": 5.602086544036865, | |
| "learning_rate": 3.0279870828848226e-05, | |
| "loss": 1.3453, | |
| "step": 11000 | |
| }, | |
| { | |
| "epoch": 1.9752421959095803, | |
| "grad_norm": 7.365663051605225, | |
| "learning_rate": 3.0261930391101544e-05, | |
| "loss": 1.3177, | |
| "step": 11010 | |
| }, | |
| { | |
| "epoch": 1.9770362396842482, | |
| "grad_norm": 9.722752571105957, | |
| "learning_rate": 3.024398995335486e-05, | |
| "loss": 1.4265, | |
| "step": 11020 | |
| }, | |
| { | |
| "epoch": 1.9788302834589164, | |
| "grad_norm": 7.279769420623779, | |
| "learning_rate": 3.0226049515608185e-05, | |
| "loss": 1.33, | |
| "step": 11030 | |
| }, | |
| { | |
| "epoch": 1.9806243272335844, | |
| "grad_norm": 7.57685661315918, | |
| "learning_rate": 3.0208109077861502e-05, | |
| "loss": 1.4305, | |
| "step": 11040 | |
| }, | |
| { | |
| "epoch": 1.9824183710082526, | |
| "grad_norm": 8.122096061706543, | |
| "learning_rate": 3.019016864011482e-05, | |
| "loss": 1.3327, | |
| "step": 11050 | |
| }, | |
| { | |
| "epoch": 1.9842124147829208, | |
| "grad_norm": 9.706563949584961, | |
| "learning_rate": 3.017222820236814e-05, | |
| "loss": 1.4532, | |
| "step": 11060 | |
| }, | |
| { | |
| "epoch": 1.986006458557589, | |
| "grad_norm": 9.653244972229004, | |
| "learning_rate": 3.0154287764621457e-05, | |
| "loss": 1.4331, | |
| "step": 11070 | |
| }, | |
| { | |
| "epoch": 1.987800502332257, | |
| "grad_norm": 6.598245620727539, | |
| "learning_rate": 3.0136347326874775e-05, | |
| "loss": 1.3661, | |
| "step": 11080 | |
| }, | |
| { | |
| "epoch": 1.9895945461069249, | |
| "grad_norm": 8.551617622375488, | |
| "learning_rate": 3.01184068891281e-05, | |
| "loss": 1.3247, | |
| "step": 11090 | |
| }, | |
| { | |
| "epoch": 1.991388589881593, | |
| "grad_norm": 6.94644021987915, | |
| "learning_rate": 3.0100466451381416e-05, | |
| "loss": 1.376, | |
| "step": 11100 | |
| }, | |
| { | |
| "epoch": 1.9931826336562612, | |
| "grad_norm": 8.096078872680664, | |
| "learning_rate": 3.0082526013634733e-05, | |
| "loss": 1.4114, | |
| "step": 11110 | |
| }, | |
| { | |
| "epoch": 1.9949766774309294, | |
| "grad_norm": 9.47823429107666, | |
| "learning_rate": 3.0064585575888054e-05, | |
| "loss": 1.4147, | |
| "step": 11120 | |
| }, | |
| { | |
| "epoch": 1.9967707212055974, | |
| "grad_norm": 6.910612106323242, | |
| "learning_rate": 3.004664513814137e-05, | |
| "loss": 1.365, | |
| "step": 11130 | |
| }, | |
| { | |
| "epoch": 1.9985647649802654, | |
| "grad_norm": 9.393196105957031, | |
| "learning_rate": 3.002870470039469e-05, | |
| "loss": 1.4358, | |
| "step": 11140 | |
| }, | |
| { | |
| "epoch": 2.0, | |
| "eval_loss": 1.9240386486053467, | |
| "eval_runtime": 199.4981, | |
| "eval_samples_per_second": 12.416, | |
| "eval_steps_per_second": 12.416, | |
| "step": 11148 | |
| } | |
| ], | |
| "logging_steps": 10, | |
| "max_steps": 27870, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 5, | |
| "save_steps": 500, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": false | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 6.033003108315955e+16, | |
| "train_batch_size": 4, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |