Invalid JSON: Unexpected token 'I', ..."ad_norm": Infinity, "... is not valid JSON
| { | |
| "best_global_step": 5574, | |
| "best_metric": 1.932178020477295, | |
| "best_model_checkpoint": "./best_mcqa_model/checkpoint-5574", | |
| "epoch": 1.0, | |
| "eval_steps": 500, | |
| "global_step": 5574, | |
| "is_hyper_param_search": false, | |
| "is_local_process_zero": true, | |
| "is_world_process_zero": true, | |
| "log_history": [ | |
| { | |
| "epoch": 0.0001794043774668102, | |
| "grad_norm": Infinity, | |
| "learning_rate": 5e-05, | |
| "loss": 2.0537, | |
| "step": 1 | |
| }, | |
| { | |
| "epoch": 0.001794043774668102, | |
| "grad_norm": 50.121803283691406, | |
| "learning_rate": 4.9994617868676e-05, | |
| "loss": 2.3674, | |
| "step": 10 | |
| }, | |
| { | |
| "epoch": 0.003588087549336204, | |
| "grad_norm": 25.215618133544922, | |
| "learning_rate": 4.997667743092932e-05, | |
| "loss": 2.2324, | |
| "step": 20 | |
| }, | |
| { | |
| "epoch": 0.005382131324004306, | |
| "grad_norm": 17.250703811645508, | |
| "learning_rate": 4.995873699318264e-05, | |
| "loss": 2.1936, | |
| "step": 30 | |
| }, | |
| { | |
| "epoch": 0.007176175098672408, | |
| "grad_norm": 16.687049865722656, | |
| "learning_rate": 4.994079655543596e-05, | |
| "loss": 2.1778, | |
| "step": 40 | |
| }, | |
| { | |
| "epoch": 0.00897021887334051, | |
| "grad_norm": 18.19718360900879, | |
| "learning_rate": 4.9922856117689274e-05, | |
| "loss": 1.9997, | |
| "step": 50 | |
| }, | |
| { | |
| "epoch": 0.010764262648008612, | |
| "grad_norm": 18.301307678222656, | |
| "learning_rate": 4.990491567994259e-05, | |
| "loss": 2.1654, | |
| "step": 60 | |
| }, | |
| { | |
| "epoch": 0.012558306422676713, | |
| "grad_norm": 18.938064575195312, | |
| "learning_rate": 4.988697524219591e-05, | |
| "loss": 2.1311, | |
| "step": 70 | |
| }, | |
| { | |
| "epoch": 0.014352350197344816, | |
| "grad_norm": 24.058290481567383, | |
| "learning_rate": 4.986903480444923e-05, | |
| "loss": 2.1101, | |
| "step": 80 | |
| }, | |
| { | |
| "epoch": 0.016146393972012917, | |
| "grad_norm": 20.780832290649414, | |
| "learning_rate": 4.985109436670255e-05, | |
| "loss": 2.043, | |
| "step": 90 | |
| }, | |
| { | |
| "epoch": 0.01794043774668102, | |
| "grad_norm": 18.546377182006836, | |
| "learning_rate": 4.983315392895587e-05, | |
| "loss": 2.1232, | |
| "step": 100 | |
| }, | |
| { | |
| "epoch": 0.019734481521349122, | |
| "grad_norm": 15.411616325378418, | |
| "learning_rate": 4.981521349120919e-05, | |
| "loss": 2.1469, | |
| "step": 110 | |
| }, | |
| { | |
| "epoch": 0.021528525296017224, | |
| "grad_norm": 20.646089553833008, | |
| "learning_rate": 4.979727305346251e-05, | |
| "loss": 2.2586, | |
| "step": 120 | |
| }, | |
| { | |
| "epoch": 0.023322569070685324, | |
| "grad_norm": 17.679092407226562, | |
| "learning_rate": 4.9779332615715826e-05, | |
| "loss": 2.0833, | |
| "step": 130 | |
| }, | |
| { | |
| "epoch": 0.025116612845353426, | |
| "grad_norm": 16.85576629638672, | |
| "learning_rate": 4.976139217796914e-05, | |
| "loss": 2.0967, | |
| "step": 140 | |
| }, | |
| { | |
| "epoch": 0.02691065662002153, | |
| "grad_norm": 13.570154190063477, | |
| "learning_rate": 4.974345174022246e-05, | |
| "loss": 2.1794, | |
| "step": 150 | |
| }, | |
| { | |
| "epoch": 0.02870470039468963, | |
| "grad_norm": 20.535985946655273, | |
| "learning_rate": 4.972551130247578e-05, | |
| "loss": 2.1686, | |
| "step": 160 | |
| }, | |
| { | |
| "epoch": 0.030498744169357734, | |
| "grad_norm": 16.294347763061523, | |
| "learning_rate": 4.97075708647291e-05, | |
| "loss": 2.0392, | |
| "step": 170 | |
| }, | |
| { | |
| "epoch": 0.03229278794402583, | |
| "grad_norm": 12.76968765258789, | |
| "learning_rate": 4.968963042698242e-05, | |
| "loss": 2.1039, | |
| "step": 180 | |
| }, | |
| { | |
| "epoch": 0.034086831718693936, | |
| "grad_norm": 14.646772384643555, | |
| "learning_rate": 4.967168998923574e-05, | |
| "loss": 2.0321, | |
| "step": 190 | |
| }, | |
| { | |
| "epoch": 0.03588087549336204, | |
| "grad_norm": 13.305708885192871, | |
| "learning_rate": 4.965374955148906e-05, | |
| "loss": 2.1168, | |
| "step": 200 | |
| }, | |
| { | |
| "epoch": 0.03767491926803014, | |
| "grad_norm": 14.862943649291992, | |
| "learning_rate": 4.963580911374238e-05, | |
| "loss": 2.0901, | |
| "step": 210 | |
| }, | |
| { | |
| "epoch": 0.039468963042698244, | |
| "grad_norm": 15.443259239196777, | |
| "learning_rate": 4.96178686759957e-05, | |
| "loss": 2.0921, | |
| "step": 220 | |
| }, | |
| { | |
| "epoch": 0.041263006817366346, | |
| "grad_norm": 15.883851051330566, | |
| "learning_rate": 4.959992823824902e-05, | |
| "loss": 2.2321, | |
| "step": 230 | |
| }, | |
| { | |
| "epoch": 0.04305705059203445, | |
| "grad_norm": 12.791455268859863, | |
| "learning_rate": 4.9581987800502336e-05, | |
| "loss": 2.0071, | |
| "step": 240 | |
| }, | |
| { | |
| "epoch": 0.044851094366702544, | |
| "grad_norm": 15.706707954406738, | |
| "learning_rate": 4.956404736275565e-05, | |
| "loss": 2.1264, | |
| "step": 250 | |
| }, | |
| { | |
| "epoch": 0.04664513814137065, | |
| "grad_norm": 18.551183700561523, | |
| "learning_rate": 4.954610692500897e-05, | |
| "loss": 1.8886, | |
| "step": 260 | |
| }, | |
| { | |
| "epoch": 0.04843918191603875, | |
| "grad_norm": 24.170883178710938, | |
| "learning_rate": 4.952816648726229e-05, | |
| "loss": 2.2594, | |
| "step": 270 | |
| }, | |
| { | |
| "epoch": 0.05023322569070685, | |
| "grad_norm": 12.178893089294434, | |
| "learning_rate": 4.951022604951561e-05, | |
| "loss": 2.0534, | |
| "step": 280 | |
| }, | |
| { | |
| "epoch": 0.052027269465374955, | |
| "grad_norm": 14.537866592407227, | |
| "learning_rate": 4.949228561176893e-05, | |
| "loss": 1.9462, | |
| "step": 290 | |
| }, | |
| { | |
| "epoch": 0.05382131324004306, | |
| "grad_norm": 15.807729721069336, | |
| "learning_rate": 4.9474345174022246e-05, | |
| "loss": 2.0243, | |
| "step": 300 | |
| }, | |
| { | |
| "epoch": 0.05561535701471116, | |
| "grad_norm": 14.98117446899414, | |
| "learning_rate": 4.945640473627557e-05, | |
| "loss": 2.0688, | |
| "step": 310 | |
| }, | |
| { | |
| "epoch": 0.05740940078937926, | |
| "grad_norm": 17.541954040527344, | |
| "learning_rate": 4.943846429852889e-05, | |
| "loss": 1.9925, | |
| "step": 320 | |
| }, | |
| { | |
| "epoch": 0.059203444564047365, | |
| "grad_norm": 9.95796012878418, | |
| "learning_rate": 4.9420523860782205e-05, | |
| "loss": 1.9715, | |
| "step": 330 | |
| }, | |
| { | |
| "epoch": 0.06099748833871547, | |
| "grad_norm": 12.977546691894531, | |
| "learning_rate": 4.940258342303553e-05, | |
| "loss": 2.0491, | |
| "step": 340 | |
| }, | |
| { | |
| "epoch": 0.06279153211338356, | |
| "grad_norm": 15.887458801269531, | |
| "learning_rate": 4.938464298528884e-05, | |
| "loss": 2.1484, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 0.06458557588805167, | |
| "grad_norm": 11.04765510559082, | |
| "learning_rate": 4.9366702547542157e-05, | |
| "loss": 2.1138, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 0.06637961966271977, | |
| "grad_norm": 13.182682991027832, | |
| "learning_rate": 4.934876210979548e-05, | |
| "loss": 2.1559, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 0.06817366343738787, | |
| "grad_norm": 11.596452713012695, | |
| "learning_rate": 4.93308216720488e-05, | |
| "loss": 2.0895, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 0.06996770721205597, | |
| "grad_norm": 13.062545776367188, | |
| "learning_rate": 4.931288123430212e-05, | |
| "loss": 2.1482, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 0.07176175098672408, | |
| "grad_norm": 12.839446067810059, | |
| "learning_rate": 4.929494079655544e-05, | |
| "loss": 2.0827, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 0.07355579476139218, | |
| "grad_norm": 11.77519702911377, | |
| "learning_rate": 4.9277000358808756e-05, | |
| "loss": 2.1411, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 0.07534983853606028, | |
| "grad_norm": 10.112687110900879, | |
| "learning_rate": 4.925905992106208e-05, | |
| "loss": 2.0311, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 0.07714388231072838, | |
| "grad_norm": 13.721075057983398, | |
| "learning_rate": 4.92411194833154e-05, | |
| "loss": 1.9533, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 0.07893792608539649, | |
| "grad_norm": 13.185443878173828, | |
| "learning_rate": 4.9223179045568715e-05, | |
| "loss": 2.1598, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 0.08073196986006459, | |
| "grad_norm": 11.687894821166992, | |
| "learning_rate": 4.920523860782203e-05, | |
| "loss": 2.2673, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 0.08252601363473269, | |
| "grad_norm": 17.916851043701172, | |
| "learning_rate": 4.918729817007535e-05, | |
| "loss": 2.1737, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 0.0843200574094008, | |
| "grad_norm": 11.431639671325684, | |
| "learning_rate": 4.916935773232867e-05, | |
| "loss": 2.1655, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 0.0861141011840689, | |
| "grad_norm": 14.9763822555542, | |
| "learning_rate": 4.915141729458199e-05, | |
| "loss": 2.0351, | |
| "step": 480 | |
| }, | |
| { | |
| "epoch": 0.087908144958737, | |
| "grad_norm": 12.372869491577148, | |
| "learning_rate": 4.913347685683531e-05, | |
| "loss": 2.2227, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 0.08970218873340509, | |
| "grad_norm": 11.000882148742676, | |
| "learning_rate": 4.9115536419088625e-05, | |
| "loss": 2.0837, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 0.09149623250807319, | |
| "grad_norm": 22.3309326171875, | |
| "learning_rate": 4.909759598134195e-05, | |
| "loss": 2.1196, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 0.0932902762827413, | |
| "grad_norm": 15.499113082885742, | |
| "learning_rate": 4.9079655543595267e-05, | |
| "loss": 2.2164, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 0.0950843200574094, | |
| "grad_norm": 9.863581657409668, | |
| "learning_rate": 4.9061715105848584e-05, | |
| "loss": 2.1818, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 0.0968783638320775, | |
| "grad_norm": 14.396673202514648, | |
| "learning_rate": 4.904377466810191e-05, | |
| "loss": 2.0059, | |
| "step": 540 | |
| }, | |
| { | |
| "epoch": 0.0986724076067456, | |
| "grad_norm": 10.207282066345215, | |
| "learning_rate": 4.9025834230355225e-05, | |
| "loss": 2.1265, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 0.1004664513814137, | |
| "grad_norm": 11.846447944641113, | |
| "learning_rate": 4.900789379260854e-05, | |
| "loss": 2.2348, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 0.10226049515608181, | |
| "grad_norm": 15.00061321258545, | |
| "learning_rate": 4.898995335486186e-05, | |
| "loss": 2.0822, | |
| "step": 570 | |
| }, | |
| { | |
| "epoch": 0.10405453893074991, | |
| "grad_norm": 19.457279205322266, | |
| "learning_rate": 4.897201291711518e-05, | |
| "loss": 2.1838, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 0.10584858270541801, | |
| "grad_norm": 9.659820556640625, | |
| "learning_rate": 4.89540724793685e-05, | |
| "loss": 1.9626, | |
| "step": 590 | |
| }, | |
| { | |
| "epoch": 0.10764262648008611, | |
| "grad_norm": 14.713663101196289, | |
| "learning_rate": 4.893613204162182e-05, | |
| "loss": 2.2248, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 0.10943667025475422, | |
| "grad_norm": 10.287516593933105, | |
| "learning_rate": 4.8918191603875135e-05, | |
| "loss": 2.1525, | |
| "step": 610 | |
| }, | |
| { | |
| "epoch": 0.11123071402942232, | |
| "grad_norm": 10.284120559692383, | |
| "learning_rate": 4.890025116612846e-05, | |
| "loss": 2.1112, | |
| "step": 620 | |
| }, | |
| { | |
| "epoch": 0.11302475780409042, | |
| "grad_norm": 9.741727828979492, | |
| "learning_rate": 4.888231072838178e-05, | |
| "loss": 2.0442, | |
| "step": 630 | |
| }, | |
| { | |
| "epoch": 0.11481880157875853, | |
| "grad_norm": 8.978920936584473, | |
| "learning_rate": 4.8864370290635094e-05, | |
| "loss": 2.0471, | |
| "step": 640 | |
| }, | |
| { | |
| "epoch": 0.11661284535342663, | |
| "grad_norm": 12.450641632080078, | |
| "learning_rate": 4.884642985288841e-05, | |
| "loss": 2.1083, | |
| "step": 650 | |
| }, | |
| { | |
| "epoch": 0.11840688912809473, | |
| "grad_norm": 11.721051216125488, | |
| "learning_rate": 4.882848941514173e-05, | |
| "loss": 2.2182, | |
| "step": 660 | |
| }, | |
| { | |
| "epoch": 0.12020093290276283, | |
| "grad_norm": 9.241230964660645, | |
| "learning_rate": 4.8810548977395046e-05, | |
| "loss": 2.1022, | |
| "step": 670 | |
| }, | |
| { | |
| "epoch": 0.12199497667743094, | |
| "grad_norm": 9.87799072265625, | |
| "learning_rate": 4.879260853964837e-05, | |
| "loss": 1.9777, | |
| "step": 680 | |
| }, | |
| { | |
| "epoch": 0.12378902045209902, | |
| "grad_norm": 13.317924499511719, | |
| "learning_rate": 4.877466810190169e-05, | |
| "loss": 2.0508, | |
| "step": 690 | |
| }, | |
| { | |
| "epoch": 0.12558306422676713, | |
| "grad_norm": 11.3270845413208, | |
| "learning_rate": 4.8756727664155004e-05, | |
| "loss": 2.0391, | |
| "step": 700 | |
| }, | |
| { | |
| "epoch": 0.12737710800143523, | |
| "grad_norm": 10.925955772399902, | |
| "learning_rate": 4.873878722640833e-05, | |
| "loss": 2.1051, | |
| "step": 710 | |
| }, | |
| { | |
| "epoch": 0.12917115177610333, | |
| "grad_norm": 11.26830768585205, | |
| "learning_rate": 4.8720846788661646e-05, | |
| "loss": 1.994, | |
| "step": 720 | |
| }, | |
| { | |
| "epoch": 0.13096519555077143, | |
| "grad_norm": 16.53739356994629, | |
| "learning_rate": 4.870290635091496e-05, | |
| "loss": 2.1491, | |
| "step": 730 | |
| }, | |
| { | |
| "epoch": 0.13275923932543954, | |
| "grad_norm": 11.774067878723145, | |
| "learning_rate": 4.868496591316829e-05, | |
| "loss": 2.1163, | |
| "step": 740 | |
| }, | |
| { | |
| "epoch": 0.13455328310010764, | |
| "grad_norm": 11.250940322875977, | |
| "learning_rate": 4.8667025475421604e-05, | |
| "loss": 2.1339, | |
| "step": 750 | |
| }, | |
| { | |
| "epoch": 0.13634732687477574, | |
| "grad_norm": 12.242491722106934, | |
| "learning_rate": 4.864908503767492e-05, | |
| "loss": 1.8935, | |
| "step": 760 | |
| }, | |
| { | |
| "epoch": 0.13814137064944385, | |
| "grad_norm": 9.951498985290527, | |
| "learning_rate": 4.863114459992824e-05, | |
| "loss": 1.9869, | |
| "step": 770 | |
| }, | |
| { | |
| "epoch": 0.13993541442411195, | |
| "grad_norm": 8.47032356262207, | |
| "learning_rate": 4.8613204162181556e-05, | |
| "loss": 2.008, | |
| "step": 780 | |
| }, | |
| { | |
| "epoch": 0.14172945819878005, | |
| "grad_norm": 10.949905395507812, | |
| "learning_rate": 4.859526372443488e-05, | |
| "loss": 2.1019, | |
| "step": 790 | |
| }, | |
| { | |
| "epoch": 0.14352350197344815, | |
| "grad_norm": 11.062277793884277, | |
| "learning_rate": 4.85773232866882e-05, | |
| "loss": 2.1903, | |
| "step": 800 | |
| }, | |
| { | |
| "epoch": 0.14531754574811626, | |
| "grad_norm": 16.415149688720703, | |
| "learning_rate": 4.8559382848941514e-05, | |
| "loss": 2.1214, | |
| "step": 810 | |
| }, | |
| { | |
| "epoch": 0.14711158952278436, | |
| "grad_norm": 10.11135196685791, | |
| "learning_rate": 4.854144241119484e-05, | |
| "loss": 1.8267, | |
| "step": 820 | |
| }, | |
| { | |
| "epoch": 0.14890563329745246, | |
| "grad_norm": 10.645978927612305, | |
| "learning_rate": 4.8523501973448156e-05, | |
| "loss": 2.0422, | |
| "step": 830 | |
| }, | |
| { | |
| "epoch": 0.15069967707212056, | |
| "grad_norm": 8.619297981262207, | |
| "learning_rate": 4.850556153570147e-05, | |
| "loss": 2.2584, | |
| "step": 840 | |
| }, | |
| { | |
| "epoch": 0.15249372084678867, | |
| "grad_norm": 9.050743103027344, | |
| "learning_rate": 4.84876210979548e-05, | |
| "loss": 2.0256, | |
| "step": 850 | |
| }, | |
| { | |
| "epoch": 0.15428776462145677, | |
| "grad_norm": 10.283220291137695, | |
| "learning_rate": 4.8469680660208114e-05, | |
| "loss": 2.0351, | |
| "step": 860 | |
| }, | |
| { | |
| "epoch": 0.15608180839612487, | |
| "grad_norm": 10.877766609191895, | |
| "learning_rate": 4.8451740222461425e-05, | |
| "loss": 2.0931, | |
| "step": 870 | |
| }, | |
| { | |
| "epoch": 0.15787585217079297, | |
| "grad_norm": 13.12762451171875, | |
| "learning_rate": 4.843379978471475e-05, | |
| "loss": 2.1018, | |
| "step": 880 | |
| }, | |
| { | |
| "epoch": 0.15966989594546108, | |
| "grad_norm": 9.463711738586426, | |
| "learning_rate": 4.8415859346968066e-05, | |
| "loss": 2.0868, | |
| "step": 890 | |
| }, | |
| { | |
| "epoch": 0.16146393972012918, | |
| "grad_norm": 7.113370418548584, | |
| "learning_rate": 4.839791890922138e-05, | |
| "loss": 2.0611, | |
| "step": 900 | |
| }, | |
| { | |
| "epoch": 0.16325798349479728, | |
| "grad_norm": 11.066067695617676, | |
| "learning_rate": 4.837997847147471e-05, | |
| "loss": 2.0621, | |
| "step": 910 | |
| }, | |
| { | |
| "epoch": 0.16505202726946538, | |
| "grad_norm": 12.794750213623047, | |
| "learning_rate": 4.8362038033728025e-05, | |
| "loss": 2.0361, | |
| "step": 920 | |
| }, | |
| { | |
| "epoch": 0.1668460710441335, | |
| "grad_norm": 10.758418083190918, | |
| "learning_rate": 4.834409759598135e-05, | |
| "loss": 1.9764, | |
| "step": 930 | |
| }, | |
| { | |
| "epoch": 0.1686401148188016, | |
| "grad_norm": 13.04681396484375, | |
| "learning_rate": 4.8326157158234666e-05, | |
| "loss": 2.0678, | |
| "step": 940 | |
| }, | |
| { | |
| "epoch": 0.1704341585934697, | |
| "grad_norm": 12.170126914978027, | |
| "learning_rate": 4.830821672048798e-05, | |
| "loss": 2.1812, | |
| "step": 950 | |
| }, | |
| { | |
| "epoch": 0.1722282023681378, | |
| "grad_norm": 9.722840309143066, | |
| "learning_rate": 4.82902762827413e-05, | |
| "loss": 2.0603, | |
| "step": 960 | |
| }, | |
| { | |
| "epoch": 0.1740222461428059, | |
| "grad_norm": 11.84780216217041, | |
| "learning_rate": 4.827233584499462e-05, | |
| "loss": 1.9741, | |
| "step": 970 | |
| }, | |
| { | |
| "epoch": 0.175816289917474, | |
| "grad_norm": 11.813640594482422, | |
| "learning_rate": 4.8254395407247935e-05, | |
| "loss": 2.0533, | |
| "step": 980 | |
| }, | |
| { | |
| "epoch": 0.17761033369214208, | |
| "grad_norm": 14.080793380737305, | |
| "learning_rate": 4.823645496950126e-05, | |
| "loss": 2.0094, | |
| "step": 990 | |
| }, | |
| { | |
| "epoch": 0.17940437746681018, | |
| "grad_norm": 10.01248550415039, | |
| "learning_rate": 4.8218514531754576e-05, | |
| "loss": 2.1019, | |
| "step": 1000 | |
| }, | |
| { | |
| "epoch": 0.18119842124147828, | |
| "grad_norm": 10.648508071899414, | |
| "learning_rate": 4.8200574094007894e-05, | |
| "loss": 2.018, | |
| "step": 1010 | |
| }, | |
| { | |
| "epoch": 0.18299246501614638, | |
| "grad_norm": 12.019718170166016, | |
| "learning_rate": 4.818263365626122e-05, | |
| "loss": 1.9094, | |
| "step": 1020 | |
| }, | |
| { | |
| "epoch": 0.18478650879081449, | |
| "grad_norm": 8.853078842163086, | |
| "learning_rate": 4.8164693218514535e-05, | |
| "loss": 2.1722, | |
| "step": 1030 | |
| }, | |
| { | |
| "epoch": 0.1865805525654826, | |
| "grad_norm": 8.063078880310059, | |
| "learning_rate": 4.814675278076785e-05, | |
| "loss": 2.0377, | |
| "step": 1040 | |
| }, | |
| { | |
| "epoch": 0.1883745963401507, | |
| "grad_norm": 9.952075958251953, | |
| "learning_rate": 4.8128812343021176e-05, | |
| "loss": 2.1291, | |
| "step": 1050 | |
| }, | |
| { | |
| "epoch": 0.1901686401148188, | |
| "grad_norm": 10.393296241760254, | |
| "learning_rate": 4.811087190527449e-05, | |
| "loss": 2.0707, | |
| "step": 1060 | |
| }, | |
| { | |
| "epoch": 0.1919626838894869, | |
| "grad_norm": 10.250103950500488, | |
| "learning_rate": 4.809293146752781e-05, | |
| "loss": 2.0411, | |
| "step": 1070 | |
| }, | |
| { | |
| "epoch": 0.193756727664155, | |
| "grad_norm": 10.035394668579102, | |
| "learning_rate": 4.807499102978113e-05, | |
| "loss": 1.9696, | |
| "step": 1080 | |
| }, | |
| { | |
| "epoch": 0.1955507714388231, | |
| "grad_norm": 12.22006893157959, | |
| "learning_rate": 4.8057050592034445e-05, | |
| "loss": 2.0681, | |
| "step": 1090 | |
| }, | |
| { | |
| "epoch": 0.1973448152134912, | |
| "grad_norm": 9.505834579467773, | |
| "learning_rate": 4.803911015428776e-05, | |
| "loss": 2.0469, | |
| "step": 1100 | |
| }, | |
| { | |
| "epoch": 0.1991388589881593, | |
| "grad_norm": 8.87895393371582, | |
| "learning_rate": 4.8021169716541086e-05, | |
| "loss": 2.1125, | |
| "step": 1110 | |
| }, | |
| { | |
| "epoch": 0.2009329027628274, | |
| "grad_norm": 8.545978546142578, | |
| "learning_rate": 4.8003229278794404e-05, | |
| "loss": 2.008, | |
| "step": 1120 | |
| }, | |
| { | |
| "epoch": 0.2027269465374955, | |
| "grad_norm": 9.056685447692871, | |
| "learning_rate": 4.798528884104773e-05, | |
| "loss": 1.9602, | |
| "step": 1130 | |
| }, | |
| { | |
| "epoch": 0.20452099031216361, | |
| "grad_norm": 15.332698822021484, | |
| "learning_rate": 4.7967348403301045e-05, | |
| "loss": 1.8802, | |
| "step": 1140 | |
| }, | |
| { | |
| "epoch": 0.20631503408683172, | |
| "grad_norm": 12.204120635986328, | |
| "learning_rate": 4.794940796555436e-05, | |
| "loss": 1.9994, | |
| "step": 1150 | |
| }, | |
| { | |
| "epoch": 0.20810907786149982, | |
| "grad_norm": 10.399602890014648, | |
| "learning_rate": 4.7931467527807686e-05, | |
| "loss": 2.1549, | |
| "step": 1160 | |
| }, | |
| { | |
| "epoch": 0.20990312163616792, | |
| "grad_norm": 9.073091506958008, | |
| "learning_rate": 4.7913527090061e-05, | |
| "loss": 2.1483, | |
| "step": 1170 | |
| }, | |
| { | |
| "epoch": 0.21169716541083602, | |
| "grad_norm": 11.733457565307617, | |
| "learning_rate": 4.7895586652314314e-05, | |
| "loss": 2.1155, | |
| "step": 1180 | |
| }, | |
| { | |
| "epoch": 0.21349120918550413, | |
| "grad_norm": 7.083014011383057, | |
| "learning_rate": 4.787764621456764e-05, | |
| "loss": 2.0738, | |
| "step": 1190 | |
| }, | |
| { | |
| "epoch": 0.21528525296017223, | |
| "grad_norm": 11.031323432922363, | |
| "learning_rate": 4.7859705776820955e-05, | |
| "loss": 2.1221, | |
| "step": 1200 | |
| }, | |
| { | |
| "epoch": 0.21707929673484033, | |
| "grad_norm": 9.98110580444336, | |
| "learning_rate": 4.784176533907427e-05, | |
| "loss": 2.0356, | |
| "step": 1210 | |
| }, | |
| { | |
| "epoch": 0.21887334050950844, | |
| "grad_norm": 13.873873710632324, | |
| "learning_rate": 4.78238249013276e-05, | |
| "loss": 2.0523, | |
| "step": 1220 | |
| }, | |
| { | |
| "epoch": 0.22066738428417654, | |
| "grad_norm": 10.365226745605469, | |
| "learning_rate": 4.7805884463580914e-05, | |
| "loss": 2.0989, | |
| "step": 1230 | |
| }, | |
| { | |
| "epoch": 0.22246142805884464, | |
| "grad_norm": 9.975032806396484, | |
| "learning_rate": 4.778794402583423e-05, | |
| "loss": 2.0278, | |
| "step": 1240 | |
| }, | |
| { | |
| "epoch": 0.22425547183351274, | |
| "grad_norm": 10.711677551269531, | |
| "learning_rate": 4.7770003588087555e-05, | |
| "loss": 2.1347, | |
| "step": 1250 | |
| }, | |
| { | |
| "epoch": 0.22604951560818085, | |
| "grad_norm": 8.741484642028809, | |
| "learning_rate": 4.775206315034087e-05, | |
| "loss": 2.0043, | |
| "step": 1260 | |
| }, | |
| { | |
| "epoch": 0.22784355938284895, | |
| "grad_norm": 11.853199005126953, | |
| "learning_rate": 4.773412271259419e-05, | |
| "loss": 2.1624, | |
| "step": 1270 | |
| }, | |
| { | |
| "epoch": 0.22963760315751705, | |
| "grad_norm": 8.458366394042969, | |
| "learning_rate": 4.771618227484751e-05, | |
| "loss": 2.0635, | |
| "step": 1280 | |
| }, | |
| { | |
| "epoch": 0.23143164693218515, | |
| "grad_norm": 9.640998840332031, | |
| "learning_rate": 4.7698241837100824e-05, | |
| "loss": 1.9351, | |
| "step": 1290 | |
| }, | |
| { | |
| "epoch": 0.23322569070685326, | |
| "grad_norm": 11.35208511352539, | |
| "learning_rate": 4.768030139935415e-05, | |
| "loss": 2.0016, | |
| "step": 1300 | |
| }, | |
| { | |
| "epoch": 0.23501973448152136, | |
| "grad_norm": 12.047490119934082, | |
| "learning_rate": 4.7662360961607465e-05, | |
| "loss": 1.9982, | |
| "step": 1310 | |
| }, | |
| { | |
| "epoch": 0.23681377825618946, | |
| "grad_norm": 7.340638637542725, | |
| "learning_rate": 4.764442052386078e-05, | |
| "loss": 1.9317, | |
| "step": 1320 | |
| }, | |
| { | |
| "epoch": 0.23860782203085756, | |
| "grad_norm": 9.259571075439453, | |
| "learning_rate": 4.762648008611411e-05, | |
| "loss": 1.867, | |
| "step": 1330 | |
| }, | |
| { | |
| "epoch": 0.24040186580552567, | |
| "grad_norm": 10.415928840637207, | |
| "learning_rate": 4.7608539648367424e-05, | |
| "loss": 1.8926, | |
| "step": 1340 | |
| }, | |
| { | |
| "epoch": 0.24219590958019377, | |
| "grad_norm": 10.523111343383789, | |
| "learning_rate": 4.759059921062074e-05, | |
| "loss": 1.9432, | |
| "step": 1350 | |
| }, | |
| { | |
| "epoch": 0.24398995335486187, | |
| "grad_norm": 10.31205940246582, | |
| "learning_rate": 4.7572658772874065e-05, | |
| "loss": 2.0679, | |
| "step": 1360 | |
| }, | |
| { | |
| "epoch": 0.24578399712952997, | |
| "grad_norm": 11.592795372009277, | |
| "learning_rate": 4.755471833512738e-05, | |
| "loss": 1.8786, | |
| "step": 1370 | |
| }, | |
| { | |
| "epoch": 0.24757804090419805, | |
| "grad_norm": 13.131718635559082, | |
| "learning_rate": 4.75367778973807e-05, | |
| "loss": 2.1331, | |
| "step": 1380 | |
| }, | |
| { | |
| "epoch": 0.24937208467886615, | |
| "grad_norm": 8.448001861572266, | |
| "learning_rate": 4.751883745963402e-05, | |
| "loss": 2.2519, | |
| "step": 1390 | |
| }, | |
| { | |
| "epoch": 0.25116612845353425, | |
| "grad_norm": 9.400849342346191, | |
| "learning_rate": 4.7500897021887334e-05, | |
| "loss": 2.162, | |
| "step": 1400 | |
| }, | |
| { | |
| "epoch": 0.2529601722282024, | |
| "grad_norm": 9.269533157348633, | |
| "learning_rate": 4.748295658414065e-05, | |
| "loss": 2.0009, | |
| "step": 1410 | |
| }, | |
| { | |
| "epoch": 0.25475421600287046, | |
| "grad_norm": 12.824959754943848, | |
| "learning_rate": 4.7465016146393976e-05, | |
| "loss": 2.1757, | |
| "step": 1420 | |
| }, | |
| { | |
| "epoch": 0.2565482597775386, | |
| "grad_norm": 6.765061378479004, | |
| "learning_rate": 4.744707570864729e-05, | |
| "loss": 1.9529, | |
| "step": 1430 | |
| }, | |
| { | |
| "epoch": 0.25834230355220666, | |
| "grad_norm": 8.876534461975098, | |
| "learning_rate": 4.742913527090061e-05, | |
| "loss": 1.9964, | |
| "step": 1440 | |
| }, | |
| { | |
| "epoch": 0.2601363473268748, | |
| "grad_norm": 13.438302040100098, | |
| "learning_rate": 4.7411194833153934e-05, | |
| "loss": 2.0535, | |
| "step": 1450 | |
| }, | |
| { | |
| "epoch": 0.26193039110154287, | |
| "grad_norm": 11.55459213256836, | |
| "learning_rate": 4.739325439540725e-05, | |
| "loss": 2.0917, | |
| "step": 1460 | |
| }, | |
| { | |
| "epoch": 0.263724434876211, | |
| "grad_norm": 13.218855857849121, | |
| "learning_rate": 4.737531395766057e-05, | |
| "loss": 1.9388, | |
| "step": 1470 | |
| }, | |
| { | |
| "epoch": 0.2655184786508791, | |
| "grad_norm": 8.17874526977539, | |
| "learning_rate": 4.7357373519913886e-05, | |
| "loss": 1.9581, | |
| "step": 1480 | |
| }, | |
| { | |
| "epoch": 0.2673125224255472, | |
| "grad_norm": 9.68444538116455, | |
| "learning_rate": 4.73394330821672e-05, | |
| "loss": 2.0179, | |
| "step": 1490 | |
| }, | |
| { | |
| "epoch": 0.2691065662002153, | |
| "grad_norm": 10.616148948669434, | |
| "learning_rate": 4.732149264442053e-05, | |
| "loss": 2.024, | |
| "step": 1500 | |
| }, | |
| { | |
| "epoch": 0.2709006099748834, | |
| "grad_norm": 11.970083236694336, | |
| "learning_rate": 4.7303552206673845e-05, | |
| "loss": 2.1263, | |
| "step": 1510 | |
| }, | |
| { | |
| "epoch": 0.2726946537495515, | |
| "grad_norm": 9.173564910888672, | |
| "learning_rate": 4.728561176892716e-05, | |
| "loss": 1.9028, | |
| "step": 1520 | |
| }, | |
| { | |
| "epoch": 0.2744886975242196, | |
| "grad_norm": 12.469501495361328, | |
| "learning_rate": 4.7267671331180486e-05, | |
| "loss": 2.1234, | |
| "step": 1530 | |
| }, | |
| { | |
| "epoch": 0.2762827412988877, | |
| "grad_norm": 7.576513290405273, | |
| "learning_rate": 4.72497308934338e-05, | |
| "loss": 1.9832, | |
| "step": 1540 | |
| }, | |
| { | |
| "epoch": 0.2780767850735558, | |
| "grad_norm": 7.813937187194824, | |
| "learning_rate": 4.723179045568712e-05, | |
| "loss": 2.0337, | |
| "step": 1550 | |
| }, | |
| { | |
| "epoch": 0.2798708288482239, | |
| "grad_norm": 8.702999114990234, | |
| "learning_rate": 4.7213850017940444e-05, | |
| "loss": 1.8968, | |
| "step": 1560 | |
| }, | |
| { | |
| "epoch": 0.281664872622892, | |
| "grad_norm": 9.221305847167969, | |
| "learning_rate": 4.719590958019376e-05, | |
| "loss": 1.8972, | |
| "step": 1570 | |
| }, | |
| { | |
| "epoch": 0.2834589163975601, | |
| "grad_norm": 7.7957892417907715, | |
| "learning_rate": 4.717796914244708e-05, | |
| "loss": 1.985, | |
| "step": 1580 | |
| }, | |
| { | |
| "epoch": 0.2852529601722282, | |
| "grad_norm": 15.21849250793457, | |
| "learning_rate": 4.7160028704700396e-05, | |
| "loss": 1.8525, | |
| "step": 1590 | |
| }, | |
| { | |
| "epoch": 0.2870470039468963, | |
| "grad_norm": 11.758828163146973, | |
| "learning_rate": 4.7142088266953713e-05, | |
| "loss": 2.039, | |
| "step": 1600 | |
| }, | |
| { | |
| "epoch": 0.2888410477215644, | |
| "grad_norm": 9.403512001037598, | |
| "learning_rate": 4.712414782920703e-05, | |
| "loss": 2.0726, | |
| "step": 1610 | |
| }, | |
| { | |
| "epoch": 0.2906350914962325, | |
| "grad_norm": 10.458137512207031, | |
| "learning_rate": 4.7106207391460355e-05, | |
| "loss": 2.1689, | |
| "step": 1620 | |
| }, | |
| { | |
| "epoch": 0.2924291352709006, | |
| "grad_norm": 7.665112495422363, | |
| "learning_rate": 4.708826695371367e-05, | |
| "loss": 1.9077, | |
| "step": 1630 | |
| }, | |
| { | |
| "epoch": 0.2942231790455687, | |
| "grad_norm": 11.904746055603027, | |
| "learning_rate": 4.707032651596699e-05, | |
| "loss": 2.0425, | |
| "step": 1640 | |
| }, | |
| { | |
| "epoch": 0.2960172228202368, | |
| "grad_norm": 8.795477867126465, | |
| "learning_rate": 4.705238607822031e-05, | |
| "loss": 1.9992, | |
| "step": 1650 | |
| }, | |
| { | |
| "epoch": 0.2978112665949049, | |
| "grad_norm": 7.0219550132751465, | |
| "learning_rate": 4.703444564047363e-05, | |
| "loss": 1.9143, | |
| "step": 1660 | |
| }, | |
| { | |
| "epoch": 0.299605310369573, | |
| "grad_norm": 7.745930194854736, | |
| "learning_rate": 4.7016505202726955e-05, | |
| "loss": 1.8667, | |
| "step": 1670 | |
| }, | |
| { | |
| "epoch": 0.3013993541442411, | |
| "grad_norm": 7.579682350158691, | |
| "learning_rate": 4.699856476498027e-05, | |
| "loss": 2.0123, | |
| "step": 1680 | |
| }, | |
| { | |
| "epoch": 0.3031933979189092, | |
| "grad_norm": 7.5498199462890625, | |
| "learning_rate": 4.698062432723358e-05, | |
| "loss": 2.1165, | |
| "step": 1690 | |
| }, | |
| { | |
| "epoch": 0.30498744169357733, | |
| "grad_norm": 8.588129043579102, | |
| "learning_rate": 4.6962683889486906e-05, | |
| "loss": 2.0662, | |
| "step": 1700 | |
| }, | |
| { | |
| "epoch": 0.3067814854682454, | |
| "grad_norm": 6.789324760437012, | |
| "learning_rate": 4.6944743451740224e-05, | |
| "loss": 1.9604, | |
| "step": 1710 | |
| }, | |
| { | |
| "epoch": 0.30857552924291354, | |
| "grad_norm": 8.46413803100586, | |
| "learning_rate": 4.692680301399354e-05, | |
| "loss": 2.0078, | |
| "step": 1720 | |
| }, | |
| { | |
| "epoch": 0.3103695730175816, | |
| "grad_norm": 9.2481689453125, | |
| "learning_rate": 4.6908862576246865e-05, | |
| "loss": 2.0001, | |
| "step": 1730 | |
| }, | |
| { | |
| "epoch": 0.31216361679224974, | |
| "grad_norm": 9.034040451049805, | |
| "learning_rate": 4.689092213850018e-05, | |
| "loss": 2.1038, | |
| "step": 1740 | |
| }, | |
| { | |
| "epoch": 0.3139576605669178, | |
| "grad_norm": 9.660133361816406, | |
| "learning_rate": 4.68729817007535e-05, | |
| "loss": 2.0491, | |
| "step": 1750 | |
| }, | |
| { | |
| "epoch": 0.31575170434158595, | |
| "grad_norm": 8.319841384887695, | |
| "learning_rate": 4.6855041263006823e-05, | |
| "loss": 1.9269, | |
| "step": 1760 | |
| }, | |
| { | |
| "epoch": 0.317545748116254, | |
| "grad_norm": 12.393325805664062, | |
| "learning_rate": 4.683710082526014e-05, | |
| "loss": 2.0983, | |
| "step": 1770 | |
| }, | |
| { | |
| "epoch": 0.31933979189092215, | |
| "grad_norm": 9.26393985748291, | |
| "learning_rate": 4.681916038751346e-05, | |
| "loss": 1.8658, | |
| "step": 1780 | |
| }, | |
| { | |
| "epoch": 0.32113383566559023, | |
| "grad_norm": 10.711555480957031, | |
| "learning_rate": 4.6801219949766775e-05, | |
| "loss": 2.0091, | |
| "step": 1790 | |
| }, | |
| { | |
| "epoch": 0.32292787944025836, | |
| "grad_norm": 9.118010520935059, | |
| "learning_rate": 4.678327951202009e-05, | |
| "loss": 2.119, | |
| "step": 1800 | |
| }, | |
| { | |
| "epoch": 0.32472192321492643, | |
| "grad_norm": 10.101414680480957, | |
| "learning_rate": 4.676533907427341e-05, | |
| "loss": 2.1448, | |
| "step": 1810 | |
| }, | |
| { | |
| "epoch": 0.32651596698959456, | |
| "grad_norm": 11.349935531616211, | |
| "learning_rate": 4.6747398636526734e-05, | |
| "loss": 2.0042, | |
| "step": 1820 | |
| }, | |
| { | |
| "epoch": 0.32831001076426264, | |
| "grad_norm": 8.576199531555176, | |
| "learning_rate": 4.672945819878005e-05, | |
| "loss": 2.0786, | |
| "step": 1830 | |
| }, | |
| { | |
| "epoch": 0.33010405453893077, | |
| "grad_norm": 8.958368301391602, | |
| "learning_rate": 4.671151776103337e-05, | |
| "loss": 1.9364, | |
| "step": 1840 | |
| }, | |
| { | |
| "epoch": 0.33189809831359884, | |
| "grad_norm": 10.209322929382324, | |
| "learning_rate": 4.669357732328669e-05, | |
| "loss": 2.1192, | |
| "step": 1850 | |
| }, | |
| { | |
| "epoch": 0.333692142088267, | |
| "grad_norm": 10.517390251159668, | |
| "learning_rate": 4.667563688554001e-05, | |
| "loss": 1.8833, | |
| "step": 1860 | |
| }, | |
| { | |
| "epoch": 0.33548618586293505, | |
| "grad_norm": 11.347796440124512, | |
| "learning_rate": 4.6657696447793334e-05, | |
| "loss": 2.0498, | |
| "step": 1870 | |
| }, | |
| { | |
| "epoch": 0.3372802296376032, | |
| "grad_norm": 9.35456371307373, | |
| "learning_rate": 4.663975601004665e-05, | |
| "loss": 2.0158, | |
| "step": 1880 | |
| }, | |
| { | |
| "epoch": 0.33907427341227125, | |
| "grad_norm": 7.018476486206055, | |
| "learning_rate": 4.662181557229997e-05, | |
| "loss": 1.9527, | |
| "step": 1890 | |
| }, | |
| { | |
| "epoch": 0.3408683171869394, | |
| "grad_norm": 13.440537452697754, | |
| "learning_rate": 4.6603875134553285e-05, | |
| "loss": 1.9492, | |
| "step": 1900 | |
| }, | |
| { | |
| "epoch": 0.34266236096160746, | |
| "grad_norm": 7.040363788604736, | |
| "learning_rate": 4.65859346968066e-05, | |
| "loss": 2.085, | |
| "step": 1910 | |
| }, | |
| { | |
| "epoch": 0.3444564047362756, | |
| "grad_norm": 8.57967472076416, | |
| "learning_rate": 4.656799425905992e-05, | |
| "loss": 1.9639, | |
| "step": 1920 | |
| }, | |
| { | |
| "epoch": 0.34625044851094366, | |
| "grad_norm": 7.877899169921875, | |
| "learning_rate": 4.6550053821313244e-05, | |
| "loss": 1.9315, | |
| "step": 1930 | |
| }, | |
| { | |
| "epoch": 0.3480444922856118, | |
| "grad_norm": 10.133997917175293, | |
| "learning_rate": 4.653211338356656e-05, | |
| "loss": 1.9489, | |
| "step": 1940 | |
| }, | |
| { | |
| "epoch": 0.34983853606027987, | |
| "grad_norm": 10.46199893951416, | |
| "learning_rate": 4.651417294581988e-05, | |
| "loss": 2.1294, | |
| "step": 1950 | |
| }, | |
| { | |
| "epoch": 0.351632579834948, | |
| "grad_norm": 10.541289329528809, | |
| "learning_rate": 4.64962325080732e-05, | |
| "loss": 2.0334, | |
| "step": 1960 | |
| }, | |
| { | |
| "epoch": 0.3534266236096161, | |
| "grad_norm": 10.026679039001465, | |
| "learning_rate": 4.647829207032652e-05, | |
| "loss": 1.9669, | |
| "step": 1970 | |
| }, | |
| { | |
| "epoch": 0.35522066738428415, | |
| "grad_norm": 9.2806978225708, | |
| "learning_rate": 4.646035163257984e-05, | |
| "loss": 2.0065, | |
| "step": 1980 | |
| }, | |
| { | |
| "epoch": 0.3570147111589523, | |
| "grad_norm": 11.332128524780273, | |
| "learning_rate": 4.6442411194833154e-05, | |
| "loss": 2.1402, | |
| "step": 1990 | |
| }, | |
| { | |
| "epoch": 0.35880875493362036, | |
| "grad_norm": 9.25808048248291, | |
| "learning_rate": 4.642447075708647e-05, | |
| "loss": 2.1242, | |
| "step": 2000 | |
| }, | |
| { | |
| "epoch": 0.3606027987082885, | |
| "grad_norm": 9.891722679138184, | |
| "learning_rate": 4.640653031933979e-05, | |
| "loss": 2.0954, | |
| "step": 2010 | |
| }, | |
| { | |
| "epoch": 0.36239684248295656, | |
| "grad_norm": 8.027229309082031, | |
| "learning_rate": 4.638858988159311e-05, | |
| "loss": 1.94, | |
| "step": 2020 | |
| }, | |
| { | |
| "epoch": 0.3641908862576247, | |
| "grad_norm": 7.155013084411621, | |
| "learning_rate": 4.637064944384643e-05, | |
| "loss": 2.0099, | |
| "step": 2030 | |
| }, | |
| { | |
| "epoch": 0.36598493003229277, | |
| "grad_norm": 7.851609230041504, | |
| "learning_rate": 4.6352709006099754e-05, | |
| "loss": 2.0819, | |
| "step": 2040 | |
| }, | |
| { | |
| "epoch": 0.3677789738069609, | |
| "grad_norm": 9.718694686889648, | |
| "learning_rate": 4.633476856835307e-05, | |
| "loss": 2.0265, | |
| "step": 2050 | |
| }, | |
| { | |
| "epoch": 0.36957301758162897, | |
| "grad_norm": 7.794171333312988, | |
| "learning_rate": 4.631682813060639e-05, | |
| "loss": 1.9589, | |
| "step": 2060 | |
| }, | |
| { | |
| "epoch": 0.3713670613562971, | |
| "grad_norm": 9.896645545959473, | |
| "learning_rate": 4.629888769285971e-05, | |
| "loss": 1.8671, | |
| "step": 2070 | |
| }, | |
| { | |
| "epoch": 0.3731611051309652, | |
| "grad_norm": 8.257375717163086, | |
| "learning_rate": 4.628094725511303e-05, | |
| "loss": 2.0167, | |
| "step": 2080 | |
| }, | |
| { | |
| "epoch": 0.3749551489056333, | |
| "grad_norm": 9.882346153259277, | |
| "learning_rate": 4.626300681736635e-05, | |
| "loss": 2.079, | |
| "step": 2090 | |
| }, | |
| { | |
| "epoch": 0.3767491926803014, | |
| "grad_norm": 8.422935485839844, | |
| "learning_rate": 4.6245066379619664e-05, | |
| "loss": 1.998, | |
| "step": 2100 | |
| }, | |
| { | |
| "epoch": 0.3785432364549695, | |
| "grad_norm": 9.090377807617188, | |
| "learning_rate": 4.622712594187298e-05, | |
| "loss": 1.8588, | |
| "step": 2110 | |
| }, | |
| { | |
| "epoch": 0.3803372802296376, | |
| "grad_norm": 9.11992359161377, | |
| "learning_rate": 4.62091855041263e-05, | |
| "loss": 1.9268, | |
| "step": 2120 | |
| }, | |
| { | |
| "epoch": 0.3821313240043057, | |
| "grad_norm": 7.935147285461426, | |
| "learning_rate": 4.619124506637962e-05, | |
| "loss": 2.1426, | |
| "step": 2130 | |
| }, | |
| { | |
| "epoch": 0.3839253677789738, | |
| "grad_norm": 6.065644264221191, | |
| "learning_rate": 4.617330462863294e-05, | |
| "loss": 1.9426, | |
| "step": 2140 | |
| }, | |
| { | |
| "epoch": 0.3857194115536419, | |
| "grad_norm": 7.789048194885254, | |
| "learning_rate": 4.615536419088626e-05, | |
| "loss": 1.9607, | |
| "step": 2150 | |
| }, | |
| { | |
| "epoch": 0.38751345532831, | |
| "grad_norm": 10.809876441955566, | |
| "learning_rate": 4.613742375313958e-05, | |
| "loss": 2.0248, | |
| "step": 2160 | |
| }, | |
| { | |
| "epoch": 0.3893074991029781, | |
| "grad_norm": 7.871123790740967, | |
| "learning_rate": 4.61194833153929e-05, | |
| "loss": 2.0491, | |
| "step": 2170 | |
| }, | |
| { | |
| "epoch": 0.3911015428776462, | |
| "grad_norm": 10.08164119720459, | |
| "learning_rate": 4.6101542877646216e-05, | |
| "loss": 1.8583, | |
| "step": 2180 | |
| }, | |
| { | |
| "epoch": 0.39289558665231433, | |
| "grad_norm": 8.326801300048828, | |
| "learning_rate": 4.608360243989954e-05, | |
| "loss": 2.0879, | |
| "step": 2190 | |
| }, | |
| { | |
| "epoch": 0.3946896304269824, | |
| "grad_norm": 9.958331108093262, | |
| "learning_rate": 4.606566200215286e-05, | |
| "loss": 1.9132, | |
| "step": 2200 | |
| }, | |
| { | |
| "epoch": 0.39648367420165054, | |
| "grad_norm": 8.36109733581543, | |
| "learning_rate": 4.604772156440617e-05, | |
| "loss": 1.9548, | |
| "step": 2210 | |
| }, | |
| { | |
| "epoch": 0.3982777179763186, | |
| "grad_norm": 8.014139175415039, | |
| "learning_rate": 4.602978112665949e-05, | |
| "loss": 1.8601, | |
| "step": 2220 | |
| }, | |
| { | |
| "epoch": 0.40007176175098674, | |
| "grad_norm": 8.810429573059082, | |
| "learning_rate": 4.601184068891281e-05, | |
| "loss": 2.0101, | |
| "step": 2230 | |
| }, | |
| { | |
| "epoch": 0.4018658055256548, | |
| "grad_norm": 9.849298477172852, | |
| "learning_rate": 4.599390025116613e-05, | |
| "loss": 1.9178, | |
| "step": 2240 | |
| }, | |
| { | |
| "epoch": 0.40365984930032295, | |
| "grad_norm": 7.431755065917969, | |
| "learning_rate": 4.597595981341945e-05, | |
| "loss": 1.9154, | |
| "step": 2250 | |
| }, | |
| { | |
| "epoch": 0.405453893074991, | |
| "grad_norm": 10.968128204345703, | |
| "learning_rate": 4.595801937567277e-05, | |
| "loss": 2.0084, | |
| "step": 2260 | |
| }, | |
| { | |
| "epoch": 0.40724793684965915, | |
| "grad_norm": 8.786876678466797, | |
| "learning_rate": 4.594007893792609e-05, | |
| "loss": 2.1216, | |
| "step": 2270 | |
| }, | |
| { | |
| "epoch": 0.40904198062432723, | |
| "grad_norm": 8.992511749267578, | |
| "learning_rate": 4.592213850017941e-05, | |
| "loss": 1.9462, | |
| "step": 2280 | |
| }, | |
| { | |
| "epoch": 0.41083602439899536, | |
| "grad_norm": 8.601082801818848, | |
| "learning_rate": 4.5904198062432726e-05, | |
| "loss": 1.988, | |
| "step": 2290 | |
| }, | |
| { | |
| "epoch": 0.41263006817366343, | |
| "grad_norm": 9.053206443786621, | |
| "learning_rate": 4.5886257624686044e-05, | |
| "loss": 1.8687, | |
| "step": 2300 | |
| }, | |
| { | |
| "epoch": 0.41442411194833156, | |
| "grad_norm": 10.55015754699707, | |
| "learning_rate": 4.586831718693936e-05, | |
| "loss": 1.9853, | |
| "step": 2310 | |
| }, | |
| { | |
| "epoch": 0.41621815572299964, | |
| "grad_norm": 7.392729759216309, | |
| "learning_rate": 4.585037674919268e-05, | |
| "loss": 2.0305, | |
| "step": 2320 | |
| }, | |
| { | |
| "epoch": 0.41801219949766777, | |
| "grad_norm": 8.890578269958496, | |
| "learning_rate": 4.5832436311446e-05, | |
| "loss": 2.1245, | |
| "step": 2330 | |
| }, | |
| { | |
| "epoch": 0.41980624327233584, | |
| "grad_norm": 8.476337432861328, | |
| "learning_rate": 4.581449587369932e-05, | |
| "loss": 1.8404, | |
| "step": 2340 | |
| }, | |
| { | |
| "epoch": 0.421600287047004, | |
| "grad_norm": 11.671123504638672, | |
| "learning_rate": 4.5796555435952637e-05, | |
| "loss": 2.0296, | |
| "step": 2350 | |
| }, | |
| { | |
| "epoch": 0.42339433082167205, | |
| "grad_norm": 9.152827262878418, | |
| "learning_rate": 4.577861499820596e-05, | |
| "loss": 1.9146, | |
| "step": 2360 | |
| }, | |
| { | |
| "epoch": 0.4251883745963401, | |
| "grad_norm": 7.549522876739502, | |
| "learning_rate": 4.576067456045928e-05, | |
| "loss": 2.0178, | |
| "step": 2370 | |
| }, | |
| { | |
| "epoch": 0.42698241837100825, | |
| "grad_norm": 11.978499412536621, | |
| "learning_rate": 4.5742734122712595e-05, | |
| "loss": 1.7915, | |
| "step": 2380 | |
| }, | |
| { | |
| "epoch": 0.42877646214567633, | |
| "grad_norm": 10.303351402282715, | |
| "learning_rate": 4.572479368496592e-05, | |
| "loss": 1.8197, | |
| "step": 2390 | |
| }, | |
| { | |
| "epoch": 0.43057050592034446, | |
| "grad_norm": 9.132424354553223, | |
| "learning_rate": 4.5706853247219236e-05, | |
| "loss": 2.1076, | |
| "step": 2400 | |
| }, | |
| { | |
| "epoch": 0.43236454969501253, | |
| "grad_norm": 9.768096923828125, | |
| "learning_rate": 4.5688912809472554e-05, | |
| "loss": 1.8818, | |
| "step": 2410 | |
| }, | |
| { | |
| "epoch": 0.43415859346968066, | |
| "grad_norm": 7.977086067199707, | |
| "learning_rate": 4.567097237172587e-05, | |
| "loss": 1.9721, | |
| "step": 2420 | |
| }, | |
| { | |
| "epoch": 0.43595263724434874, | |
| "grad_norm": 10.111530303955078, | |
| "learning_rate": 4.565303193397919e-05, | |
| "loss": 1.9108, | |
| "step": 2430 | |
| }, | |
| { | |
| "epoch": 0.43774668101901687, | |
| "grad_norm": 8.572367668151855, | |
| "learning_rate": 4.563509149623251e-05, | |
| "loss": 1.8878, | |
| "step": 2440 | |
| }, | |
| { | |
| "epoch": 0.43954072479368494, | |
| "grad_norm": 15.499300956726074, | |
| "learning_rate": 4.561715105848583e-05, | |
| "loss": 2.0646, | |
| "step": 2450 | |
| }, | |
| { | |
| "epoch": 0.4413347685683531, | |
| "grad_norm": 8.507719993591309, | |
| "learning_rate": 4.559921062073915e-05, | |
| "loss": 2.0785, | |
| "step": 2460 | |
| }, | |
| { | |
| "epoch": 0.44312881234302115, | |
| "grad_norm": 8.560482025146484, | |
| "learning_rate": 4.558127018299247e-05, | |
| "loss": 1.919, | |
| "step": 2470 | |
| }, | |
| { | |
| "epoch": 0.4449228561176893, | |
| "grad_norm": 8.793256759643555, | |
| "learning_rate": 4.556332974524579e-05, | |
| "loss": 2.0275, | |
| "step": 2480 | |
| }, | |
| { | |
| "epoch": 0.44671689989235736, | |
| "grad_norm": 6.947265625, | |
| "learning_rate": 4.5545389307499105e-05, | |
| "loss": 1.8708, | |
| "step": 2490 | |
| }, | |
| { | |
| "epoch": 0.4485109436670255, | |
| "grad_norm": 8.579648971557617, | |
| "learning_rate": 4.552744886975242e-05, | |
| "loss": 2.0002, | |
| "step": 2500 | |
| }, | |
| { | |
| "epoch": 0.45030498744169356, | |
| "grad_norm": 7.623202323913574, | |
| "learning_rate": 4.550950843200574e-05, | |
| "loss": 1.9444, | |
| "step": 2510 | |
| }, | |
| { | |
| "epoch": 0.4520990312163617, | |
| "grad_norm": 8.441667556762695, | |
| "learning_rate": 4.549156799425906e-05, | |
| "loss": 2.0275, | |
| "step": 2520 | |
| }, | |
| { | |
| "epoch": 0.45389307499102977, | |
| "grad_norm": 9.18626594543457, | |
| "learning_rate": 4.547362755651238e-05, | |
| "loss": 1.9512, | |
| "step": 2530 | |
| }, | |
| { | |
| "epoch": 0.4556871187656979, | |
| "grad_norm": 8.11828899383545, | |
| "learning_rate": 4.54556871187657e-05, | |
| "loss": 1.9609, | |
| "step": 2540 | |
| }, | |
| { | |
| "epoch": 0.45748116254036597, | |
| "grad_norm": 7.3665852546691895, | |
| "learning_rate": 4.5437746681019016e-05, | |
| "loss": 1.9721, | |
| "step": 2550 | |
| }, | |
| { | |
| "epoch": 0.4592752063150341, | |
| "grad_norm": 8.686524391174316, | |
| "learning_rate": 4.541980624327234e-05, | |
| "loss": 2.0039, | |
| "step": 2560 | |
| }, | |
| { | |
| "epoch": 0.4610692500897022, | |
| "grad_norm": 8.56877326965332, | |
| "learning_rate": 4.540186580552566e-05, | |
| "loss": 2.0762, | |
| "step": 2570 | |
| }, | |
| { | |
| "epoch": 0.4628632938643703, | |
| "grad_norm": 9.964433670043945, | |
| "learning_rate": 4.5383925367778974e-05, | |
| "loss": 1.9773, | |
| "step": 2580 | |
| }, | |
| { | |
| "epoch": 0.4646573376390384, | |
| "grad_norm": 8.93453311920166, | |
| "learning_rate": 4.53659849300323e-05, | |
| "loss": 1.9763, | |
| "step": 2590 | |
| }, | |
| { | |
| "epoch": 0.4664513814137065, | |
| "grad_norm": 6.911281108856201, | |
| "learning_rate": 4.5348044492285615e-05, | |
| "loss": 1.8979, | |
| "step": 2600 | |
| }, | |
| { | |
| "epoch": 0.4682454251883746, | |
| "grad_norm": 6.9323410987854, | |
| "learning_rate": 4.533010405453893e-05, | |
| "loss": 1.9935, | |
| "step": 2610 | |
| }, | |
| { | |
| "epoch": 0.4700394689630427, | |
| "grad_norm": 7.738839626312256, | |
| "learning_rate": 4.531216361679225e-05, | |
| "loss": 1.9254, | |
| "step": 2620 | |
| }, | |
| { | |
| "epoch": 0.4718335127377108, | |
| "grad_norm": 6.664251804351807, | |
| "learning_rate": 4.529422317904557e-05, | |
| "loss": 1.9573, | |
| "step": 2630 | |
| }, | |
| { | |
| "epoch": 0.4736275565123789, | |
| "grad_norm": 8.574616432189941, | |
| "learning_rate": 4.527628274129889e-05, | |
| "loss": 1.9251, | |
| "step": 2640 | |
| }, | |
| { | |
| "epoch": 0.475421600287047, | |
| "grad_norm": 7.2672834396362305, | |
| "learning_rate": 4.525834230355221e-05, | |
| "loss": 1.8782, | |
| "step": 2650 | |
| }, | |
| { | |
| "epoch": 0.4772156440617151, | |
| "grad_norm": 6.810856342315674, | |
| "learning_rate": 4.5240401865805526e-05, | |
| "loss": 1.9968, | |
| "step": 2660 | |
| }, | |
| { | |
| "epoch": 0.4790096878363832, | |
| "grad_norm": 16.551050186157227, | |
| "learning_rate": 4.522246142805885e-05, | |
| "loss": 1.9016, | |
| "step": 2670 | |
| }, | |
| { | |
| "epoch": 0.48080373161105133, | |
| "grad_norm": 9.01229190826416, | |
| "learning_rate": 4.520452099031217e-05, | |
| "loss": 1.9715, | |
| "step": 2680 | |
| }, | |
| { | |
| "epoch": 0.4825977753857194, | |
| "grad_norm": 9.2802152633667, | |
| "learning_rate": 4.5186580552565484e-05, | |
| "loss": 2.0513, | |
| "step": 2690 | |
| }, | |
| { | |
| "epoch": 0.48439181916038754, | |
| "grad_norm": 7.945450782775879, | |
| "learning_rate": 4.516864011481881e-05, | |
| "loss": 1.849, | |
| "step": 2700 | |
| }, | |
| { | |
| "epoch": 0.4861858629350556, | |
| "grad_norm": 9.4662446975708, | |
| "learning_rate": 4.5150699677072126e-05, | |
| "loss": 1.9349, | |
| "step": 2710 | |
| }, | |
| { | |
| "epoch": 0.48797990670972374, | |
| "grad_norm": 7.22721004486084, | |
| "learning_rate": 4.5132759239325436e-05, | |
| "loss": 2.0256, | |
| "step": 2720 | |
| }, | |
| { | |
| "epoch": 0.4897739504843918, | |
| "grad_norm": 8.505002975463867, | |
| "learning_rate": 4.511481880157876e-05, | |
| "loss": 2.0187, | |
| "step": 2730 | |
| }, | |
| { | |
| "epoch": 0.49156799425905995, | |
| "grad_norm": 8.256681442260742, | |
| "learning_rate": 4.509687836383208e-05, | |
| "loss": 1.8934, | |
| "step": 2740 | |
| }, | |
| { | |
| "epoch": 0.493362038033728, | |
| "grad_norm": 13.187761306762695, | |
| "learning_rate": 4.5078937926085395e-05, | |
| "loss": 1.9564, | |
| "step": 2750 | |
| }, | |
| { | |
| "epoch": 0.4951560818083961, | |
| "grad_norm": 9.577531814575195, | |
| "learning_rate": 4.506099748833872e-05, | |
| "loss": 1.9369, | |
| "step": 2760 | |
| }, | |
| { | |
| "epoch": 0.49695012558306423, | |
| "grad_norm": 8.296804428100586, | |
| "learning_rate": 4.5043057050592036e-05, | |
| "loss": 2.043, | |
| "step": 2770 | |
| }, | |
| { | |
| "epoch": 0.4987441693577323, | |
| "grad_norm": 7.145248889923096, | |
| "learning_rate": 4.502511661284536e-05, | |
| "loss": 1.7847, | |
| "step": 2780 | |
| }, | |
| { | |
| "epoch": 0.5005382131324004, | |
| "grad_norm": 10.56977367401123, | |
| "learning_rate": 4.500717617509868e-05, | |
| "loss": 1.8364, | |
| "step": 2790 | |
| }, | |
| { | |
| "epoch": 0.5023322569070685, | |
| "grad_norm": 9.34067440032959, | |
| "learning_rate": 4.4989235737351995e-05, | |
| "loss": 1.9133, | |
| "step": 2800 | |
| }, | |
| { | |
| "epoch": 0.5041263006817366, | |
| "grad_norm": 8.067873001098633, | |
| "learning_rate": 4.497129529960531e-05, | |
| "loss": 1.8836, | |
| "step": 2810 | |
| }, | |
| { | |
| "epoch": 0.5059203444564048, | |
| "grad_norm": 8.651354789733887, | |
| "learning_rate": 4.495335486185863e-05, | |
| "loss": 1.8979, | |
| "step": 2820 | |
| }, | |
| { | |
| "epoch": 0.5077143882310728, | |
| "grad_norm": 6.594571590423584, | |
| "learning_rate": 4.4935414424111946e-05, | |
| "loss": 1.9502, | |
| "step": 2830 | |
| }, | |
| { | |
| "epoch": 0.5095084320057409, | |
| "grad_norm": 10.328874588012695, | |
| "learning_rate": 4.491747398636527e-05, | |
| "loss": 2.0826, | |
| "step": 2840 | |
| }, | |
| { | |
| "epoch": 0.511302475780409, | |
| "grad_norm": 6.956106185913086, | |
| "learning_rate": 4.489953354861859e-05, | |
| "loss": 1.9671, | |
| "step": 2850 | |
| }, | |
| { | |
| "epoch": 0.5130965195550772, | |
| "grad_norm": 7.3767313957214355, | |
| "learning_rate": 4.4881593110871905e-05, | |
| "loss": 1.902, | |
| "step": 2860 | |
| }, | |
| { | |
| "epoch": 0.5148905633297453, | |
| "grad_norm": 11.275064468383789, | |
| "learning_rate": 4.486365267312523e-05, | |
| "loss": 2.0932, | |
| "step": 2870 | |
| }, | |
| { | |
| "epoch": 0.5166846071044133, | |
| "grad_norm": 7.849559783935547, | |
| "learning_rate": 4.4845712235378546e-05, | |
| "loss": 1.9519, | |
| "step": 2880 | |
| }, | |
| { | |
| "epoch": 0.5184786508790814, | |
| "grad_norm": 7.759171962738037, | |
| "learning_rate": 4.4827771797631863e-05, | |
| "loss": 1.853, | |
| "step": 2890 | |
| }, | |
| { | |
| "epoch": 0.5202726946537496, | |
| "grad_norm": 12.3310546875, | |
| "learning_rate": 4.480983135988519e-05, | |
| "loss": 2.017, | |
| "step": 2900 | |
| }, | |
| { | |
| "epoch": 0.5220667384284177, | |
| "grad_norm": 7.477339267730713, | |
| "learning_rate": 4.4791890922138505e-05, | |
| "loss": 1.8495, | |
| "step": 2910 | |
| }, | |
| { | |
| "epoch": 0.5238607822030857, | |
| "grad_norm": 8.375692367553711, | |
| "learning_rate": 4.477395048439182e-05, | |
| "loss": 1.9654, | |
| "step": 2920 | |
| }, | |
| { | |
| "epoch": 0.5256548259777538, | |
| "grad_norm": 7.368261337280273, | |
| "learning_rate": 4.475601004664514e-05, | |
| "loss": 1.8585, | |
| "step": 2930 | |
| }, | |
| { | |
| "epoch": 0.527448869752422, | |
| "grad_norm": 7.570740699768066, | |
| "learning_rate": 4.4738069608898456e-05, | |
| "loss": 1.965, | |
| "step": 2940 | |
| }, | |
| { | |
| "epoch": 0.5292429135270901, | |
| "grad_norm": 7.81984281539917, | |
| "learning_rate": 4.4720129171151774e-05, | |
| "loss": 2.0262, | |
| "step": 2950 | |
| }, | |
| { | |
| "epoch": 0.5310369573017582, | |
| "grad_norm": 7.456051349639893, | |
| "learning_rate": 4.47021887334051e-05, | |
| "loss": 1.8934, | |
| "step": 2960 | |
| }, | |
| { | |
| "epoch": 0.5328310010764262, | |
| "grad_norm": 8.479120254516602, | |
| "learning_rate": 4.4684248295658415e-05, | |
| "loss": 1.9117, | |
| "step": 2970 | |
| }, | |
| { | |
| "epoch": 0.5346250448510944, | |
| "grad_norm": 8.73019027709961, | |
| "learning_rate": 4.466630785791174e-05, | |
| "loss": 2.0238, | |
| "step": 2980 | |
| }, | |
| { | |
| "epoch": 0.5364190886257625, | |
| "grad_norm": 8.31013298034668, | |
| "learning_rate": 4.4648367420165056e-05, | |
| "loss": 1.9404, | |
| "step": 2990 | |
| }, | |
| { | |
| "epoch": 0.5382131324004306, | |
| "grad_norm": 6.616645812988281, | |
| "learning_rate": 4.4630426982418374e-05, | |
| "loss": 1.9167, | |
| "step": 3000 | |
| }, | |
| { | |
| "epoch": 0.5400071761750986, | |
| "grad_norm": 7.4681172370910645, | |
| "learning_rate": 4.46124865446717e-05, | |
| "loss": 2.0548, | |
| "step": 3010 | |
| }, | |
| { | |
| "epoch": 0.5418012199497668, | |
| "grad_norm": 11.563379287719727, | |
| "learning_rate": 4.459454610692501e-05, | |
| "loss": 2.0929, | |
| "step": 3020 | |
| }, | |
| { | |
| "epoch": 0.5435952637244349, | |
| "grad_norm": 7.389353275299072, | |
| "learning_rate": 4.4576605669178325e-05, | |
| "loss": 1.7729, | |
| "step": 3030 | |
| }, | |
| { | |
| "epoch": 0.545389307499103, | |
| "grad_norm": 8.560032844543457, | |
| "learning_rate": 4.455866523143165e-05, | |
| "loss": 1.8803, | |
| "step": 3040 | |
| }, | |
| { | |
| "epoch": 0.547183351273771, | |
| "grad_norm": 8.208505630493164, | |
| "learning_rate": 4.454072479368497e-05, | |
| "loss": 1.8715, | |
| "step": 3050 | |
| }, | |
| { | |
| "epoch": 0.5489773950484392, | |
| "grad_norm": 8.2820463180542, | |
| "learning_rate": 4.4522784355938284e-05, | |
| "loss": 2.0147, | |
| "step": 3060 | |
| }, | |
| { | |
| "epoch": 0.5507714388231073, | |
| "grad_norm": 7.4810285568237305, | |
| "learning_rate": 4.450484391819161e-05, | |
| "loss": 1.9301, | |
| "step": 3070 | |
| }, | |
| { | |
| "epoch": 0.5525654825977754, | |
| "grad_norm": 7.442780017852783, | |
| "learning_rate": 4.4486903480444925e-05, | |
| "loss": 2.0132, | |
| "step": 3080 | |
| }, | |
| { | |
| "epoch": 0.5543595263724435, | |
| "grad_norm": 7.857019424438477, | |
| "learning_rate": 4.446896304269824e-05, | |
| "loss": 2.1327, | |
| "step": 3090 | |
| }, | |
| { | |
| "epoch": 0.5561535701471116, | |
| "grad_norm": 8.195714950561523, | |
| "learning_rate": 4.4451022604951567e-05, | |
| "loss": 1.9133, | |
| "step": 3100 | |
| }, | |
| { | |
| "epoch": 0.5579476139217797, | |
| "grad_norm": 9.424678802490234, | |
| "learning_rate": 4.4433082167204884e-05, | |
| "loss": 1.8079, | |
| "step": 3110 | |
| }, | |
| { | |
| "epoch": 0.5597416576964478, | |
| "grad_norm": 7.7518768310546875, | |
| "learning_rate": 4.44151417294582e-05, | |
| "loss": 2.0345, | |
| "step": 3120 | |
| }, | |
| { | |
| "epoch": 0.5615357014711159, | |
| "grad_norm": 8.170982360839844, | |
| "learning_rate": 4.439720129171152e-05, | |
| "loss": 2.089, | |
| "step": 3130 | |
| }, | |
| { | |
| "epoch": 0.563329745245784, | |
| "grad_norm": 7.443562030792236, | |
| "learning_rate": 4.4379260853964836e-05, | |
| "loss": 1.805, | |
| "step": 3140 | |
| }, | |
| { | |
| "epoch": 0.5651237890204521, | |
| "grad_norm": 8.59354019165039, | |
| "learning_rate": 4.436132041621816e-05, | |
| "loss": 1.9759, | |
| "step": 3150 | |
| }, | |
| { | |
| "epoch": 0.5669178327951202, | |
| "grad_norm": 13.471951484680176, | |
| "learning_rate": 4.434337997847148e-05, | |
| "loss": 1.7935, | |
| "step": 3160 | |
| }, | |
| { | |
| "epoch": 0.5687118765697883, | |
| "grad_norm": 9.660313606262207, | |
| "learning_rate": 4.4325439540724794e-05, | |
| "loss": 1.9807, | |
| "step": 3170 | |
| }, | |
| { | |
| "epoch": 0.5705059203444564, | |
| "grad_norm": 6.917825222015381, | |
| "learning_rate": 4.430749910297812e-05, | |
| "loss": 2.0491, | |
| "step": 3180 | |
| }, | |
| { | |
| "epoch": 0.5722999641191245, | |
| "grad_norm": 9.734037399291992, | |
| "learning_rate": 4.4289558665231435e-05, | |
| "loss": 1.8836, | |
| "step": 3190 | |
| }, | |
| { | |
| "epoch": 0.5740940078937926, | |
| "grad_norm": 7.515590190887451, | |
| "learning_rate": 4.427161822748475e-05, | |
| "loss": 1.9391, | |
| "step": 3200 | |
| }, | |
| { | |
| "epoch": 0.5758880516684607, | |
| "grad_norm": 7.2530670166015625, | |
| "learning_rate": 4.425367778973808e-05, | |
| "loss": 1.8615, | |
| "step": 3210 | |
| }, | |
| { | |
| "epoch": 0.5776820954431288, | |
| "grad_norm": 8.29240894317627, | |
| "learning_rate": 4.4235737351991394e-05, | |
| "loss": 2.0954, | |
| "step": 3220 | |
| }, | |
| { | |
| "epoch": 0.579476139217797, | |
| "grad_norm": 8.519623756408691, | |
| "learning_rate": 4.421779691424471e-05, | |
| "loss": 1.983, | |
| "step": 3230 | |
| }, | |
| { | |
| "epoch": 0.581270182992465, | |
| "grad_norm": 7.667181015014648, | |
| "learning_rate": 4.419985647649803e-05, | |
| "loss": 1.9309, | |
| "step": 3240 | |
| }, | |
| { | |
| "epoch": 0.5830642267671331, | |
| "grad_norm": 9.281998634338379, | |
| "learning_rate": 4.4181916038751346e-05, | |
| "loss": 1.9057, | |
| "step": 3250 | |
| }, | |
| { | |
| "epoch": 0.5848582705418012, | |
| "grad_norm": 7.8026123046875, | |
| "learning_rate": 4.416397560100466e-05, | |
| "loss": 1.8834, | |
| "step": 3260 | |
| }, | |
| { | |
| "epoch": 0.5866523143164694, | |
| "grad_norm": 7.448451042175293, | |
| "learning_rate": 4.414603516325799e-05, | |
| "loss": 1.8942, | |
| "step": 3270 | |
| }, | |
| { | |
| "epoch": 0.5884463580911374, | |
| "grad_norm": 8.204652786254883, | |
| "learning_rate": 4.4128094725511304e-05, | |
| "loss": 2.0565, | |
| "step": 3280 | |
| }, | |
| { | |
| "epoch": 0.5902404018658055, | |
| "grad_norm": 8.598773002624512, | |
| "learning_rate": 4.411015428776462e-05, | |
| "loss": 1.9708, | |
| "step": 3290 | |
| }, | |
| { | |
| "epoch": 0.5920344456404736, | |
| "grad_norm": 7.167200088500977, | |
| "learning_rate": 4.4092213850017946e-05, | |
| "loss": 1.8534, | |
| "step": 3300 | |
| }, | |
| { | |
| "epoch": 0.5938284894151418, | |
| "grad_norm": 11.437542915344238, | |
| "learning_rate": 4.407427341227126e-05, | |
| "loss": 2.0349, | |
| "step": 3310 | |
| }, | |
| { | |
| "epoch": 0.5956225331898098, | |
| "grad_norm": 8.82004451751709, | |
| "learning_rate": 4.405633297452458e-05, | |
| "loss": 1.9427, | |
| "step": 3320 | |
| }, | |
| { | |
| "epoch": 0.5974165769644779, | |
| "grad_norm": 6.192866802215576, | |
| "learning_rate": 4.40383925367779e-05, | |
| "loss": 1.9727, | |
| "step": 3330 | |
| }, | |
| { | |
| "epoch": 0.599210620739146, | |
| "grad_norm": 7.089540481567383, | |
| "learning_rate": 4.4020452099031215e-05, | |
| "loss": 1.9497, | |
| "step": 3340 | |
| }, | |
| { | |
| "epoch": 0.6010046645138142, | |
| "grad_norm": 8.135286331176758, | |
| "learning_rate": 4.400251166128454e-05, | |
| "loss": 1.8405, | |
| "step": 3350 | |
| }, | |
| { | |
| "epoch": 0.6027987082884823, | |
| "grad_norm": 8.762147903442383, | |
| "learning_rate": 4.3984571223537856e-05, | |
| "loss": 1.9011, | |
| "step": 3360 | |
| }, | |
| { | |
| "epoch": 0.6045927520631503, | |
| "grad_norm": 9.658001899719238, | |
| "learning_rate": 4.396663078579117e-05, | |
| "loss": 1.8995, | |
| "step": 3370 | |
| }, | |
| { | |
| "epoch": 0.6063867958378184, | |
| "grad_norm": 7.004101276397705, | |
| "learning_rate": 4.39486903480445e-05, | |
| "loss": 1.8548, | |
| "step": 3380 | |
| }, | |
| { | |
| "epoch": 0.6081808396124866, | |
| "grad_norm": 7.552975177764893, | |
| "learning_rate": 4.3930749910297814e-05, | |
| "loss": 1.9911, | |
| "step": 3390 | |
| }, | |
| { | |
| "epoch": 0.6099748833871547, | |
| "grad_norm": 7.0711774826049805, | |
| "learning_rate": 4.391280947255113e-05, | |
| "loss": 1.8611, | |
| "step": 3400 | |
| }, | |
| { | |
| "epoch": 0.6117689271618227, | |
| "grad_norm": 9.78061580657959, | |
| "learning_rate": 4.3894869034804456e-05, | |
| "loss": 1.9661, | |
| "step": 3410 | |
| }, | |
| { | |
| "epoch": 0.6135629709364908, | |
| "grad_norm": 8.065385818481445, | |
| "learning_rate": 4.387692859705777e-05, | |
| "loss": 1.975, | |
| "step": 3420 | |
| }, | |
| { | |
| "epoch": 0.615357014711159, | |
| "grad_norm": 9.28231430053711, | |
| "learning_rate": 4.385898815931109e-05, | |
| "loss": 1.9083, | |
| "step": 3430 | |
| }, | |
| { | |
| "epoch": 0.6171510584858271, | |
| "grad_norm": 8.710234642028809, | |
| "learning_rate": 4.384104772156441e-05, | |
| "loss": 2.0402, | |
| "step": 3440 | |
| }, | |
| { | |
| "epoch": 0.6189451022604952, | |
| "grad_norm": 8.679749488830566, | |
| "learning_rate": 4.3823107283817725e-05, | |
| "loss": 1.9162, | |
| "step": 3450 | |
| }, | |
| { | |
| "epoch": 0.6207391460351632, | |
| "grad_norm": 7.5028228759765625, | |
| "learning_rate": 4.380516684607104e-05, | |
| "loss": 2.0918, | |
| "step": 3460 | |
| }, | |
| { | |
| "epoch": 0.6225331898098314, | |
| "grad_norm": 8.817152976989746, | |
| "learning_rate": 4.3787226408324366e-05, | |
| "loss": 1.9089, | |
| "step": 3470 | |
| }, | |
| { | |
| "epoch": 0.6243272335844995, | |
| "grad_norm": 7.720176696777344, | |
| "learning_rate": 4.376928597057768e-05, | |
| "loss": 1.8395, | |
| "step": 3480 | |
| }, | |
| { | |
| "epoch": 0.6261212773591676, | |
| "grad_norm": 8.804288864135742, | |
| "learning_rate": 4.3751345532831e-05, | |
| "loss": 2.0384, | |
| "step": 3490 | |
| }, | |
| { | |
| "epoch": 0.6279153211338356, | |
| "grad_norm": 6.383409023284912, | |
| "learning_rate": 4.3733405095084325e-05, | |
| "loss": 1.917, | |
| "step": 3500 | |
| }, | |
| { | |
| "epoch": 0.6297093649085038, | |
| "grad_norm": 7.6271233558654785, | |
| "learning_rate": 4.371546465733764e-05, | |
| "loss": 1.884, | |
| "step": 3510 | |
| }, | |
| { | |
| "epoch": 0.6315034086831719, | |
| "grad_norm": 7.319206237792969, | |
| "learning_rate": 4.3697524219590966e-05, | |
| "loss": 2.0081, | |
| "step": 3520 | |
| }, | |
| { | |
| "epoch": 0.63329745245784, | |
| "grad_norm": 7.624337196350098, | |
| "learning_rate": 4.367958378184428e-05, | |
| "loss": 1.9573, | |
| "step": 3530 | |
| }, | |
| { | |
| "epoch": 0.635091496232508, | |
| "grad_norm": 8.105642318725586, | |
| "learning_rate": 4.3661643344097594e-05, | |
| "loss": 1.9264, | |
| "step": 3540 | |
| }, | |
| { | |
| "epoch": 0.6368855400071761, | |
| "grad_norm": 6.624615669250488, | |
| "learning_rate": 4.364370290635092e-05, | |
| "loss": 1.9273, | |
| "step": 3550 | |
| }, | |
| { | |
| "epoch": 0.6386795837818443, | |
| "grad_norm": 8.527731895446777, | |
| "learning_rate": 4.3625762468604235e-05, | |
| "loss": 1.8837, | |
| "step": 3560 | |
| }, | |
| { | |
| "epoch": 0.6404736275565124, | |
| "grad_norm": 8.374712944030762, | |
| "learning_rate": 4.360782203085755e-05, | |
| "loss": 1.9182, | |
| "step": 3570 | |
| }, | |
| { | |
| "epoch": 0.6422676713311805, | |
| "grad_norm": 6.726866245269775, | |
| "learning_rate": 4.3589881593110876e-05, | |
| "loss": 1.7699, | |
| "step": 3580 | |
| }, | |
| { | |
| "epoch": 0.6440617151058485, | |
| "grad_norm": 10.059771537780762, | |
| "learning_rate": 4.3571941155364194e-05, | |
| "loss": 1.8803, | |
| "step": 3590 | |
| }, | |
| { | |
| "epoch": 0.6458557588805167, | |
| "grad_norm": 6.3898024559021, | |
| "learning_rate": 4.355400071761751e-05, | |
| "loss": 1.9215, | |
| "step": 3600 | |
| }, | |
| { | |
| "epoch": 0.6476498026551848, | |
| "grad_norm": 9.93798542022705, | |
| "learning_rate": 4.3536060279870835e-05, | |
| "loss": 1.9015, | |
| "step": 3610 | |
| }, | |
| { | |
| "epoch": 0.6494438464298529, | |
| "grad_norm": 8.230923652648926, | |
| "learning_rate": 4.351811984212415e-05, | |
| "loss": 1.8842, | |
| "step": 3620 | |
| }, | |
| { | |
| "epoch": 0.6512378902045209, | |
| "grad_norm": 8.391541481018066, | |
| "learning_rate": 4.350017940437747e-05, | |
| "loss": 2.0254, | |
| "step": 3630 | |
| }, | |
| { | |
| "epoch": 0.6530319339791891, | |
| "grad_norm": 8.703720092773438, | |
| "learning_rate": 4.3482238966630787e-05, | |
| "loss": 1.9441, | |
| "step": 3640 | |
| }, | |
| { | |
| "epoch": 0.6548259777538572, | |
| "grad_norm": 8.434385299682617, | |
| "learning_rate": 4.3464298528884104e-05, | |
| "loss": 2.1388, | |
| "step": 3650 | |
| }, | |
| { | |
| "epoch": 0.6566200215285253, | |
| "grad_norm": 9.658421516418457, | |
| "learning_rate": 4.344635809113742e-05, | |
| "loss": 2.1284, | |
| "step": 3660 | |
| }, | |
| { | |
| "epoch": 0.6584140653031934, | |
| "grad_norm": 9.099438667297363, | |
| "learning_rate": 4.3428417653390745e-05, | |
| "loss": 1.9191, | |
| "step": 3670 | |
| }, | |
| { | |
| "epoch": 0.6602081090778615, | |
| "grad_norm": 5.330417633056641, | |
| "learning_rate": 4.341047721564406e-05, | |
| "loss": 1.7558, | |
| "step": 3680 | |
| }, | |
| { | |
| "epoch": 0.6620021528525296, | |
| "grad_norm": 8.256141662597656, | |
| "learning_rate": 4.339253677789738e-05, | |
| "loss": 1.904, | |
| "step": 3690 | |
| }, | |
| { | |
| "epoch": 0.6637961966271977, | |
| "grad_norm": 7.978524208068848, | |
| "learning_rate": 4.3374596340150704e-05, | |
| "loss": 1.9706, | |
| "step": 3700 | |
| }, | |
| { | |
| "epoch": 0.6655902404018658, | |
| "grad_norm": 7.254574298858643, | |
| "learning_rate": 4.335665590240402e-05, | |
| "loss": 1.8038, | |
| "step": 3710 | |
| }, | |
| { | |
| "epoch": 0.667384284176534, | |
| "grad_norm": 6.119344711303711, | |
| "learning_rate": 4.3338715464657345e-05, | |
| "loss": 2.0935, | |
| "step": 3720 | |
| }, | |
| { | |
| "epoch": 0.669178327951202, | |
| "grad_norm": 10.237481117248535, | |
| "learning_rate": 4.332077502691066e-05, | |
| "loss": 1.9197, | |
| "step": 3730 | |
| }, | |
| { | |
| "epoch": 0.6709723717258701, | |
| "grad_norm": 8.238425254821777, | |
| "learning_rate": 4.330283458916398e-05, | |
| "loss": 1.8335, | |
| "step": 3740 | |
| }, | |
| { | |
| "epoch": 0.6727664155005382, | |
| "grad_norm": 10.77346420288086, | |
| "learning_rate": 4.32848941514173e-05, | |
| "loss": 1.9849, | |
| "step": 3750 | |
| }, | |
| { | |
| "epoch": 0.6745604592752064, | |
| "grad_norm": 5.878365993499756, | |
| "learning_rate": 4.3266953713670614e-05, | |
| "loss": 1.8184, | |
| "step": 3760 | |
| }, | |
| { | |
| "epoch": 0.6763545030498744, | |
| "grad_norm": 5.962438583374023, | |
| "learning_rate": 4.324901327592393e-05, | |
| "loss": 1.942, | |
| "step": 3770 | |
| }, | |
| { | |
| "epoch": 0.6781485468245425, | |
| "grad_norm": 7.118070602416992, | |
| "learning_rate": 4.3231072838177255e-05, | |
| "loss": 1.8888, | |
| "step": 3780 | |
| }, | |
| { | |
| "epoch": 0.6799425905992106, | |
| "grad_norm": 7.19279146194458, | |
| "learning_rate": 4.321313240043057e-05, | |
| "loss": 1.9875, | |
| "step": 3790 | |
| }, | |
| { | |
| "epoch": 0.6817366343738788, | |
| "grad_norm": 9.004045486450195, | |
| "learning_rate": 4.319519196268389e-05, | |
| "loss": 1.8713, | |
| "step": 3800 | |
| }, | |
| { | |
| "epoch": 0.6835306781485468, | |
| "grad_norm": 6.644644737243652, | |
| "learning_rate": 4.3177251524937214e-05, | |
| "loss": 1.8883, | |
| "step": 3810 | |
| }, | |
| { | |
| "epoch": 0.6853247219232149, | |
| "grad_norm": 7.53091287612915, | |
| "learning_rate": 4.315931108719053e-05, | |
| "loss": 1.9133, | |
| "step": 3820 | |
| }, | |
| { | |
| "epoch": 0.687118765697883, | |
| "grad_norm": 7.283283710479736, | |
| "learning_rate": 4.314137064944385e-05, | |
| "loss": 1.8786, | |
| "step": 3830 | |
| }, | |
| { | |
| "epoch": 0.6889128094725512, | |
| "grad_norm": 8.955952644348145, | |
| "learning_rate": 4.3123430211697166e-05, | |
| "loss": 1.9249, | |
| "step": 3840 | |
| }, | |
| { | |
| "epoch": 0.6907068532472193, | |
| "grad_norm": 6.595334053039551, | |
| "learning_rate": 4.310548977395048e-05, | |
| "loss": 1.8995, | |
| "step": 3850 | |
| }, | |
| { | |
| "epoch": 0.6925008970218873, | |
| "grad_norm": 6.214903831481934, | |
| "learning_rate": 4.30875493362038e-05, | |
| "loss": 1.8988, | |
| "step": 3860 | |
| }, | |
| { | |
| "epoch": 0.6942949407965554, | |
| "grad_norm": 5.9941182136535645, | |
| "learning_rate": 4.3069608898457124e-05, | |
| "loss": 1.8008, | |
| "step": 3870 | |
| }, | |
| { | |
| "epoch": 0.6960889845712236, | |
| "grad_norm": 9.003954887390137, | |
| "learning_rate": 4.305166846071044e-05, | |
| "loss": 2.0354, | |
| "step": 3880 | |
| }, | |
| { | |
| "epoch": 0.6978830283458917, | |
| "grad_norm": 7.329159259796143, | |
| "learning_rate": 4.303372802296376e-05, | |
| "loss": 2.0086, | |
| "step": 3890 | |
| }, | |
| { | |
| "epoch": 0.6996770721205597, | |
| "grad_norm": 7.872637748718262, | |
| "learning_rate": 4.301578758521708e-05, | |
| "loss": 1.9595, | |
| "step": 3900 | |
| }, | |
| { | |
| "epoch": 0.7014711158952278, | |
| "grad_norm": 10.439992904663086, | |
| "learning_rate": 4.29978471474704e-05, | |
| "loss": 1.9187, | |
| "step": 3910 | |
| }, | |
| { | |
| "epoch": 0.703265159669896, | |
| "grad_norm": 10.078546524047852, | |
| "learning_rate": 4.2979906709723724e-05, | |
| "loss": 1.9979, | |
| "step": 3920 | |
| }, | |
| { | |
| "epoch": 0.7050592034445641, | |
| "grad_norm": 8.9290771484375, | |
| "learning_rate": 4.296196627197704e-05, | |
| "loss": 1.9665, | |
| "step": 3930 | |
| }, | |
| { | |
| "epoch": 0.7068532472192322, | |
| "grad_norm": 8.043295860290527, | |
| "learning_rate": 4.294402583423036e-05, | |
| "loss": 2.0803, | |
| "step": 3940 | |
| }, | |
| { | |
| "epoch": 0.7086472909939002, | |
| "grad_norm": 7.653200149536133, | |
| "learning_rate": 4.2926085396483676e-05, | |
| "loss": 2.1244, | |
| "step": 3950 | |
| }, | |
| { | |
| "epoch": 0.7104413347685683, | |
| "grad_norm": 12.027050971984863, | |
| "learning_rate": 4.290814495873699e-05, | |
| "loss": 2.0046, | |
| "step": 3960 | |
| }, | |
| { | |
| "epoch": 0.7122353785432365, | |
| "grad_norm": 6.169637680053711, | |
| "learning_rate": 4.289020452099031e-05, | |
| "loss": 1.9957, | |
| "step": 3970 | |
| }, | |
| { | |
| "epoch": 0.7140294223179046, | |
| "grad_norm": 8.492105484008789, | |
| "learning_rate": 4.2872264083243634e-05, | |
| "loss": 1.9959, | |
| "step": 3980 | |
| }, | |
| { | |
| "epoch": 0.7158234660925726, | |
| "grad_norm": 7.080846309661865, | |
| "learning_rate": 4.285432364549695e-05, | |
| "loss": 1.9098, | |
| "step": 3990 | |
| }, | |
| { | |
| "epoch": 0.7176175098672407, | |
| "grad_norm": 7.8670454025268555, | |
| "learning_rate": 4.283638320775027e-05, | |
| "loss": 1.8557, | |
| "step": 4000 | |
| }, | |
| { | |
| "epoch": 0.7194115536419089, | |
| "grad_norm": 8.910687446594238, | |
| "learning_rate": 4.281844277000359e-05, | |
| "loss": 2.0108, | |
| "step": 4010 | |
| }, | |
| { | |
| "epoch": 0.721205597416577, | |
| "grad_norm": 7.866197109222412, | |
| "learning_rate": 4.280050233225691e-05, | |
| "loss": 1.9711, | |
| "step": 4020 | |
| }, | |
| { | |
| "epoch": 0.722999641191245, | |
| "grad_norm": 7.0763044357299805, | |
| "learning_rate": 4.278256189451023e-05, | |
| "loss": 1.9565, | |
| "step": 4030 | |
| }, | |
| { | |
| "epoch": 0.7247936849659131, | |
| "grad_norm": 10.510825157165527, | |
| "learning_rate": 4.276462145676355e-05, | |
| "loss": 1.9513, | |
| "step": 4040 | |
| }, | |
| { | |
| "epoch": 0.7265877287405813, | |
| "grad_norm": 6.813848495483398, | |
| "learning_rate": 4.274668101901687e-05, | |
| "loss": 1.9841, | |
| "step": 4050 | |
| }, | |
| { | |
| "epoch": 0.7283817725152494, | |
| "grad_norm": 8.661791801452637, | |
| "learning_rate": 4.272874058127018e-05, | |
| "loss": 1.9382, | |
| "step": 4060 | |
| }, | |
| { | |
| "epoch": 0.7301758162899175, | |
| "grad_norm": 8.576088905334473, | |
| "learning_rate": 4.27108001435235e-05, | |
| "loss": 1.7463, | |
| "step": 4070 | |
| }, | |
| { | |
| "epoch": 0.7319698600645855, | |
| "grad_norm": 9.310657501220703, | |
| "learning_rate": 4.269285970577682e-05, | |
| "loss": 1.9832, | |
| "step": 4080 | |
| }, | |
| { | |
| "epoch": 0.7337639038392537, | |
| "grad_norm": 8.533422470092773, | |
| "learning_rate": 4.2674919268030145e-05, | |
| "loss": 2.0184, | |
| "step": 4090 | |
| }, | |
| { | |
| "epoch": 0.7355579476139218, | |
| "grad_norm": 6.781728267669678, | |
| "learning_rate": 4.265697883028346e-05, | |
| "loss": 1.9384, | |
| "step": 4100 | |
| }, | |
| { | |
| "epoch": 0.7373519913885899, | |
| "grad_norm": 7.908256530761719, | |
| "learning_rate": 4.263903839253678e-05, | |
| "loss": 1.9602, | |
| "step": 4110 | |
| }, | |
| { | |
| "epoch": 0.7391460351632579, | |
| "grad_norm": 6.28724479675293, | |
| "learning_rate": 4.26210979547901e-05, | |
| "loss": 2.0066, | |
| "step": 4120 | |
| }, | |
| { | |
| "epoch": 0.7409400789379261, | |
| "grad_norm": 6.198331832885742, | |
| "learning_rate": 4.260315751704342e-05, | |
| "loss": 2.132, | |
| "step": 4130 | |
| }, | |
| { | |
| "epoch": 0.7427341227125942, | |
| "grad_norm": 7.816977024078369, | |
| "learning_rate": 4.258521707929674e-05, | |
| "loss": 1.9536, | |
| "step": 4140 | |
| }, | |
| { | |
| "epoch": 0.7445281664872623, | |
| "grad_norm": 7.433613300323486, | |
| "learning_rate": 4.2567276641550055e-05, | |
| "loss": 1.885, | |
| "step": 4150 | |
| }, | |
| { | |
| "epoch": 0.7463222102619304, | |
| "grad_norm": 6.046330451965332, | |
| "learning_rate": 4.254933620380337e-05, | |
| "loss": 1.7975, | |
| "step": 4160 | |
| }, | |
| { | |
| "epoch": 0.7481162540365985, | |
| "grad_norm": 8.465211868286133, | |
| "learning_rate": 4.253139576605669e-05, | |
| "loss": 2.0454, | |
| "step": 4170 | |
| }, | |
| { | |
| "epoch": 0.7499102978112666, | |
| "grad_norm": 5.859003067016602, | |
| "learning_rate": 4.2513455328310013e-05, | |
| "loss": 2.0586, | |
| "step": 4180 | |
| }, | |
| { | |
| "epoch": 0.7517043415859347, | |
| "grad_norm": 8.045632362365723, | |
| "learning_rate": 4.249551489056333e-05, | |
| "loss": 1.9465, | |
| "step": 4190 | |
| }, | |
| { | |
| "epoch": 0.7534983853606028, | |
| "grad_norm": 6.814916133880615, | |
| "learning_rate": 4.247757445281665e-05, | |
| "loss": 1.8779, | |
| "step": 4200 | |
| }, | |
| { | |
| "epoch": 0.755292429135271, | |
| "grad_norm": 7.628875732421875, | |
| "learning_rate": 4.245963401506997e-05, | |
| "loss": 2.1836, | |
| "step": 4210 | |
| }, | |
| { | |
| "epoch": 0.757086472909939, | |
| "grad_norm": 6.975657939910889, | |
| "learning_rate": 4.244169357732329e-05, | |
| "loss": 1.7787, | |
| "step": 4220 | |
| }, | |
| { | |
| "epoch": 0.7588805166846071, | |
| "grad_norm": 7.706836700439453, | |
| "learning_rate": 4.2423753139576606e-05, | |
| "loss": 1.9749, | |
| "step": 4230 | |
| }, | |
| { | |
| "epoch": 0.7606745604592752, | |
| "grad_norm": 8.721719741821289, | |
| "learning_rate": 4.240581270182993e-05, | |
| "loss": 1.9422, | |
| "step": 4240 | |
| }, | |
| { | |
| "epoch": 0.7624686042339434, | |
| "grad_norm": 8.833395957946777, | |
| "learning_rate": 4.238787226408325e-05, | |
| "loss": 2.0886, | |
| "step": 4250 | |
| }, | |
| { | |
| "epoch": 0.7642626480086114, | |
| "grad_norm": 8.308511734008789, | |
| "learning_rate": 4.2369931826336565e-05, | |
| "loss": 1.9319, | |
| "step": 4260 | |
| }, | |
| { | |
| "epoch": 0.7660566917832795, | |
| "grad_norm": 7.8894476890563965, | |
| "learning_rate": 4.235199138858988e-05, | |
| "loss": 1.9378, | |
| "step": 4270 | |
| }, | |
| { | |
| "epoch": 0.7678507355579476, | |
| "grad_norm": 6.138456344604492, | |
| "learning_rate": 4.23340509508432e-05, | |
| "loss": 1.9009, | |
| "step": 4280 | |
| }, | |
| { | |
| "epoch": 0.7696447793326158, | |
| "grad_norm": 7.507815837860107, | |
| "learning_rate": 4.2316110513096524e-05, | |
| "loss": 1.9461, | |
| "step": 4290 | |
| }, | |
| { | |
| "epoch": 0.7714388231072838, | |
| "grad_norm": 7.388694763183594, | |
| "learning_rate": 4.229817007534984e-05, | |
| "loss": 1.7823, | |
| "step": 4300 | |
| }, | |
| { | |
| "epoch": 0.7732328668819519, | |
| "grad_norm": 7.483008861541748, | |
| "learning_rate": 4.228022963760316e-05, | |
| "loss": 2.0558, | |
| "step": 4310 | |
| }, | |
| { | |
| "epoch": 0.77502691065662, | |
| "grad_norm": 8.476717948913574, | |
| "learning_rate": 4.226228919985648e-05, | |
| "loss": 1.97, | |
| "step": 4320 | |
| }, | |
| { | |
| "epoch": 0.7768209544312881, | |
| "grad_norm": 8.137911796569824, | |
| "learning_rate": 4.22443487621098e-05, | |
| "loss": 2.0543, | |
| "step": 4330 | |
| }, | |
| { | |
| "epoch": 0.7786149982059563, | |
| "grad_norm": 7.722476482391357, | |
| "learning_rate": 4.222640832436312e-05, | |
| "loss": 1.8836, | |
| "step": 4340 | |
| }, | |
| { | |
| "epoch": 0.7804090419806243, | |
| "grad_norm": 7.449862003326416, | |
| "learning_rate": 4.220846788661644e-05, | |
| "loss": 1.9243, | |
| "step": 4350 | |
| }, | |
| { | |
| "epoch": 0.7822030857552924, | |
| "grad_norm": 6.4095563888549805, | |
| "learning_rate": 4.219052744886975e-05, | |
| "loss": 1.8377, | |
| "step": 4360 | |
| }, | |
| { | |
| "epoch": 0.7839971295299605, | |
| "grad_norm": 6.866125106811523, | |
| "learning_rate": 4.217258701112307e-05, | |
| "loss": 1.8246, | |
| "step": 4370 | |
| }, | |
| { | |
| "epoch": 0.7857911733046287, | |
| "grad_norm": 9.195448875427246, | |
| "learning_rate": 4.215464657337639e-05, | |
| "loss": 1.8019, | |
| "step": 4380 | |
| }, | |
| { | |
| "epoch": 0.7875852170792967, | |
| "grad_norm": 6.7557196617126465, | |
| "learning_rate": 4.213670613562971e-05, | |
| "loss": 1.8733, | |
| "step": 4390 | |
| }, | |
| { | |
| "epoch": 0.7893792608539648, | |
| "grad_norm": 7.832233905792236, | |
| "learning_rate": 4.211876569788303e-05, | |
| "loss": 1.727, | |
| "step": 4400 | |
| }, | |
| { | |
| "epoch": 0.7911733046286329, | |
| "grad_norm": 6.849626541137695, | |
| "learning_rate": 4.210082526013635e-05, | |
| "loss": 1.8051, | |
| "step": 4410 | |
| }, | |
| { | |
| "epoch": 0.7929673484033011, | |
| "grad_norm": 7.668883323669434, | |
| "learning_rate": 4.208288482238967e-05, | |
| "loss": 1.9021, | |
| "step": 4420 | |
| }, | |
| { | |
| "epoch": 0.7947613921779692, | |
| "grad_norm": 6.98665714263916, | |
| "learning_rate": 4.2064944384642986e-05, | |
| "loss": 1.9314, | |
| "step": 4430 | |
| }, | |
| { | |
| "epoch": 0.7965554359526372, | |
| "grad_norm": 7.1887383460998535, | |
| "learning_rate": 4.204700394689631e-05, | |
| "loss": 1.8046, | |
| "step": 4440 | |
| }, | |
| { | |
| "epoch": 0.7983494797273053, | |
| "grad_norm": 8.068037033081055, | |
| "learning_rate": 4.202906350914963e-05, | |
| "loss": 2.0018, | |
| "step": 4450 | |
| }, | |
| { | |
| "epoch": 0.8001435235019735, | |
| "grad_norm": 8.662615776062012, | |
| "learning_rate": 4.2011123071402944e-05, | |
| "loss": 1.9545, | |
| "step": 4460 | |
| }, | |
| { | |
| "epoch": 0.8019375672766416, | |
| "grad_norm": 6.354881286621094, | |
| "learning_rate": 4.199318263365626e-05, | |
| "loss": 1.699, | |
| "step": 4470 | |
| }, | |
| { | |
| "epoch": 0.8037316110513096, | |
| "grad_norm": 7.5929059982299805, | |
| "learning_rate": 4.197524219590958e-05, | |
| "loss": 1.7685, | |
| "step": 4480 | |
| }, | |
| { | |
| "epoch": 0.8055256548259777, | |
| "grad_norm": 7.677204608917236, | |
| "learning_rate": 4.19573017581629e-05, | |
| "loss": 1.876, | |
| "step": 4490 | |
| }, | |
| { | |
| "epoch": 0.8073196986006459, | |
| "grad_norm": 6.630999565124512, | |
| "learning_rate": 4.193936132041622e-05, | |
| "loss": 1.6986, | |
| "step": 4500 | |
| }, | |
| { | |
| "epoch": 0.809113742375314, | |
| "grad_norm": 6.430192947387695, | |
| "learning_rate": 4.192142088266954e-05, | |
| "loss": 1.9533, | |
| "step": 4510 | |
| }, | |
| { | |
| "epoch": 0.810907786149982, | |
| "grad_norm": 8.051733016967773, | |
| "learning_rate": 4.190348044492286e-05, | |
| "loss": 1.8555, | |
| "step": 4520 | |
| }, | |
| { | |
| "epoch": 0.8127018299246501, | |
| "grad_norm": 10.02955150604248, | |
| "learning_rate": 4.188554000717618e-05, | |
| "loss": 1.8035, | |
| "step": 4530 | |
| }, | |
| { | |
| "epoch": 0.8144958736993183, | |
| "grad_norm": 9.238576889038086, | |
| "learning_rate": 4.1867599569429496e-05, | |
| "loss": 1.9818, | |
| "step": 4540 | |
| }, | |
| { | |
| "epoch": 0.8162899174739864, | |
| "grad_norm": 6.678407192230225, | |
| "learning_rate": 4.184965913168282e-05, | |
| "loss": 1.808, | |
| "step": 4550 | |
| }, | |
| { | |
| "epoch": 0.8180839612486545, | |
| "grad_norm": 8.583653450012207, | |
| "learning_rate": 4.183171869393614e-05, | |
| "loss": 1.7423, | |
| "step": 4560 | |
| }, | |
| { | |
| "epoch": 0.8198780050233225, | |
| "grad_norm": 7.882835865020752, | |
| "learning_rate": 4.1813778256189454e-05, | |
| "loss": 1.9654, | |
| "step": 4570 | |
| }, | |
| { | |
| "epoch": 0.8216720487979907, | |
| "grad_norm": 7.367639541625977, | |
| "learning_rate": 4.179583781844277e-05, | |
| "loss": 1.8759, | |
| "step": 4580 | |
| }, | |
| { | |
| "epoch": 0.8234660925726588, | |
| "grad_norm": 5.808956146240234, | |
| "learning_rate": 4.177789738069609e-05, | |
| "loss": 1.8092, | |
| "step": 4590 | |
| }, | |
| { | |
| "epoch": 0.8252601363473269, | |
| "grad_norm": 8.538715362548828, | |
| "learning_rate": 4.1759956942949406e-05, | |
| "loss": 1.9468, | |
| "step": 4600 | |
| }, | |
| { | |
| "epoch": 0.8270541801219949, | |
| "grad_norm": 7.508995532989502, | |
| "learning_rate": 4.174201650520273e-05, | |
| "loss": 1.9887, | |
| "step": 4610 | |
| }, | |
| { | |
| "epoch": 0.8288482238966631, | |
| "grad_norm": 7.2825446128845215, | |
| "learning_rate": 4.172407606745605e-05, | |
| "loss": 1.9767, | |
| "step": 4620 | |
| }, | |
| { | |
| "epoch": 0.8306422676713312, | |
| "grad_norm": 7.998370170593262, | |
| "learning_rate": 4.1706135629709365e-05, | |
| "loss": 1.9921, | |
| "step": 4630 | |
| }, | |
| { | |
| "epoch": 0.8324363114459993, | |
| "grad_norm": 6.514996528625488, | |
| "learning_rate": 4.168819519196269e-05, | |
| "loss": 1.8236, | |
| "step": 4640 | |
| }, | |
| { | |
| "epoch": 0.8342303552206674, | |
| "grad_norm": 11.413960456848145, | |
| "learning_rate": 4.1670254754216006e-05, | |
| "loss": 1.9107, | |
| "step": 4650 | |
| }, | |
| { | |
| "epoch": 0.8360243989953355, | |
| "grad_norm": 10.405426025390625, | |
| "learning_rate": 4.165231431646932e-05, | |
| "loss": 1.936, | |
| "step": 4660 | |
| }, | |
| { | |
| "epoch": 0.8378184427700036, | |
| "grad_norm": 7.796232223510742, | |
| "learning_rate": 4.163437387872264e-05, | |
| "loss": 1.9083, | |
| "step": 4670 | |
| }, | |
| { | |
| "epoch": 0.8396124865446717, | |
| "grad_norm": 7.413230895996094, | |
| "learning_rate": 4.161643344097596e-05, | |
| "loss": 1.9956, | |
| "step": 4680 | |
| }, | |
| { | |
| "epoch": 0.8414065303193398, | |
| "grad_norm": 6.458949565887451, | |
| "learning_rate": 4.159849300322928e-05, | |
| "loss": 1.8525, | |
| "step": 4690 | |
| }, | |
| { | |
| "epoch": 0.843200574094008, | |
| "grad_norm": 7.839207172393799, | |
| "learning_rate": 4.15805525654826e-05, | |
| "loss": 1.9213, | |
| "step": 4700 | |
| }, | |
| { | |
| "epoch": 0.844994617868676, | |
| "grad_norm": 7.596676349639893, | |
| "learning_rate": 4.1562612127735916e-05, | |
| "loss": 1.7207, | |
| "step": 4710 | |
| }, | |
| { | |
| "epoch": 0.8467886616433441, | |
| "grad_norm": 8.215608596801758, | |
| "learning_rate": 4.154467168998924e-05, | |
| "loss": 1.8861, | |
| "step": 4720 | |
| }, | |
| { | |
| "epoch": 0.8485827054180122, | |
| "grad_norm": 8.735016822814941, | |
| "learning_rate": 4.152673125224256e-05, | |
| "loss": 1.7922, | |
| "step": 4730 | |
| }, | |
| { | |
| "epoch": 0.8503767491926802, | |
| "grad_norm": 7.5839667320251465, | |
| "learning_rate": 4.1508790814495875e-05, | |
| "loss": 1.8327, | |
| "step": 4740 | |
| }, | |
| { | |
| "epoch": 0.8521707929673484, | |
| "grad_norm": 8.134754180908203, | |
| "learning_rate": 4.14908503767492e-05, | |
| "loss": 2.0809, | |
| "step": 4750 | |
| }, | |
| { | |
| "epoch": 0.8539648367420165, | |
| "grad_norm": 6.481571674346924, | |
| "learning_rate": 4.1472909939002516e-05, | |
| "loss": 1.8645, | |
| "step": 4760 | |
| }, | |
| { | |
| "epoch": 0.8557588805166846, | |
| "grad_norm": 6.660789489746094, | |
| "learning_rate": 4.145496950125583e-05, | |
| "loss": 1.8249, | |
| "step": 4770 | |
| }, | |
| { | |
| "epoch": 0.8575529242913527, | |
| "grad_norm": 9.439289093017578, | |
| "learning_rate": 4.143702906350915e-05, | |
| "loss": 1.9101, | |
| "step": 4780 | |
| }, | |
| { | |
| "epoch": 0.8593469680660208, | |
| "grad_norm": 7.637185096740723, | |
| "learning_rate": 4.141908862576247e-05, | |
| "loss": 1.8607, | |
| "step": 4790 | |
| }, | |
| { | |
| "epoch": 0.8611410118406889, | |
| "grad_norm": 6.203429698944092, | |
| "learning_rate": 4.1401148188015785e-05, | |
| "loss": 1.6808, | |
| "step": 4800 | |
| }, | |
| { | |
| "epoch": 0.862935055615357, | |
| "grad_norm": 7.630656719207764, | |
| "learning_rate": 4.138320775026911e-05, | |
| "loss": 1.8628, | |
| "step": 4810 | |
| }, | |
| { | |
| "epoch": 0.8647290993900251, | |
| "grad_norm": 7.22261905670166, | |
| "learning_rate": 4.1365267312522426e-05, | |
| "loss": 1.8176, | |
| "step": 4820 | |
| }, | |
| { | |
| "epoch": 0.8665231431646933, | |
| "grad_norm": 7.170011520385742, | |
| "learning_rate": 4.134732687477575e-05, | |
| "loss": 1.9082, | |
| "step": 4830 | |
| }, | |
| { | |
| "epoch": 0.8683171869393613, | |
| "grad_norm": 6.344324588775635, | |
| "learning_rate": 4.132938643702907e-05, | |
| "loss": 1.8772, | |
| "step": 4840 | |
| }, | |
| { | |
| "epoch": 0.8701112307140294, | |
| "grad_norm": 8.296944618225098, | |
| "learning_rate": 4.1311445999282385e-05, | |
| "loss": 1.7995, | |
| "step": 4850 | |
| }, | |
| { | |
| "epoch": 0.8719052744886975, | |
| "grad_norm": 10.2393159866333, | |
| "learning_rate": 4.129350556153571e-05, | |
| "loss": 1.7471, | |
| "step": 4860 | |
| }, | |
| { | |
| "epoch": 0.8736993182633657, | |
| "grad_norm": 6.657433032989502, | |
| "learning_rate": 4.127556512378902e-05, | |
| "loss": 1.8175, | |
| "step": 4870 | |
| }, | |
| { | |
| "epoch": 0.8754933620380337, | |
| "grad_norm": 9.101529121398926, | |
| "learning_rate": 4.125762468604234e-05, | |
| "loss": 1.8132, | |
| "step": 4880 | |
| }, | |
| { | |
| "epoch": 0.8772874058127018, | |
| "grad_norm": 8.015061378479004, | |
| "learning_rate": 4.123968424829566e-05, | |
| "loss": 1.7798, | |
| "step": 4890 | |
| }, | |
| { | |
| "epoch": 0.8790814495873699, | |
| "grad_norm": 8.774176597595215, | |
| "learning_rate": 4.122174381054898e-05, | |
| "loss": 1.823, | |
| "step": 4900 | |
| }, | |
| { | |
| "epoch": 0.8808754933620381, | |
| "grad_norm": 8.144107818603516, | |
| "learning_rate": 4.1203803372802295e-05, | |
| "loss": 1.8932, | |
| "step": 4910 | |
| }, | |
| { | |
| "epoch": 0.8826695371367062, | |
| "grad_norm": 9.238556861877441, | |
| "learning_rate": 4.118586293505562e-05, | |
| "loss": 1.9033, | |
| "step": 4920 | |
| }, | |
| { | |
| "epoch": 0.8844635809113742, | |
| "grad_norm": 7.814840316772461, | |
| "learning_rate": 4.1167922497308937e-05, | |
| "loss": 1.9437, | |
| "step": 4930 | |
| }, | |
| { | |
| "epoch": 0.8862576246860423, | |
| "grad_norm": 8.720809936523438, | |
| "learning_rate": 4.1149982059562254e-05, | |
| "loss": 2.0195, | |
| "step": 4940 | |
| }, | |
| { | |
| "epoch": 0.8880516684607105, | |
| "grad_norm": 8.207976341247559, | |
| "learning_rate": 4.113204162181558e-05, | |
| "loss": 1.6992, | |
| "step": 4950 | |
| }, | |
| { | |
| "epoch": 0.8898457122353786, | |
| "grad_norm": 6.2852301597595215, | |
| "learning_rate": 4.1114101184068895e-05, | |
| "loss": 1.9237, | |
| "step": 4960 | |
| }, | |
| { | |
| "epoch": 0.8916397560100466, | |
| "grad_norm": 7.93939208984375, | |
| "learning_rate": 4.109616074632221e-05, | |
| "loss": 1.8576, | |
| "step": 4970 | |
| }, | |
| { | |
| "epoch": 0.8934337997847147, | |
| "grad_norm": 7.219597339630127, | |
| "learning_rate": 4.107822030857553e-05, | |
| "loss": 1.9087, | |
| "step": 4980 | |
| }, | |
| { | |
| "epoch": 0.8952278435593829, | |
| "grad_norm": 6.388714790344238, | |
| "learning_rate": 4.106027987082885e-05, | |
| "loss": 1.8785, | |
| "step": 4990 | |
| }, | |
| { | |
| "epoch": 0.897021887334051, | |
| "grad_norm": 7.048055648803711, | |
| "learning_rate": 4.1042339433082164e-05, | |
| "loss": 1.7598, | |
| "step": 5000 | |
| }, | |
| { | |
| "epoch": 0.898815931108719, | |
| "grad_norm": 9.254620552062988, | |
| "learning_rate": 4.102439899533549e-05, | |
| "loss": 1.8903, | |
| "step": 5010 | |
| }, | |
| { | |
| "epoch": 0.9006099748833871, | |
| "grad_norm": 7.499938488006592, | |
| "learning_rate": 4.1006458557588805e-05, | |
| "loss": 2.0262, | |
| "step": 5020 | |
| }, | |
| { | |
| "epoch": 0.9024040186580553, | |
| "grad_norm": 7.229835033416748, | |
| "learning_rate": 4.098851811984213e-05, | |
| "loss": 1.7852, | |
| "step": 5030 | |
| }, | |
| { | |
| "epoch": 0.9041980624327234, | |
| "grad_norm": 7.887816429138184, | |
| "learning_rate": 4.097057768209545e-05, | |
| "loss": 1.8154, | |
| "step": 5040 | |
| }, | |
| { | |
| "epoch": 0.9059921062073915, | |
| "grad_norm": 11.909371376037598, | |
| "learning_rate": 4.0952637244348764e-05, | |
| "loss": 1.8699, | |
| "step": 5050 | |
| }, | |
| { | |
| "epoch": 0.9077861499820595, | |
| "grad_norm": 9.439387321472168, | |
| "learning_rate": 4.093469680660209e-05, | |
| "loss": 2.0234, | |
| "step": 5060 | |
| }, | |
| { | |
| "epoch": 0.9095801937567277, | |
| "grad_norm": 7.028390884399414, | |
| "learning_rate": 4.0916756368855405e-05, | |
| "loss": 1.8577, | |
| "step": 5070 | |
| }, | |
| { | |
| "epoch": 0.9113742375313958, | |
| "grad_norm": 8.708728790283203, | |
| "learning_rate": 4.089881593110872e-05, | |
| "loss": 1.8517, | |
| "step": 5080 | |
| }, | |
| { | |
| "epoch": 0.9131682813060639, | |
| "grad_norm": 7.153110027313232, | |
| "learning_rate": 4.088087549336204e-05, | |
| "loss": 1.8174, | |
| "step": 5090 | |
| }, | |
| { | |
| "epoch": 0.9149623250807319, | |
| "grad_norm": 10.017348289489746, | |
| "learning_rate": 4.086293505561536e-05, | |
| "loss": 1.7079, | |
| "step": 5100 | |
| }, | |
| { | |
| "epoch": 0.9167563688554, | |
| "grad_norm": 8.1173734664917, | |
| "learning_rate": 4.0844994617868674e-05, | |
| "loss": 1.914, | |
| "step": 5110 | |
| }, | |
| { | |
| "epoch": 0.9185504126300682, | |
| "grad_norm": 7.267770290374756, | |
| "learning_rate": 4.0827054180122e-05, | |
| "loss": 1.8668, | |
| "step": 5120 | |
| }, | |
| { | |
| "epoch": 0.9203444564047363, | |
| "grad_norm": 8.86645793914795, | |
| "learning_rate": 4.0809113742375316e-05, | |
| "loss": 1.8357, | |
| "step": 5130 | |
| }, | |
| { | |
| "epoch": 0.9221385001794044, | |
| "grad_norm": 8.062966346740723, | |
| "learning_rate": 4.079117330462863e-05, | |
| "loss": 1.8604, | |
| "step": 5140 | |
| }, | |
| { | |
| "epoch": 0.9239325439540724, | |
| "grad_norm": 7.72880220413208, | |
| "learning_rate": 4.077323286688196e-05, | |
| "loss": 1.9482, | |
| "step": 5150 | |
| }, | |
| { | |
| "epoch": 0.9257265877287406, | |
| "grad_norm": 8.267526626586914, | |
| "learning_rate": 4.0755292429135274e-05, | |
| "loss": 1.6839, | |
| "step": 5160 | |
| }, | |
| { | |
| "epoch": 0.9275206315034087, | |
| "grad_norm": 10.039454460144043, | |
| "learning_rate": 4.073735199138859e-05, | |
| "loss": 1.9365, | |
| "step": 5170 | |
| }, | |
| { | |
| "epoch": 0.9293146752780768, | |
| "grad_norm": 9.875499725341797, | |
| "learning_rate": 4.071941155364191e-05, | |
| "loss": 1.8115, | |
| "step": 5180 | |
| }, | |
| { | |
| "epoch": 0.9311087190527448, | |
| "grad_norm": 7.694703102111816, | |
| "learning_rate": 4.0701471115895226e-05, | |
| "loss": 1.8054, | |
| "step": 5190 | |
| }, | |
| { | |
| "epoch": 0.932902762827413, | |
| "grad_norm": 6.072929382324219, | |
| "learning_rate": 4.068353067814855e-05, | |
| "loss": 1.9556, | |
| "step": 5200 | |
| }, | |
| { | |
| "epoch": 0.9346968066020811, | |
| "grad_norm": 9.701952934265137, | |
| "learning_rate": 4.066559024040187e-05, | |
| "loss": 1.9576, | |
| "step": 5210 | |
| }, | |
| { | |
| "epoch": 0.9364908503767492, | |
| "grad_norm": 8.353795051574707, | |
| "learning_rate": 4.0647649802655185e-05, | |
| "loss": 1.8579, | |
| "step": 5220 | |
| }, | |
| { | |
| "epoch": 0.9382848941514172, | |
| "grad_norm": 7.89420747756958, | |
| "learning_rate": 4.062970936490851e-05, | |
| "loss": 1.8866, | |
| "step": 5230 | |
| }, | |
| { | |
| "epoch": 0.9400789379260854, | |
| "grad_norm": 5.647555351257324, | |
| "learning_rate": 4.0611768927161826e-05, | |
| "loss": 1.742, | |
| "step": 5240 | |
| }, | |
| { | |
| "epoch": 0.9418729817007535, | |
| "grad_norm": 8.360129356384277, | |
| "learning_rate": 4.059382848941514e-05, | |
| "loss": 1.9297, | |
| "step": 5250 | |
| }, | |
| { | |
| "epoch": 0.9436670254754216, | |
| "grad_norm": 10.252833366394043, | |
| "learning_rate": 4.057588805166847e-05, | |
| "loss": 1.9036, | |
| "step": 5260 | |
| }, | |
| { | |
| "epoch": 0.9454610692500897, | |
| "grad_norm": 8.29806900024414, | |
| "learning_rate": 4.0557947613921784e-05, | |
| "loss": 1.7497, | |
| "step": 5270 | |
| }, | |
| { | |
| "epoch": 0.9472551130247578, | |
| "grad_norm": 6.64280891418457, | |
| "learning_rate": 4.05400071761751e-05, | |
| "loss": 1.9239, | |
| "step": 5280 | |
| }, | |
| { | |
| "epoch": 0.9490491567994259, | |
| "grad_norm": 7.992321014404297, | |
| "learning_rate": 4.052206673842842e-05, | |
| "loss": 1.8058, | |
| "step": 5290 | |
| }, | |
| { | |
| "epoch": 0.950843200574094, | |
| "grad_norm": 9.094325065612793, | |
| "learning_rate": 4.0504126300681736e-05, | |
| "loss": 1.9108, | |
| "step": 5300 | |
| }, | |
| { | |
| "epoch": 0.9526372443487621, | |
| "grad_norm": 8.64013385772705, | |
| "learning_rate": 4.048618586293505e-05, | |
| "loss": 2.1198, | |
| "step": 5310 | |
| }, | |
| { | |
| "epoch": 0.9544312881234303, | |
| "grad_norm": 6.910890579223633, | |
| "learning_rate": 4.046824542518838e-05, | |
| "loss": 1.9502, | |
| "step": 5320 | |
| }, | |
| { | |
| "epoch": 0.9562253318980983, | |
| "grad_norm": 7.383171081542969, | |
| "learning_rate": 4.0450304987441695e-05, | |
| "loss": 1.7905, | |
| "step": 5330 | |
| }, | |
| { | |
| "epoch": 0.9580193756727664, | |
| "grad_norm": 8.704651832580566, | |
| "learning_rate": 4.043236454969501e-05, | |
| "loss": 1.9428, | |
| "step": 5340 | |
| }, | |
| { | |
| "epoch": 0.9598134194474345, | |
| "grad_norm": 8.296703338623047, | |
| "learning_rate": 4.0414424111948336e-05, | |
| "loss": 1.8346, | |
| "step": 5350 | |
| }, | |
| { | |
| "epoch": 0.9616074632221027, | |
| "grad_norm": 7.613495349884033, | |
| "learning_rate": 4.039648367420165e-05, | |
| "loss": 1.9988, | |
| "step": 5360 | |
| }, | |
| { | |
| "epoch": 0.9634015069967707, | |
| "grad_norm": 8.145048141479492, | |
| "learning_rate": 4.037854323645497e-05, | |
| "loss": 1.8056, | |
| "step": 5370 | |
| }, | |
| { | |
| "epoch": 0.9651955507714388, | |
| "grad_norm": 9.955933570861816, | |
| "learning_rate": 4.0360602798708295e-05, | |
| "loss": 1.87, | |
| "step": 5380 | |
| }, | |
| { | |
| "epoch": 0.9669895945461069, | |
| "grad_norm": 5.713760852813721, | |
| "learning_rate": 4.0342662360961605e-05, | |
| "loss": 1.8019, | |
| "step": 5390 | |
| }, | |
| { | |
| "epoch": 0.9687836383207751, | |
| "grad_norm": 9.013385772705078, | |
| "learning_rate": 4.032472192321493e-05, | |
| "loss": 1.922, | |
| "step": 5400 | |
| }, | |
| { | |
| "epoch": 0.9705776820954432, | |
| "grad_norm": 8.361518859863281, | |
| "learning_rate": 4.0306781485468246e-05, | |
| "loss": 1.8847, | |
| "step": 5410 | |
| }, | |
| { | |
| "epoch": 0.9723717258701112, | |
| "grad_norm": 8.547319412231445, | |
| "learning_rate": 4.0288841047721564e-05, | |
| "loss": 1.9232, | |
| "step": 5420 | |
| }, | |
| { | |
| "epoch": 0.9741657696447793, | |
| "grad_norm": 8.168745994567871, | |
| "learning_rate": 4.027090060997489e-05, | |
| "loss": 1.834, | |
| "step": 5430 | |
| }, | |
| { | |
| "epoch": 0.9759598134194475, | |
| "grad_norm": 7.9649810791015625, | |
| "learning_rate": 4.0252960172228205e-05, | |
| "loss": 1.8909, | |
| "step": 5440 | |
| }, | |
| { | |
| "epoch": 0.9777538571941156, | |
| "grad_norm": 7.361401557922363, | |
| "learning_rate": 4.023501973448152e-05, | |
| "loss": 1.8859, | |
| "step": 5450 | |
| }, | |
| { | |
| "epoch": 0.9795479009687836, | |
| "grad_norm": 6.616429328918457, | |
| "learning_rate": 4.0217079296734846e-05, | |
| "loss": 1.8936, | |
| "step": 5460 | |
| }, | |
| { | |
| "epoch": 0.9813419447434517, | |
| "grad_norm": 8.482680320739746, | |
| "learning_rate": 4.0199138858988163e-05, | |
| "loss": 1.8714, | |
| "step": 5470 | |
| }, | |
| { | |
| "epoch": 0.9831359885181199, | |
| "grad_norm": 7.886808395385742, | |
| "learning_rate": 4.018119842124148e-05, | |
| "loss": 1.8772, | |
| "step": 5480 | |
| }, | |
| { | |
| "epoch": 0.984930032292788, | |
| "grad_norm": 8.63215446472168, | |
| "learning_rate": 4.01632579834948e-05, | |
| "loss": 1.8233, | |
| "step": 5490 | |
| }, | |
| { | |
| "epoch": 0.986724076067456, | |
| "grad_norm": 7.060615539550781, | |
| "learning_rate": 4.0145317545748115e-05, | |
| "loss": 1.7991, | |
| "step": 5500 | |
| }, | |
| { | |
| "epoch": 0.9885181198421241, | |
| "grad_norm": 7.455167293548584, | |
| "learning_rate": 4.012737710800143e-05, | |
| "loss": 1.8871, | |
| "step": 5510 | |
| }, | |
| { | |
| "epoch": 0.9903121636167922, | |
| "grad_norm": 8.120213508605957, | |
| "learning_rate": 4.0109436670254756e-05, | |
| "loss": 1.7909, | |
| "step": 5520 | |
| }, | |
| { | |
| "epoch": 0.9921062073914604, | |
| "grad_norm": 8.847984313964844, | |
| "learning_rate": 4.0091496232508074e-05, | |
| "loss": 1.9595, | |
| "step": 5530 | |
| }, | |
| { | |
| "epoch": 0.9939002511661285, | |
| "grad_norm": 7.193493366241455, | |
| "learning_rate": 4.007355579476139e-05, | |
| "loss": 1.7494, | |
| "step": 5540 | |
| }, | |
| { | |
| "epoch": 0.9956942949407965, | |
| "grad_norm": 9.286087989807129, | |
| "learning_rate": 4.0055615357014715e-05, | |
| "loss": 1.7659, | |
| "step": 5550 | |
| }, | |
| { | |
| "epoch": 0.9974883387154646, | |
| "grad_norm": 7.888460159301758, | |
| "learning_rate": 4.003767491926803e-05, | |
| "loss": 1.9127, | |
| "step": 5560 | |
| }, | |
| { | |
| "epoch": 0.9992823824901328, | |
| "grad_norm": 9.469123840332031, | |
| "learning_rate": 4.0019734481521356e-05, | |
| "loss": 1.9231, | |
| "step": 5570 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "eval_loss": 1.932178020477295, | |
| "eval_runtime": 189.8033, | |
| "eval_samples_per_second": 13.05, | |
| "eval_steps_per_second": 13.05, | |
| "step": 5574 | |
| } | |
| ], | |
| "logging_steps": 10, | |
| "max_steps": 27870, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 5, | |
| "save_steps": 500, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": false | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 3.0165015541579776e+16, | |
| "train_batch_size": 4, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
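
The dump ends after the first full epoch (step 5574 of a planned 27870), with the evaluation result recorded as the final entry of `log_history`. Below is a minimal sketch for inspecting a dump like this one, assuming it is saved locally as `trainer_state.json` and that the format is a Hugging Face Trainer state file; the path, variable names, and printed summary are illustrative additions, not part of the original file.

```python
# Minimal sketch: summarize a Trainer state dump (assumed filename).
import json

with open("trainer_state.json") as f:
    # Python's json module accepts non-finite tokens such as Infinity or NaN
    # by default, mapping them to float("inf") / float("nan").
    state = json.load(f)

# Training entries carry "loss"; evaluation entries carry "eval_loss".
train_logs = [e for e in state["log_history"] if "loss" in e]
eval_logs = [e for e in state["log_history"] if "eval_loss" in e]

last_eval = eval_logs[-1]
print(f"reached step {last_eval['step']} of {state['max_steps']} planned steps")
print(f"epoch {last_eval['epoch']:.1f} eval_loss: {last_eval['eval_loss']:.4f}")
print(f"last logged training loss (step {train_logs[-1]['step']}): "
      f"{train_logs[-1]['loss']:.4f}")
```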