{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.42306760358614,
  "eval_steps": 500,
  "global_step": 10000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.00242306760358614,
      "grad_norm": 0.22300943732261658,
      "learning_rate": 1.8e-06,
      "loss": 1.2877,
      "step": 10
    },
    {
      "epoch": 0.00484613520717228,
      "grad_norm": 0.13678084313869476,
      "learning_rate": 3.8e-06,
      "loss": 1.3473,
      "step": 20
    },
    {
      "epoch": 0.00726920281075842,
      "grad_norm": 0.22025036811828613,
      "learning_rate": 5.8e-06,
      "loss": 1.4399,
      "step": 30
    },
    {
      "epoch": 0.00969227041434456,
      "grad_norm": 0.1782158464193344,
      "learning_rate": 7.8e-06,
      "loss": 1.2627,
      "step": 40
    },
    {
      "epoch": 0.0121153380179307,
      "grad_norm": 0.2191566377878189,
      "learning_rate": 9.800000000000001e-06,
      "loss": 1.3235,
      "step": 50
    },
    {
      "epoch": 0.01453840562151684,
      "grad_norm": 0.2430618554353714,
      "learning_rate": 1.18e-05,
      "loss": 1.4062,
      "step": 60
    },
    {
      "epoch": 0.01696147322510298,
      "grad_norm": 0.2611038088798523,
      "learning_rate": 1.3800000000000002e-05,
      "loss": 1.1462,
      "step": 70
    },
    {
      "epoch": 0.01938454082868912,
      "grad_norm": 0.18182355165481567,
      "learning_rate": 1.58e-05,
      "loss": 1.1858,
      "step": 80
    },
    {
      "epoch": 0.02180760843227526,
      "grad_norm": 0.23076005280017853,
      "learning_rate": 1.78e-05,
      "loss": 1.0799,
      "step": 90
    },
    {
      "epoch": 0.0242306760358614,
      "grad_norm": 0.4523642957210541,
      "learning_rate": 1.9800000000000004e-05,
      "loss": 1.1662,
      "step": 100
    },
    {
      "epoch": 0.02665374363944754,
      "grad_norm": 0.23707886040210724,
      "learning_rate": 2.18e-05,
      "loss": 1.0855,
      "step": 110
    },
    {
      "epoch": 0.02907681124303368,
      "grad_norm": 0.2294093519449234,
      "learning_rate": 2.38e-05,
      "loss": 0.9007,
      "step": 120
    },
    {
      "epoch": 0.03149987884661982,
      "grad_norm": 0.23308613896369934,
      "learning_rate": 2.58e-05,
      "loss": 0.9462,
      "step": 130
    },
    {
      "epoch": 0.03392294645020596,
      "grad_norm": 0.21758387982845306,
      "learning_rate": 2.7800000000000005e-05,
      "loss": 0.8586,
      "step": 140
    },
    {
      "epoch": 0.0363460140537921,
      "grad_norm": 0.2084963023662567,
      "learning_rate": 2.98e-05,
      "loss": 0.6916,
      "step": 150
    },
    {
      "epoch": 0.03876908165737824,
      "grad_norm": 0.2152082622051239,
      "learning_rate": 3.18e-05,
      "loss": 0.7014,
      "step": 160
    },
    {
      "epoch": 0.04119214926096438,
      "grad_norm": 0.22858214378356934,
      "learning_rate": 3.38e-05,
      "loss": 0.6022,
      "step": 170
    },
    {
      "epoch": 0.04361521686455052,
      "grad_norm": 0.1915079951286316,
      "learning_rate": 3.58e-05,
      "loss": 0.6187,
      "step": 180
    },
    {
      "epoch": 0.04603828446813666,
      "grad_norm": 0.12963800132274628,
      "learning_rate": 3.7800000000000004e-05,
      "loss": 0.4887,
      "step": 190
    },
    {
      "epoch": 0.0484613520717228,
      "grad_norm": 0.13217966258525848,
      "learning_rate": 3.9800000000000005e-05,
      "loss": 0.4853,
      "step": 200
    },
    {
      "epoch": 0.05088441967530894,
      "grad_norm": 0.17833656072616577,
      "learning_rate": 4.18e-05,
      "loss": 0.4678,
      "step": 210
    },
    {
      "epoch": 0.05330748727889508,
      "grad_norm": 0.1171719878911972,
      "learning_rate": 4.38e-05,
      "loss": 0.4291,
      "step": 220
    },
    {
      "epoch": 0.05573055488248122,
      "grad_norm": 0.1501215249300003,
      "learning_rate": 4.58e-05,
      "loss": 0.4234,
      "step": 230
    },
    {
      "epoch": 0.05815362248606736,
      "grad_norm": 0.10823249816894531,
      "learning_rate": 4.78e-05,
      "loss": 0.3753,
      "step": 240
    },
    {
      "epoch": 0.0605766900896535,
      "grad_norm": 0.12995333969593048,
      "learning_rate": 4.9800000000000004e-05,
      "loss": 0.3648,
      "step": 250
    },
    {
      "epoch": 0.06299975769323964,
      "grad_norm": 0.14510154724121094,
      "learning_rate": 5.1800000000000005e-05,
      "loss": 0.3304,
      "step": 260
    },
    {
      "epoch": 0.06542282529682578,
      "grad_norm": 0.15389852225780487,
      "learning_rate": 5.380000000000001e-05,
      "loss": 0.3509,
      "step": 270
    },
    {
      "epoch": 0.06784589290041192,
      "grad_norm": 0.13117581605911255,
      "learning_rate": 5.580000000000001e-05,
      "loss": 0.3177,
      "step": 280
    },
    {
      "epoch": 0.07026896050399806,
      "grad_norm": 0.14186632633209229,
      "learning_rate": 5.7799999999999995e-05,
      "loss": 0.3259,
      "step": 290
    },
    {
      "epoch": 0.0726920281075842,
      "grad_norm": 0.17051789164543152,
      "learning_rate": 5.9800000000000003e-05,
      "loss": 0.292,
      "step": 300
    },
    {
      "epoch": 0.07511509571117034,
      "grad_norm": 0.14579974114894867,
      "learning_rate": 6.18e-05,
      "loss": 0.2928,
      "step": 310
    },
    {
      "epoch": 0.07753816331475648,
      "grad_norm": 0.14740227162837982,
      "learning_rate": 6.38e-05,
      "loss": 0.2593,
      "step": 320
    },
    {
      "epoch": 0.07996123091834262,
      "grad_norm": 0.16401788592338562,
      "learning_rate": 6.58e-05,
      "loss": 0.2481,
      "step": 330
    },
    {
      "epoch": 0.08238429852192876,
      "grad_norm": 0.1546965092420578,
      "learning_rate": 6.780000000000001e-05,
      "loss": 0.2263,
      "step": 340
    },
    {
      "epoch": 0.0848073661255149,
      "grad_norm": 0.22539666295051575,
      "learning_rate": 6.98e-05,
      "loss": 0.2216,
      "step": 350
    },
    {
      "epoch": 0.08723043372910104,
      "grad_norm": 0.25954148173332214,
      "learning_rate": 7.18e-05,
      "loss": 0.2152,
      "step": 360
    },
    {
      "epoch": 0.08965350133268718,
      "grad_norm": 0.18402907252311707,
      "learning_rate": 7.38e-05,
      "loss": 0.2177,
      "step": 370
    },
    {
      "epoch": 0.09207656893627332,
      "grad_norm": 0.1356307864189148,
      "learning_rate": 7.58e-05,
      "loss": 0.1911,
      "step": 380
    },
    {
      "epoch": 0.09449963653985946,
      "grad_norm": 0.13355639576911926,
      "learning_rate": 7.780000000000001e-05,
      "loss": 0.1817,
      "step": 390
    },
    {
      "epoch": 0.0969227041434456,
      "grad_norm": 0.14798177778720856,
      "learning_rate": 7.98e-05,
      "loss": 0.1782,
      "step": 400
    },
    {
      "epoch": 0.09934577174703174,
      "grad_norm": 0.2079785168170929,
      "learning_rate": 8.18e-05,
      "loss": 0.1817,
      "step": 410
    },
    {
      "epoch": 0.10176883935061788,
      "grad_norm": 0.1562364548444748,
      "learning_rate": 8.38e-05,
      "loss": 0.1699,
      "step": 420
    },
    {
      "epoch": 0.10419190695420402,
      "grad_norm": 0.14666175842285156,
      "learning_rate": 8.58e-05,
      "loss": 0.1691,
      "step": 430
    },
    {
      "epoch": 0.10661497455779016,
      "grad_norm": 0.28095802664756775,
      "learning_rate": 8.78e-05,
      "loss": 0.1613,
      "step": 440
    },
    {
      "epoch": 0.1090380421613763,
      "grad_norm": 0.16671571135520935,
      "learning_rate": 8.98e-05,
      "loss": 0.1575,
      "step": 450
    },
    {
      "epoch": 0.11146110976496244,
      "grad_norm": 0.17770862579345703,
      "learning_rate": 9.180000000000001e-05,
      "loss": 0.1646,
      "step": 460
    },
    {
      "epoch": 0.11388417736854858,
      "grad_norm": 0.2173616737127304,
      "learning_rate": 9.38e-05,
      "loss": 0.1568,
      "step": 470
    },
    {
      "epoch": 0.11630724497213472,
      "grad_norm": 0.18545781075954437,
      "learning_rate": 9.58e-05,
      "loss": 0.1509,
      "step": 480
    },
    {
      "epoch": 0.11873031257572086,
      "grad_norm": 0.1835421770811081,
      "learning_rate": 9.78e-05,
      "loss": 0.1424,
      "step": 490
    },
    {
      "epoch": 0.121153380179307,
      "grad_norm": 0.14738881587982178,
      "learning_rate": 9.98e-05,
      "loss": 0.1352,
      "step": 500
    },
    {
      "epoch": 0.12357644778289315,
      "grad_norm": 0.1982613503932953,
      "learning_rate": 9.9999778549206e-05,
      "loss": 0.1336,
      "step": 510
    },
    {
      "epoch": 0.12599951538647927,
      "grad_norm": 0.23628918826580048,
      "learning_rate": 9.999901304280685e-05,
      "loss": 0.131,
      "step": 520
    },
    {
      "epoch": 0.12842258299006543,
      "grad_norm": 0.3614969253540039,
      "learning_rate": 9.999770075521164e-05,
      "loss": 0.1377,
      "step": 530
    },
    {
      "epoch": 0.13084565059365155,
      "grad_norm": 0.15150132775306702,
      "learning_rate": 9.99958417007713e-05,
      "loss": 0.1254,
      "step": 540
    },
    {
      "epoch": 0.1332687181972377,
      "grad_norm": 0.3162827789783478,
      "learning_rate": 9.999343589981615e-05,
      "loss": 0.1204,
      "step": 550
    },
    {
      "epoch": 0.13569178580082383,
      "grad_norm": 0.18979772925376892,
      "learning_rate": 9.999048337865568e-05,
      "loss": 0.1176,
      "step": 560
    },
    {
      "epoch": 0.13811485340441,
      "grad_norm": 0.22147120535373688,
      "learning_rate": 9.998698416957815e-05,
      "loss": 0.1146,
      "step": 570
    },
    {
      "epoch": 0.1405379210079961,
      "grad_norm": 0.222473606467247,
      "learning_rate": 9.998293831085037e-05,
      "loss": 0.1212,
      "step": 580
    },
    {
      "epoch": 0.14296098861158227,
      "grad_norm": 0.14630824327468872,
      "learning_rate": 9.997834584671719e-05,
      "loss": 0.1241,
      "step": 590
    },
    {
      "epoch": 0.1453840562151684,
      "grad_norm": 0.15462113916873932,
      "learning_rate": 9.997320682740107e-05,
      "loss": 0.12,
      "step": 600
    },
    {
      "epoch": 0.14780712381875455,
      "grad_norm": 0.19773511588573456,
      "learning_rate": 9.996752130910149e-05,
      "loss": 0.1246,
      "step": 610
    },
    {
      "epoch": 0.15023019142234068,
      "grad_norm": 0.22675354778766632,
      "learning_rate": 9.99612893539944e-05,
      "loss": 0.1145,
      "step": 620
    },
    {
      "epoch": 0.15265325902592683,
      "grad_norm": 0.15683938562870026,
      "learning_rate": 9.995451103023144e-05,
      "loss": 0.0963,
      "step": 630
    },
    {
      "epoch": 0.15507632662951296,
      "grad_norm": 0.23586426675319672,
      "learning_rate": 9.994718641193928e-05,
      "loss": 0.1043,
      "step": 640
    },
    {
      "epoch": 0.1574993942330991,
      "grad_norm": 0.28452053666114807,
      "learning_rate": 9.993931557921874e-05,
      "loss": 0.1126,
      "step": 650
    },
    {
      "epoch": 0.15992246183668524,
      "grad_norm": 0.21146593987941742,
      "learning_rate": 9.993089861814402e-05,
      "loss": 0.1118,
      "step": 660
    },
    {
      "epoch": 0.1623455294402714,
      "grad_norm": 0.21904832124710083,
      "learning_rate": 9.992193562076166e-05,
      "loss": 0.125,
      "step": 670
    },
    {
      "epoch": 0.16476859704385752,
      "grad_norm": 0.1765352338552475,
      "learning_rate": 9.991242668508954e-05,
      "loss": 0.1084,
      "step": 680
    },
    {
      "epoch": 0.16719166464744367,
      "grad_norm": 0.2080189287662506,
      "learning_rate": 9.990237191511587e-05,
      "loss": 0.0988,
      "step": 690
    },
    {
      "epoch": 0.1696147322510298,
      "grad_norm": 0.27506083250045776,
      "learning_rate": 9.989177142079802e-05,
      "loss": 0.0961,
      "step": 700
    },
    {
      "epoch": 0.17203779985461595,
      "grad_norm": 0.1665004938840866,
      "learning_rate": 9.988062531806126e-05,
      "loss": 0.1093,
      "step": 710
    },
    {
      "epoch": 0.17446086745820208,
      "grad_norm": 0.1868591457605362,
      "learning_rate": 9.986893372879762e-05,
      "loss": 0.0962,
      "step": 720
    },
    {
      "epoch": 0.17688393506178823,
      "grad_norm": 0.14843980967998505,
      "learning_rate": 9.985669678086443e-05,
      "loss": 0.096,
      "step": 730
    },
    {
      "epoch": 0.17930700266537436,
      "grad_norm": 0.12363710254430771,
      "learning_rate": 9.984391460808298e-05,
      "loss": 0.0822,
      "step": 740
    },
    {
      "epoch": 0.18173007026896051,
      "grad_norm": 0.1875678449869156,
      "learning_rate": 9.983058735023709e-05,
      "loss": 0.0921,
      "step": 750
    },
    {
      "epoch": 0.18415313787254664,
      "grad_norm": 0.15753141045570374,
      "learning_rate": 9.98167151530715e-05,
      "loss": 0.0852,
      "step": 760
    },
    {
      "epoch": 0.1865762054761328,
      "grad_norm": 0.14286907017230988,
      "learning_rate": 9.980229816829034e-05,
      "loss": 0.0879,
      "step": 770
    },
    {
      "epoch": 0.18899927307971892,
      "grad_norm": 0.18874263763427734,
      "learning_rate": 9.978733655355544e-05,
      "loss": 0.0848,
      "step": 780
    },
    {
      "epoch": 0.19142234068330508,
      "grad_norm": 0.21680274605751038,
      "learning_rate": 9.977183047248464e-05,
      "loss": 0.0849,
      "step": 790
    },
    {
      "epoch": 0.1938454082868912,
      "grad_norm": 0.24747930467128754,
      "learning_rate": 9.975578009464992e-05,
      "loss": 0.0892,
      "step": 800
    },
    {
      "epoch": 0.19626847589047736,
      "grad_norm": 0.14206859469413757,
      "learning_rate": 9.97391855955757e-05,
      "loss": 0.0928,
      "step": 810
    },
    {
      "epoch": 0.19869154349406348,
      "grad_norm": 0.22649221122264862,
      "learning_rate": 9.972204715673669e-05,
      "loss": 0.0873,
      "step": 820
    },
    {
      "epoch": 0.20111461109764964,
      "grad_norm": 0.19492876529693604,
      "learning_rate": 9.970436496555617e-05,
      "loss": 0.0894,
      "step": 830
    },
    {
      "epoch": 0.20353767870123576,
      "grad_norm": 0.20269566774368286,
      "learning_rate": 9.968613921540373e-05,
      "loss": 0.081,
      "step": 840
    },
    {
      "epoch": 0.20596074630482192,
      "grad_norm": 0.24085013568401337,
      "learning_rate": 9.966737010559326e-05,
      "loss": 0.0989,
      "step": 850
    },
    {
      "epoch": 0.20838381390840804,
      "grad_norm": 0.15436244010925293,
      "learning_rate": 9.964805784138072e-05,
      "loss": 0.091,
      "step": 860
    },
    {
      "epoch": 0.21080688151199417,
      "grad_norm": 0.17235654592514038,
      "learning_rate": 9.962820263396195e-05,
      "loss": 0.0862,
      "step": 870
    },
    {
      "epoch": 0.21322994911558033,
      "grad_norm": 0.20409919321537018,
      "learning_rate": 9.960780470047033e-05,
      "loss": 0.0797,
      "step": 880
    },
    {
      "epoch": 0.21565301671916645,
      "grad_norm": 0.20658552646636963,
      "learning_rate": 9.958686426397437e-05,
      "loss": 0.0778,
      "step": 890
    },
    {
      "epoch": 0.2180760843227526,
      "grad_norm": 0.1812807023525238,
      "learning_rate": 9.956538155347534e-05,
      "loss": 0.0903,
      "step": 900
    },
    {
      "epoch": 0.22049915192633873,
      "grad_norm": 0.21331733465194702,
      "learning_rate": 9.95433568039047e-05,
      "loss": 0.0681,
      "step": 910
    },
    {
      "epoch": 0.2229222195299249,
      "grad_norm": 0.17142294347286224,
      "learning_rate": 9.952079025612162e-05,
      "loss": 0.0863,
      "step": 920
    },
    {
      "epoch": 0.225345287133511,
      "grad_norm": 0.3264574706554413,
      "learning_rate": 9.949768215691022e-05,
      "loss": 0.0782,
      "step": 930
    },
    {
      "epoch": 0.22776835473709717,
      "grad_norm": 0.18573996424674988,
      "learning_rate": 9.9474032758977e-05,
      "loss": 0.075,
      "step": 940
    },
    {
      "epoch": 0.2301914223406833,
      "grad_norm": 0.2647434175014496,
      "learning_rate": 9.944984232094794e-05,
      "loss": 0.089,
      "step": 950
    },
    {
      "epoch": 0.23261448994426945,
      "grad_norm": 0.1720058023929596,
      "learning_rate": 9.942511110736584e-05,
      "loss": 0.0709,
      "step": 960
    },
    {
      "epoch": 0.23503755754785557,
      "grad_norm": 0.23745214939117432,
      "learning_rate": 9.939983938868726e-05,
      "loss": 0.0727,
      "step": 970
    },
    {
      "epoch": 0.23746062515144173,
      "grad_norm": 0.20860709249973297,
      "learning_rate": 9.93740274412797e-05,
      "loss": 0.0834,
      "step": 980
    },
    {
      "epoch": 0.23988369275502786,
      "grad_norm": 0.19187010824680328,
      "learning_rate": 9.934767554741846e-05,
      "loss": 0.0789,
      "step": 990
    },
    {
      "epoch": 0.242306760358614,
      "grad_norm": 0.1633918285369873,
      "learning_rate": 9.932078399528361e-05,
      "loss": 0.0798,
      "step": 1000
    },
    {
      "epoch": 0.24472982796220014,
      "grad_norm": 0.15381617844104767,
      "learning_rate": 9.929335307895689e-05,
      "loss": 0.0701,
      "step": 1010
    },
    {
      "epoch": 0.2471528955657863,
      "grad_norm": 0.17352166771888733,
      "learning_rate": 9.926538309841839e-05,
      "loss": 0.0792,
      "step": 1020
    },
    {
      "epoch": 0.24957596316937242,
      "grad_norm": 0.40674543380737305,
      "learning_rate": 9.923687435954334e-05,
      "loss": 0.0754,
      "step": 1030
    },
    {
      "epoch": 0.25199903077295854,
      "grad_norm": 0.2632424831390381,
      "learning_rate": 9.920782717409873e-05,
      "loss": 0.0773,
      "step": 1040
    },
    {
      "epoch": 0.2544220983765447,
      "grad_norm": 0.19672192633152008,
      "learning_rate": 9.917824185973994e-05,
      "loss": 0.0688,
      "step": 1050
    },
    {
      "epoch": 0.25684516598013085,
      "grad_norm": 0.22081975638866425,
      "learning_rate": 9.914811874000723e-05,
      "loss": 0.0875,
      "step": 1060
    },
    {
      "epoch": 0.259268233583717,
      "grad_norm": 0.35084763169288635,
      "learning_rate": 9.911745814432218e-05,
      "loss": 0.08,
      "step": 1070
    },
    {
      "epoch": 0.2616913011873031,
      "grad_norm": 0.25521281361579895,
      "learning_rate": 9.90862604079842e-05,
      "loss": 0.0842,
      "step": 1080
    },
    {
      "epoch": 0.2641143687908893,
      "grad_norm": 0.18135966360569,
      "learning_rate": 9.90545258721667e-05,
      "loss": 0.0632,
      "step": 1090
    },
    {
      "epoch": 0.2665374363944754,
      "grad_norm": 0.21649938821792603,
      "learning_rate": 9.90222548839135e-05,
      "loss": 0.0737,
      "step": 1100
    },
    {
      "epoch": 0.26896050399806154,
      "grad_norm": 0.22486425936222076,
      "learning_rate": 9.898944779613495e-05,
      "loss": 0.0781,
      "step": 1110
    },
    {
      "epoch": 0.27138357160164767,
      "grad_norm": 0.1296594738960266,
      "learning_rate": 9.89561049676041e-05,
      "loss": 0.0712,
      "step": 1120
    },
    {
      "epoch": 0.27380663920523385,
      "grad_norm": 0.20417578518390656,
      "learning_rate": 9.89222267629528e-05,
      "loss": 0.0754,
      "step": 1130
    },
    {
      "epoch": 0.27622970680882,
      "grad_norm": 0.36884164810180664,
      "learning_rate": 9.888781355266763e-05,
      "loss": 0.0788,
      "step": 1140
    },
    {
      "epoch": 0.2786527744124061,
      "grad_norm": 0.1669790893793106,
      "learning_rate": 9.885286571308598e-05,
      "loss": 0.0752,
      "step": 1150
    },
    {
      "epoch": 0.2810758420159922,
      "grad_norm": 0.22811958193778992,
      "learning_rate": 9.881738362639182e-05,
      "loss": 0.0694,
      "step": 1160
    },
    {
      "epoch": 0.2834989096195784,
      "grad_norm": 0.17611163854599,
      "learning_rate": 9.878136768061154e-05,
      "loss": 0.0747,
      "step": 1170
    },
    {
      "epoch": 0.28592197722316454,
      "grad_norm": 0.21130016446113586,
      "learning_rate": 9.874481826960979e-05,
      "loss": 0.0723,
      "step": 1180
    },
    {
      "epoch": 0.28834504482675066,
      "grad_norm": 0.18295472860336304,
      "learning_rate": 9.870773579308503e-05,
      "loss": 0.0717,
      "step": 1190
    },
    {
      "epoch": 0.2907681124303368,
      "grad_norm": 0.23597301542758942,
      "learning_rate": 9.867012065656533e-05,
      "loss": 0.0757,
      "step": 1200
    },
    {
      "epoch": 0.29319118003392297,
      "grad_norm": 0.26741477847099304,
      "learning_rate": 9.863197327140376e-05,
      "loss": 0.0731,
      "step": 1210
    },
    {
      "epoch": 0.2956142476375091,
      "grad_norm": 0.20920027792453766,
      "learning_rate": 9.859329405477403e-05,
      "loss": 0.0636,
      "step": 1220
    },
    {
      "epoch": 0.2980373152410952,
      "grad_norm": 0.224365234375,
      "learning_rate": 9.855408342966585e-05,
      "loss": 0.0748,
      "step": 1230
    },
    {
      "epoch": 0.30046038284468135,
      "grad_norm": 0.1771516501903534,
      "learning_rate": 9.851434182488033e-05,
      "loss": 0.0687,
      "step": 1240
    },
    {
      "epoch": 0.30288345044826753,
      "grad_norm": 0.21521975100040436,
      "learning_rate": 9.84740696750253e-05,
      "loss": 0.0688,
      "step": 1250
    },
    {
      "epoch": 0.30530651805185366,
      "grad_norm": 0.21606016159057617,
      "learning_rate": 9.843326742051055e-05,
      "loss": 0.0679,
      "step": 1260
    },
    {
      "epoch": 0.3077295856554398,
      "grad_norm": 0.38219350576400757,
      "learning_rate": 9.839193550754297e-05,
      "loss": 0.0731,
      "step": 1270
    },
    {
      "epoch": 0.3101526532590259,
      "grad_norm": 0.35934513807296753,
      "learning_rate": 9.835007438812177e-05,
      "loss": 0.0754,
      "step": 1280
    },
    {
      "epoch": 0.3125757208626121,
      "grad_norm": 0.27372294664382935,
      "learning_rate": 9.830768452003341e-05,
      "loss": 0.0703,
      "step": 1290
    },
    {
      "epoch": 0.3149987884661982,
      "grad_norm": 0.19996874034404755,
      "learning_rate": 9.826476636684671e-05,
      "loss": 0.0646,
      "step": 1300
    },
    {
      "epoch": 0.31742185606978435,
      "grad_norm": 0.22204864025115967,
      "learning_rate": 9.822132039790773e-05,
      "loss": 0.069,
      "step": 1310
    },
    {
      "epoch": 0.3198449236733705,
      "grad_norm": 0.23537422716617584,
      "learning_rate": 9.817734708833461e-05,
      "loss": 0.0722,
      "step": 1320
    },
    {
      "epoch": 0.3222679912769566,
      "grad_norm": 0.1366153061389923,
      "learning_rate": 9.813284691901243e-05,
      "loss": 0.0602,
      "step": 1330
    },
    {
      "epoch": 0.3246910588805428,
      "grad_norm": 0.2823709547519684,
      "learning_rate": 9.808782037658792e-05,
      "loss": 0.0669,
      "step": 1340
    },
    {
      "epoch": 0.3271141264841289,
      "grad_norm": 0.23131944239139557,
      "learning_rate": 9.804226795346411e-05,
      "loss": 0.0625,
      "step": 1350
    },
    {
      "epoch": 0.32953719408771504,
      "grad_norm": 0.18494287133216858,
      "learning_rate": 9.799619014779503e-05,
      "loss": 0.0602,
      "step": 1360
    },
    {
      "epoch": 0.33196026169130116,
      "grad_norm": 0.26136401295661926,
      "learning_rate": 9.794958746348013e-05,
      "loss": 0.0593,
      "step": 1370
    },
    {
      "epoch": 0.33438332929488734,
      "grad_norm": 0.16153356432914734,
      "learning_rate": 9.790246041015896e-05,
      "loss": 0.0652,
      "step": 1380
    },
    {
      "epoch": 0.33680639689847347,
      "grad_norm": 0.14857329428195953,
      "learning_rate": 9.785480950320538e-05,
      "loss": 0.0556,
      "step": 1390
    },
    {
      "epoch": 0.3392294645020596,
      "grad_norm": 0.18751142919063568,
      "learning_rate": 9.78066352637221e-05,
      "loss": 0.0669,
      "step": 1400
    },
    {
      "epoch": 0.3416525321056457,
      "grad_norm": 0.18614909052848816,
      "learning_rate": 9.775793821853488e-05,
      "loss": 0.0582,
      "step": 1410
    },
    {
      "epoch": 0.3440755997092319,
      "grad_norm": 0.1765611320734024,
      "learning_rate": 9.77087189001868e-05,
      "loss": 0.0661,
      "step": 1420
    },
    {
      "epoch": 0.34649866731281803,
      "grad_norm": 0.1687687337398529,
      "learning_rate": 9.765897784693243e-05,
      "loss": 0.0656,
      "step": 1430
    },
    {
      "epoch": 0.34892173491640416,
      "grad_norm": 0.1597665697336197,
      "learning_rate": 9.760871560273197e-05,
      "loss": 0.0619,
      "step": 1440
    },
    {
      "epoch": 0.3513448025199903,
      "grad_norm": 0.29115739464759827,
      "learning_rate": 9.755793271724526e-05,
      "loss": 0.0703,
      "step": 1450
    },
    {
      "epoch": 0.35376787012357647,
      "grad_norm": 0.36171144247055054,
      "learning_rate": 9.750662974582584e-05,
      "loss": 0.0638,
      "step": 1460
    },
    {
      "epoch": 0.3561909377271626,
      "grad_norm": 0.24505296349525452,
      "learning_rate": 9.745480724951473e-05,
      "loss": 0.07,
      "step": 1470
    },
    {
      "epoch": 0.3586140053307487,
      "grad_norm": 0.26854684948921204,
      "learning_rate": 9.740246579503447e-05,
      "loss": 0.072,
      "step": 1480
    },
    {
      "epoch": 0.36103707293433485,
      "grad_norm": 0.23773345351219177,
      "learning_rate": 9.734960595478284e-05,
      "loss": 0.0567,
      "step": 1490
    },
    {
      "epoch": 0.36346014053792103,
      "grad_norm": 0.2043919414281845,
      "learning_rate": 9.729622830682657e-05,
      "loss": 0.0778,
      "step": 1500
    },
    {
      "epoch": 0.36588320814150715,
      "grad_norm": 0.17598839104175568,
      "learning_rate": 9.724233343489504e-05,
      "loss": 0.0615,
      "step": 1510
    },
    {
      "epoch": 0.3683062757450933,
      "grad_norm": 0.17013928294181824,
      "learning_rate": 9.718792192837396e-05,
      "loss": 0.0599,
      "step": 1520
    },
    {
      "epoch": 0.3707293433486794,
      "grad_norm": 0.19057349860668182,
      "learning_rate": 9.713299438229886e-05,
      "loss": 0.0671,
      "step": 1530
    },
    {
      "epoch": 0.3731524109522656,
      "grad_norm": 0.21346819400787354,
      "learning_rate": 9.707755139734855e-05,
      "loss": 0.0726,
      "step": 1540
    },
    {
      "epoch": 0.3755754785558517,
      "grad_norm": 0.18674880266189575,
      "learning_rate": 9.702159357983866e-05,
      "loss": 0.0655,
      "step": 1550
    },
    {
      "epoch": 0.37799854615943784,
      "grad_norm": 0.1875901222229004,
      "learning_rate": 9.696512154171492e-05,
      "loss": 0.0617,
      "step": 1560
    },
    {
      "epoch": 0.38042161376302397,
      "grad_norm": 0.2749476432800293,
      "learning_rate": 9.690813590054645e-05,
      "loss": 0.0571,
      "step": 1570
    },
    {
      "epoch": 0.38284468136661015,
      "grad_norm": 0.29953062534332275,
      "learning_rate": 9.685063727951914e-05,
      "loss": 0.0521,
      "step": 1580
    },
    {
      "epoch": 0.3852677489701963,
      "grad_norm": 0.181138813495636,
      "learning_rate": 9.679262630742865e-05,
      "loss": 0.0632,
      "step": 1590
    },
    {
      "epoch": 0.3876908165737824,
      "grad_norm": 0.16172991693019867,
      "learning_rate": 9.673410361867373e-05,
      "loss": 0.0576,
      "step": 1600
    },
    {
      "epoch": 0.39011388417736853,
      "grad_norm": 0.20111921429634094,
      "learning_rate": 9.667506985324909e-05,
      "loss": 0.0582,
      "step": 1610
    },
    {
      "epoch": 0.3925369517809547,
      "grad_norm": 0.29993516206741333,
      "learning_rate": 9.661552565673855e-05,
      "loss": 0.0576,
      "step": 1620
    },
    {
      "epoch": 0.39496001938454084,
      "grad_norm": 0.2669925093650818,
      "learning_rate": 9.655547168030789e-05,
      "loss": 0.0541,
      "step": 1630
    },
    {
      "epoch": 0.39738308698812697,
      "grad_norm": 0.15487892925739288,
      "learning_rate": 9.649490858069777e-05,
      "loss": 0.0606,
      "step": 1640
    },
    {
      "epoch": 0.3998061545917131,
      "grad_norm": 0.2327035367488861,
      "learning_rate": 9.643383702021658e-05,
      "loss": 0.0676,
      "step": 1650
    },
    {
      "epoch": 0.4022292221952993,
      "grad_norm": 0.20327667891979218,
      "learning_rate": 9.637225766673307e-05,
      "loss": 0.0483,
      "step": 1660
    },
    {
      "epoch": 0.4046522897988854,
      "grad_norm": 0.2192804366350174,
      "learning_rate": 9.631017119366922e-05,
      "loss": 0.0588,
      "step": 1670
    },
    {
      "epoch": 0.4070753574024715,
      "grad_norm": 0.18243132531642914,
      "learning_rate": 9.624757827999273e-05,
      "loss": 0.0646,
      "step": 1680
    },
    {
      "epoch": 0.40949842500605765,
      "grad_norm": 0.21957585215568542,
      "learning_rate": 9.618447961020971e-05,
      "loss": 0.0597,
      "step": 1690
    },
    {
      "epoch": 0.41192149260964384,
      "grad_norm": 0.2594870924949646,
      "learning_rate": 9.612087587435707e-05,
      "loss": 0.0624,
      "step": 1700
    },
    {
      "epoch": 0.41434456021322996,
      "grad_norm": 0.15151965618133545,
      "learning_rate": 9.605676776799508e-05,
      "loss": 0.0571,
      "step": 1710
    },
    {
      "epoch": 0.4167676278168161,
      "grad_norm": 0.2595440447330475,
      "learning_rate": 9.599215599219973e-05,
      "loss": 0.0645,
      "step": 1720
    },
    {
      "epoch": 0.4191906954204022,
      "grad_norm": 0.17983701825141907,
      "learning_rate": 9.592704125355505e-05,
      "loss": 0.062,
      "step": 1730
    },
    {
      "epoch": 0.42161376302398834,
      "grad_norm": 0.21827834844589233,
      "learning_rate": 9.586142426414538e-05,
      "loss": 0.0531,
      "step": 1740
    },
    {
      "epoch": 0.4240368306275745,
      "grad_norm": 0.2171270102262497,
      "learning_rate": 9.57953057415476e-05,
      "loss": 0.0604,
      "step": 1750
    },
    {
      "epoch": 0.42645989823116065,
      "grad_norm": 0.20583873987197876,
      "learning_rate": 9.572868640882328e-05,
      "loss": 0.0661,
      "step": 1760
    },
    {
      "epoch": 0.4288829658347468,
      "grad_norm": 0.19157806038856506,
      "learning_rate": 9.56615669945108e-05,
      "loss": 0.0612,
      "step": 1770
    },
    {
      "epoch": 0.4313060334383329,
      "grad_norm": 0.12651243805885315,
      "learning_rate": 9.55939482326173e-05,
      "loss": 0.0588,
      "step": 1780
    },
    {
      "epoch": 0.4337291010419191,
      "grad_norm": 0.17638131976127625,
      "learning_rate": 9.552583086261069e-05,
      "loss": 0.0618,
      "step": 1790
    },
    {
      "epoch": 0.4361521686455052,
      "grad_norm": 0.213279590010643,
      "learning_rate": 9.545721562941168e-05,
      "loss": 0.0591,
      "step": 1800
    },
    {
      "epoch": 0.43857523624909134,
      "grad_norm": 0.23938247561454773,
      "learning_rate": 9.538810328338543e-05,
      "loss": 0.057,
      "step": 1810
    },
    {
      "epoch": 0.44099830385267746,
      "grad_norm": 0.1525634527206421,
      "learning_rate": 9.531849458033349e-05,
      "loss": 0.0486,
      "step": 1820
    },
    {
      "epoch": 0.44342137145626365,
      "grad_norm": 0.3473944067955017,
      "learning_rate": 9.524839028148547e-05,
      "loss": 0.0552,
      "step": 1830
    },
    {
      "epoch": 0.4458444390598498,
      "grad_norm": 0.18791596591472626,
      "learning_rate": 9.517779115349077e-05,
      "loss": 0.053,
      "step": 1840
    },
    {
      "epoch": 0.4482675066634359,
      "grad_norm": 0.28670239448547363,
      "learning_rate": 9.510669796841014e-05,
      "loss": 0.0517,
      "step": 1850
    },
    {
      "epoch": 0.450690574267022,
      "grad_norm": 0.3638632297515869,
      "learning_rate": 9.503511150370727e-05,
      "loss": 0.0651,
      "step": 1860
    },
    {
      "epoch": 0.4531136418706082,
      "grad_norm": 0.22422659397125244,
      "learning_rate": 9.496303254224024e-05,
      "loss": 0.0556,
      "step": 1870
    },
    {
      "epoch": 0.45553670947419433,
      "grad_norm": 0.2053101509809494,
      "learning_rate": 9.489046187225306e-05,
      "loss": 0.0499,
      "step": 1880
    },
    {
      "epoch": 0.45795977707778046,
      "grad_norm": 0.2781410217285156,
      "learning_rate": 9.481740028736692e-05,
      "loss": 0.0491,
      "step": 1890
    },
    {
      "epoch": 0.4603828446813666,
      "grad_norm": 0.18476931750774384,
      "learning_rate": 9.474384858657164e-05,
      "loss": 0.0628,
      "step": 1900
    },
    {
      "epoch": 0.46280591228495277,
      "grad_norm": 0.19517628848552704,
      "learning_rate": 9.466980757421679e-05,
      "loss": 0.0603,
      "step": 1910
    },
    {
      "epoch": 0.4652289798885389,
      "grad_norm": 0.1381346881389618,
      "learning_rate": 9.459527806000305e-05,
      "loss": 0.0563,
      "step": 1920
    },
    {
      "epoch": 0.467652047492125,
      "grad_norm": 0.16588693857192993,
      "learning_rate": 9.452026085897325e-05,
      "loss": 0.0571,
      "step": 1930
    },
    {
      "epoch": 0.47007511509571115,
      "grad_norm": 0.30310162901878357,
      "learning_rate": 9.444475679150348e-05,
      "loss": 0.0593,
      "step": 1940
    },
    {
      "epoch": 0.47249818269929733,
      "grad_norm": 0.18577975034713745,
      "learning_rate": 9.436876668329411e-05,
      "loss": 0.0587,
      "step": 1950
    },
    {
      "epoch": 0.47492125030288346,
      "grad_norm": 0.17943626642227173,
      "learning_rate": 9.429229136536079e-05,
      "loss": 0.0503,
      "step": 1960
    },
    {
      "epoch": 0.4773443179064696,
      "grad_norm": 0.25463947653770447,
      "learning_rate": 9.421533167402534e-05,
      "loss": 0.0644,
      "step": 1970
    },
    {
      "epoch": 0.4797673855100557,
      "grad_norm": 0.2563469111919403,
      "learning_rate": 9.413788845090666e-05,
      "loss": 0.066,
      "step": 1980
    },
    {
      "epoch": 0.4821904531136419,
      "grad_norm": 0.19609716534614563,
      "learning_rate": 9.405996254291136e-05,
      "loss": 0.0564,
      "step": 1990
    },
    {
      "epoch": 0.484613520717228,
      "grad_norm": 0.23780830204486847,
      "learning_rate": 9.398155480222474e-05,
      "loss": 0.051,
      "step": 2000
    },
    {
      "epoch": 0.48703658832081415,
      "grad_norm": 0.16558371484279633,
      "learning_rate": 9.390266608630128e-05,
      "loss": 0.0544,
      "step": 2010
    },
    {
      "epoch": 0.48945965592440027,
      "grad_norm": 0.22054706513881683,
      "learning_rate": 9.38232972578553e-05,
      "loss": 0.056,
      "step": 2020
    },
    {
      "epoch": 0.49188272352798645,
      "grad_norm": 0.1278141885995865,
      "learning_rate": 9.374344918485164e-05,
      "loss": 0.0573,
      "step": 2030
    },
    {
      "epoch": 0.4943057911315726,
      "grad_norm": 0.168584942817688,
      "learning_rate": 9.366312274049602e-05,
      "loss": 0.06,
      "step": 2040
    },
    {
      "epoch": 0.4967288587351587,
      "grad_norm": 0.25215259194374084,
      "learning_rate": 9.358231880322554e-05,
      "loss": 0.0544,
      "step": 2050
    },
    {
      "epoch": 0.49915192633874483,
      "grad_norm": 0.13378572463989258,
      "learning_rate": 9.350103825669916e-05,
      "loss": 0.0461,
      "step": 2060
    },
    {
      "epoch": 0.501574993942331,
      "grad_norm": 0.2712815999984741,
      "learning_rate": 9.341928198978787e-05,
      "loss": 0.0492,
      "step": 2070
    },
    {
      "epoch": 0.5039980615459171,
      "grad_norm": 0.1256439983844757,
      "learning_rate": 9.333705089656512e-05,
      "loss": 0.0467,
      "step": 2080
    },
    {
      "epoch": 0.5064211291495033,
      "grad_norm": 0.19592291116714478,
      "learning_rate": 9.325434587629698e-05,
      "loss": 0.0524,
      "step": 2090
    },
    {
      "epoch": 0.5088441967530895,
      "grad_norm": 0.13289900124073029,
      "learning_rate": 9.31711678334323e-05,
      "loss": 0.047,
      "step": 2100
    },
    {
      "epoch": 0.5112672643566756,
      "grad_norm": 0.21564555168151855,
      "learning_rate": 9.308751767759282e-05,
      "loss": 0.0498,
      "step": 2110
    },
    {
      "epoch": 0.5136903319602617,
      "grad_norm": 0.21520939469337463,
      "learning_rate": 9.300339632356325e-05,
      "loss": 0.0466,
      "step": 2120
    },
    {
      "epoch": 0.5161133995638478,
      "grad_norm": 0.18444831669330597,
      "learning_rate": 9.291880469128124e-05,
      "loss": 0.0514,
      "step": 2130
    },
    {
      "epoch": 0.518536467167434,
      "grad_norm": 0.19666188955307007,
      "learning_rate": 9.283374370582732e-05,
      "loss": 0.0535,
      "step": 2140
    },
    {
      "epoch": 0.5209595347710201,
      "grad_norm": 0.20858798921108246,
      "learning_rate": 9.274821429741482e-05,
      "loss": 0.0596,
      "step": 2150
    },
    {
      "epoch": 0.5233826023746062,
      "grad_norm": 0.2382081151008606,
      "learning_rate": 9.266221740137961e-05,
      "loss": 0.0501,
      "step": 2160
    },
    {
      "epoch": 0.5258056699781924,
      "grad_norm": 0.16628485918045044,
      "learning_rate": 9.257575395817001e-05,
      "loss": 0.047,
      "step": 2170
    },
    {
      "epoch": 0.5282287375817786,
      "grad_norm": 0.18956221640110016,
      "learning_rate": 9.248882491333637e-05,
      "loss": 0.0554,
      "step": 2180
    },
    {
      "epoch": 0.5306518051853647,
      "grad_norm": 0.2803729772567749,
      "learning_rate": 9.240143121752076e-05,
      "loss": 0.0471,
      "step": 2190
    },
    {
      "epoch": 0.5330748727889508,
      "grad_norm": 0.13001462817192078,
      "learning_rate": 9.23135738264467e-05,
      "loss": 0.0486,
      "step": 2200
    },
    {
      "epoch": 0.535497940392537,
      "grad_norm": 0.15444600582122803,
      "learning_rate": 9.222525370090849e-05,
      "loss": 0.0441,
      "step": 2210
    },
    {
      "epoch": 0.5379210079961231,
      "grad_norm": 0.35646572709083557,
      "learning_rate": 9.213647180676088e-05,
      "loss": 0.0494,
      "step": 2220
    },
    {
      "epoch": 0.5403440755997092,
      "grad_norm": 0.24044056236743927,
      "learning_rate": 9.204722911490846e-05,
      "loss": 0.0515,
      "step": 2230
    },
    {
      "epoch": 0.5427671432032953,
      "grad_norm": 0.2854156494140625,
      "learning_rate": 9.1957526601295e-05,
      "loss": 0.046,
      "step": 2240
    },
    {
      "epoch": 0.5451902108068815,
      "grad_norm": 0.2488689422607422,
      "learning_rate": 9.186736524689281e-05,
      "loss": 0.0545,
      "step": 2250
    },
    {
      "epoch": 0.5476132784104677,
      "grad_norm": 0.15807735919952393,
      "learning_rate": 9.177674603769204e-05,
      "loss": 0.0558,
      "step": 2260
    },
    {
      "epoch": 0.5500363460140538,
      "grad_norm": 0.20337934792041779,
      "learning_rate": 9.168566996468983e-05,
      "loss": 0.0496,
      "step": 2270
    },
    {
      "epoch": 0.55245941361764,
      "grad_norm": 0.1580781489610672,
      "learning_rate": 9.159413802387951e-05,
      "loss": 0.0568,
      "step": 2280
    },
    {
      "epoch": 0.5548824812212261,
      "grad_norm": 0.1751731038093567,
      "learning_rate": 9.150215121623974e-05,
      "loss": 0.051,
      "step": 2290
    },
    {
      "epoch": 0.5573055488248122,
      "grad_norm": 0.1994052231311798,
      "learning_rate": 9.140971054772349e-05,
      "loss": 0.0441,
      "step": 2300
    },
    {
      "epoch": 0.5597286164283983,
      "grad_norm": 0.24094025790691376,
      "learning_rate": 9.131681702924713e-05,
      "loss": 0.0577,
      "step": 2310
    },
    {
      "epoch": 0.5621516840319845,
      "grad_norm": 0.17011752724647522,
      "learning_rate": 9.122347167667926e-05,
      "loss": 0.0428,
      "step": 2320
    },
    {
      "epoch": 0.5645747516355706,
      "grad_norm": 0.16502590477466583,
      "learning_rate": 9.112967551082973e-05,
      "loss": 0.0583,
      "step": 2330
    },
    {
      "epoch": 0.5669978192391568,
      "grad_norm": 0.2089923918247223,
      "learning_rate": 9.103542955743835e-05,
      "loss": 0.0491,
      "step": 2340
    },
    {
      "epoch": 0.569420886842743,
      "grad_norm": 0.1554768830537796,
      "learning_rate": 9.094073484716381e-05,
      "loss": 0.054,
      "step": 2350
    },
    {
      "epoch": 0.5718439544463291,
      "grad_norm": 0.2179958075284958,
      "learning_rate": 9.084559241557226e-05,
      "loss": 0.0445,
      "step": 2360
    },
    {
      "epoch": 0.5742670220499152,
      "grad_norm": 0.16002824902534485,
      "learning_rate": 9.075000330312608e-05,
      "loss": 0.0506,
      "step": 2370
    },
    {
      "epoch": 0.5766900896535013,
      "grad_norm": 0.28972455859184265,
      "learning_rate": 9.065396855517253e-05,
      "loss": 0.0424,
      "step": 2380
    },
    {
      "epoch": 0.5791131572570875,
      "grad_norm": 0.23938970267772675,
      "learning_rate": 9.055748922193219e-05,
      "loss": 0.0551,
      "step": 2390
    },
    {
      "epoch": 0.5815362248606736,
      "grad_norm": 0.15034063160419464,
      "learning_rate": 9.046056635848761e-05,
      "loss": 0.051,
      "step": 2400
    },
    {
      "epoch": 0.5839592924642597,
      "grad_norm": 0.18481327593326569,
      "learning_rate": 9.036320102477169e-05,
      "loss": 0.047,
      "step": 2410
    },
    {
      "epoch": 0.5863823600678459,
      "grad_norm": 0.23522192239761353,
      "learning_rate": 9.02653942855561e-05,
      "loss": 0.0478,
      "step": 2420
    },
    {
      "epoch": 0.5888054276714321,
      "grad_norm": 0.25239166617393494,
      "learning_rate": 9.016714721043971e-05,
      "loss": 0.0458,
      "step": 2430
    },
    {
      "epoch": 0.5912284952750182,
      "grad_norm": 0.256253182888031,
      "learning_rate": 9.006846087383675e-05,
      "loss": 0.053,
      "step": 2440
    },
    {
      "epoch": 0.5936515628786043,
      "grad_norm": 0.20433920621871948,
      "learning_rate": 8.996933635496523e-05,
      "loss": 0.0485,
      "step": 2450
    },
    {
      "epoch": 0.5960746304821904,
      "grad_norm": 0.18060754239559174,
      "learning_rate": 8.986977473783498e-05,
      "loss": 0.0515,
      "step": 2460
    },
    {
      "epoch": 0.5984976980857766,
      "grad_norm": 0.1614294946193695,
      "learning_rate": 8.97697771112359e-05,
      "loss": 0.0481,
      "step": 2470
    },
    {
      "epoch": 0.6009207656893627,
      "grad_norm": 0.181077778339386,
      "learning_rate": 8.966934456872602e-05,
      "loss": 0.0483,
      "step": 2480
    },
    {
      "epoch": 0.6033438332929488,
      "grad_norm": 0.1722615659236908,
      "learning_rate": 8.95684782086195e-05,
      "loss": 0.041,
      "step": 2490
    },
    {
      "epoch": 0.6057669008965351,
      "grad_norm": 0.14602269232273102,
      "learning_rate": 8.946717913397476e-05,
      "loss": 0.0493,
      "step": 2500
    },
    {
      "epoch": 0.6081899685001212,
      "grad_norm": 0.1512426882982254,
      "learning_rate": 8.93654484525822e-05,
      "loss": 0.0497,
      "step": 2510
    },
    {
      "epoch": 0.6106130361037073,
      "grad_norm": 0.13178949058055878,
      "learning_rate": 8.926328727695226e-05,
      "loss": 0.046,
      "step": 2520
    },
    {
      "epoch": 0.6130361037072934,
      "grad_norm": 0.16389816999435425,
      "learning_rate": 8.916069672430319e-05,
      "loss": 0.0441,
      "step": 2530
    },
    {
      "epoch": 0.6154591713108796,
      "grad_norm": 0.210100919008255,
      "learning_rate": 8.905767791654884e-05,
      "loss": 0.0492,
      "step": 2540
    },
    {
      "epoch": 0.6178822389144657,
      "grad_norm": 0.2015194296836853,
      "learning_rate": 8.895423198028638e-05,
      "loss": 0.057,
      "step": 2550
    },
    {
      "epoch": 0.6203053065180518,
      "grad_norm": 0.24084636569023132,
      "learning_rate": 8.885036004678402e-05,
      "loss": 0.0506,
      "step": 2560
    },
    {
      "epoch": 0.622728374121638,
      "grad_norm": 0.1615552008152008,
      "learning_rate": 8.874606325196857e-05,
      "loss": 0.0474,
      "step": 2570
    },
    {
      "epoch": 0.6251514417252242,
      "grad_norm": 0.17708715796470642,
      "learning_rate": 8.864134273641304e-05,
      "loss": 0.0469,
      "step": 2580
    },
    {
      "epoch": 0.6275745093288103,
      "grad_norm": 0.17544691264629364,
      "learning_rate": 8.853619964532427e-05,
      "loss": 0.0492,
      "step": 2590
    },
    {
      "epoch": 0.6299975769323964,
      "grad_norm": 0.1414787769317627,
      "learning_rate": 8.843063512853019e-05,
      "loss": 0.0457,
      "step": 2600
    },
    {
      "epoch": 0.6324206445359826,
      "grad_norm": 0.14779818058013916,
      "learning_rate": 8.832465034046749e-05,
      "loss": 0.0416,
      "step": 2610
    },
    {
      "epoch": 0.6348437121395687,
      "grad_norm": 0.19263465702533722,
      "learning_rate": 8.821824644016882e-05,
      "loss": 0.0465,
      "step": 2620
    },
    {
      "epoch": 0.6372667797431548,
      "grad_norm": 0.3165013790130615,
      "learning_rate": 8.811142459125019e-05,
      "loss": 0.0431,
      "step": 2630
    },
    {
      "epoch": 0.639689847346741,
      "grad_norm": 0.17729122936725616,
      "learning_rate": 8.800418596189822e-05,
      "loss": 0.0405,
      "step": 2640
    },
    {
      "epoch": 0.6421129149503271,
      "grad_norm": 0.17129915952682495,
      "learning_rate": 8.789653172485737e-05,
      "loss": 0.045,
      "step": 2650
    },
    {
      "epoch": 0.6445359825539132,
      "grad_norm": 0.17606666684150696,
      "learning_rate": 8.778846305741715e-05,
      "loss": 0.056,
      "step": 2660
    },
    {
      "epoch": 0.6469590501574994,
      "grad_norm": 0.10136760026216507,
      "learning_rate": 8.767998114139918e-05,
      "loss": 0.0491,
      "step": 2670
    },
    {
      "epoch": 0.6493821177610856,
      "grad_norm": 0.1798381209373474,
      "learning_rate": 8.757108716314429e-05,
      "loss": 0.0505,
      "step": 2680
    },
    {
      "epoch": 0.6518051853646717,
      "grad_norm": 0.20055389404296875,
      "learning_rate": 8.746178231349962e-05,
      "loss": 0.0452,
      "step": 2690
    },
    {
      "epoch": 0.6542282529682578,
      "grad_norm": 0.16758450865745544,
      "learning_rate": 8.735206778780549e-05,
      "loss": 0.0527,
      "step": 2700
    },
    {
      "epoch": 0.6566513205718439,
      "grad_norm": 0.17579123377799988,
      "learning_rate": 8.724194478588234e-05,
      "loss": 0.0444,
      "step": 2710
    },
    {
      "epoch": 0.6590743881754301,
      "grad_norm": 0.163347527384758,
      "learning_rate": 8.713141451201772e-05,
      "loss": 0.0481,
      "step": 2720
    },
    {
      "epoch": 0.6614974557790162,
      "grad_norm": 0.19324535131454468,
      "learning_rate": 8.702047817495295e-05,
      "loss": 0.046,
      "step": 2730
    },
    {
      "epoch": 0.6639205233826023,
      "grad_norm": 0.21531912684440613,
      "learning_rate": 8.69091369878701e-05,
      "loss": 0.0458,
      "step": 2740
    },
    {
      "epoch": 0.6663435909861886,
      "grad_norm": 0.12496834993362427,
      "learning_rate": 8.679739216837849e-05,
      "loss": 0.0373,
      "step": 2750
    },
    {
      "epoch": 0.6687666585897747,
      "grad_norm": 0.14187116920948029,
      "learning_rate": 8.66852449385016e-05,
      "loss": 0.0439,
      "step": 2760
    },
    {
      "epoch": 0.6711897261933608,
      "grad_norm": 0.1545686572790146,
      "learning_rate": 8.657269652466356e-05,
      "loss": 0.0448,
      "step": 2770
    },
    {
      "epoch": 0.6736127937969469,
      "grad_norm": 0.20154084265232086,
      "learning_rate": 8.645974815767577e-05,
      "loss": 0.0439,
      "step": 2780
    },
    {
      "epoch": 0.6760358614005331,
      "grad_norm": 0.1700827032327652,
      "learning_rate": 8.634640107272351e-05,
      "loss": 0.0387,
      "step": 2790
    },
    {
      "epoch": 0.6784589290041192,
      "grad_norm": 0.1399426907300949,
      "learning_rate": 8.623265650935234e-05,
      "loss": 0.0443,
      "step": 2800
    },
    {
      "epoch": 0.6808819966077053,
      "grad_norm": 0.16368074715137482,
      "learning_rate": 8.611851571145456e-05,
      "loss": 0.0456,
      "step": 2810
    },
    {
      "epoch": 0.6833050642112914,
      "grad_norm": 0.22185085713863373,
      "learning_rate": 8.600397992725566e-05,
      "loss": 0.0399,
      "step": 2820
    },
    {
      "epoch": 0.6857281318148777,
      "grad_norm": 0.16658170521259308,
      "learning_rate": 8.588905040930061e-05,
      "loss": 0.0457,
      "step": 2830
    },
    {
      "epoch": 0.6881511994184638,
      "grad_norm": 0.1847662329673767,
      "learning_rate": 8.577372841444022e-05,
      "loss": 0.0429,
      "step": 2840
    },
    {
      "epoch": 0.6905742670220499,
      "grad_norm": 0.20223012566566467,
      "learning_rate": 8.565801520381736e-05,
      "loss": 0.0459,
      "step": 2850
    },
    {
      "epoch": 0.6929973346256361,
      "grad_norm": 0.1493610143661499,
      "learning_rate": 8.554191204285313e-05,
      "loss": 0.0431,
      "step": 2860
    },
    {
      "epoch": 0.6954204022292222,
      "grad_norm": 0.2627032399177551,
      "learning_rate": 8.542542020123315e-05,
      "loss": 0.0445,
      "step": 2870
    },
    {
      "epoch": 0.6978434698328083,
      "grad_norm": 0.1458875983953476,
      "learning_rate": 8.530854095289347e-05,
      "loss": 0.0388,
      "step": 2880
    },
    {
      "epoch": 0.7002665374363944,
      "grad_norm": 0.1582096666097641,
      "learning_rate": 8.519127557600688e-05,
      "loss": 0.0425,
      "step": 2890
    },
    {
      "epoch": 0.7026896050399806,
      "grad_norm": 0.12833742797374725,
      "learning_rate": 8.507362535296871e-05,
      "loss": 0.0421,
      "step": 2900
    },
    {
      "epoch": 0.7051126726435668,
      "grad_norm": 0.2219606339931488,
      "learning_rate": 8.495559157038299e-05,
      "loss": 0.0606,
      "step": 2910
    },
    {
      "epoch": 0.7075357402471529,
      "grad_norm": 0.2175474762916565,
      "learning_rate": 8.483717551904823e-05,
      "loss": 0.04,
      "step": 2920
    },
    {
      "epoch": 0.7099588078507391,
      "grad_norm": 0.2039986401796341,
      "learning_rate": 8.47183784939434e-05,
      "loss": 0.04,
      "step": 2930
    },
    {
      "epoch": 0.7123818754543252,
      "grad_norm": 0.11814648658037186,
      "learning_rate": 8.459920179421374e-05,
      "loss": 0.0398,
      "step": 2940
    },
    {
      "epoch": 0.7148049430579113,
      "grad_norm": 0.15184038877487183,
      "learning_rate": 8.447964672315656e-05,
      "loss": 0.0428,
      "step": 2950
    },
    {
      "epoch": 0.7172280106614974,
      "grad_norm": 0.2321150004863739,
      "learning_rate": 8.435971458820692e-05,
      "loss": 0.052,
      "step": 2960
    },
    {
      "epoch": 0.7196510782650836,
      "grad_norm": 0.19223789870738983,
      "learning_rate": 8.423940670092345e-05,
      "loss": 0.0493,
      "step": 2970
    },
    {
      "epoch": 0.7220741458686697,
      "grad_norm": 0.21668773889541626,
      "learning_rate": 8.411872437697394e-05,
      "loss": 0.0426,
      "step": 2980
    },
    {
      "epoch": 0.7244972134722559,
      "grad_norm": 0.16379350423812866,
      "learning_rate": 8.399766893612096e-05,
      "loss": 0.0386,
      "step": 2990
    },
    {
      "epoch": 0.7269202810758421,
      "grad_norm": 0.19127467274665833,
      "learning_rate": 8.38762417022074e-05,
      "loss": 0.0525,
      "step": 3000
    },
    {
      "epoch": 0.7293433486794282,
      "grad_norm": 0.1838960200548172,
      "learning_rate": 8.375444400314204e-05,
      "loss": 0.0434,
      "step": 3010
    },
    {
      "epoch": 0.7317664162830143,
      "grad_norm": 0.18451040983200073,
      "learning_rate": 8.3632277170885e-05,
      "loss": 0.0532,
      "step": 3020
    },
    {
      "epoch": 0.7341894838866004,
      "grad_norm": 0.1776730865240097,
      "learning_rate": 8.350974254143318e-05,
      "loss": 0.0372,
      "step": 3030
    },
    {
      "epoch": 0.7366125514901866,
      "grad_norm": 0.15609405934810638,
      "learning_rate": 8.338684145480566e-05,
      "loss": 0.036,
      "step": 3040
    },
    {
      "epoch": 0.7390356190937727,
      "grad_norm": 0.1353120058774948,
      "learning_rate": 8.326357525502904e-05,
      "loss": 0.0439,
      "step": 3050
    },
    {
      "epoch": 0.7414586866973588,
      "grad_norm": 0.12224511802196503,
      "learning_rate": 8.313994529012273e-05,
      "loss": 0.0446,
      "step": 3060
    },
    {
      "epoch": 0.7438817543009449,
      "grad_norm": 0.16622841358184814,
      "learning_rate": 8.301595291208422e-05,
      "loss": 0.0491,
      "step": 3070
    },
    {
      "epoch": 0.7463048219045312,
      "grad_norm": 0.19702613353729248,
      "learning_rate": 8.289159947687427e-05,
      "loss": 0.0461,
      "step": 3080
    },
    {
      "epoch": 0.7487278895081173,
      "grad_norm": 0.18081502616405487,
      "learning_rate": 8.276688634440216e-05,
      "loss": 0.0433,
      "step": 3090
    },
    {
      "epoch": 0.7511509571117034,
      "grad_norm": 0.3770922124385834,
      "learning_rate": 8.26418148785107e-05,
      "loss": 0.0485,
      "step": 3100
    },
    {
      "epoch": 0.7535740247152896,
      "grad_norm": 0.14518462121486664,
      "learning_rate": 8.251638644696141e-05,
      "loss": 0.0493,
      "step": 3110
    },
    {
      "epoch": 0.7559970923188757,
      "grad_norm": 0.25315719842910767,
      "learning_rate": 8.23906024214195e-05,
      "loss": 0.0409,
      "step": 3120
    },
    {
      "epoch": 0.7584201599224618,
      "grad_norm": 0.22215618193149567,
      "learning_rate": 8.226446417743897e-05,
      "loss": 0.0437,
      "step": 3130
    },
    {
      "epoch": 0.7608432275260479,
      "grad_norm": 0.15837465226650238,
      "learning_rate": 8.213797309444742e-05,
      "loss": 0.0454,
      "step": 3140
    },
    {
      "epoch": 0.7632662951296341,
      "grad_norm": 0.17210164666175842,
      "learning_rate": 8.201113055573105e-05,
      "loss": 0.0456,
      "step": 3150
    },
    {
      "epoch": 0.7656893627332203,
      "grad_norm": 0.17189684510231018,
      "learning_rate": 8.188393794841958e-05,
      "loss": 0.0397,
      "step": 3160
    },
    {
      "epoch": 0.7681124303368064,
      "grad_norm": 0.2261638641357422,
      "learning_rate": 8.175639666347094e-05,
      "loss": 0.0441,
      "step": 3170
    },
    {
      "epoch": 0.7705354979403926,
      "grad_norm": 0.2089548110961914,
      "learning_rate": 8.162850809565623e-05,
      "loss": 0.0446,
      "step": 3180
    },
    {
      "epoch": 0.7729585655439787,
      "grad_norm": 0.21397393941879272,
      "learning_rate": 8.150027364354431e-05,
      "loss": 0.0459,
      "step": 3190
    },
    {
      "epoch": 0.7753816331475648,
      "grad_norm": 0.265156626701355,
      "learning_rate": 8.137169470948662e-05,
      "loss": 0.0514,
      "step": 3200
    },
    {
      "epoch": 0.7778047007511509,
      "grad_norm": 0.19327251613140106,
      "learning_rate": 8.124277269960179e-05,
      "loss": 0.0436,
      "step": 3210
    },
    {
      "epoch": 0.7802277683547371,
      "grad_norm": 0.1638793647289276,
      "learning_rate": 8.111350902376023e-05,
      "loss": 0.0411,
      "step": 3220
    },
    {
      "epoch": 0.7826508359583232,
      "grad_norm": 0.2995148003101349,
      "learning_rate": 8.098390509556883e-05,
      "loss": 0.0429,
      "step": 3230
    },
    {
      "epoch": 0.7850739035619094,
      "grad_norm": 0.19604302942752838,
      "learning_rate": 8.085396233235536e-05,
      "loss": 0.0374,
      "step": 3240
    },
    {
      "epoch": 0.7874969711654956,
      "grad_norm": 0.13748332858085632,
      "learning_rate": 8.072368215515306e-05,
      "loss": 0.0316,
      "step": 3250
    },
    {
      "epoch": 0.7899200387690817,
      "grad_norm": 0.1680527925491333,
      "learning_rate": 8.059306598868506e-05,
      "loss": 0.0511,
      "step": 3260
    },
    {
      "epoch": 0.7923431063726678,
      "grad_norm": 0.21089929342269897,
      "learning_rate": 8.046211526134888e-05,
      "loss": 0.0427,
      "step": 3270
    },
    {
      "epoch": 0.7947661739762539,
      "grad_norm": 0.18065360188484192,
      "learning_rate": 8.033083140520065e-05,
      "loss": 0.0349,
      "step": 3280
    },
    {
      "epoch": 0.7971892415798401,
      "grad_norm": 0.12920215725898743,
      "learning_rate": 8.019921585593962e-05,
      "loss": 0.0464,
      "step": 3290
    },
    {
      "epoch": 0.7996123091834262,
      "grad_norm": 0.25489136576652527,
      "learning_rate": 8.006727005289232e-05,
      "loss": 0.0366,
      "step": 3300
    },
    {
      "epoch": 0.8020353767870123,
      "grad_norm": 0.20825359225273132,
      "learning_rate": 7.993499543899692e-05,
      "loss": 0.0411,
      "step": 3310
    },
    {
      "epoch": 0.8044584443905985,
      "grad_norm": 0.16933447122573853,
      "learning_rate": 7.980239346078742e-05,
      "loss": 0.0366,
      "step": 3320
    },
    {
      "epoch": 0.8068815119941847,
      "grad_norm": 0.1853591501712799,
      "learning_rate": 7.966946556837778e-05,
      "loss": 0.0424,
      "step": 3330
    },
    {
      "epoch": 0.8093045795977708,
      "grad_norm": 0.15621642768383026,
      "learning_rate": 7.953621321544616e-05,
      "loss": 0.0397,
      "step": 3340
    },
    {
      "epoch": 0.8117276472013569,
      "grad_norm": 0.13941192626953125,
      "learning_rate": 7.940263785921896e-05,
      "loss": 0.0442,
      "step": 3350
    },
    {
      "epoch": 0.814150714804943,
      "grad_norm": 0.3172297775745392,
      "learning_rate": 7.926874096045482e-05,
      "loss": 0.0373,
      "step": 3360
    },
    {
      "epoch": 0.8165737824085292,
      "grad_norm": 0.21606160700321198,
      "learning_rate": 7.913452398342881e-05,
      "loss": 0.0511,
      "step": 3370
    },
    {
      "epoch": 0.8189968500121153,
      "grad_norm": 0.15523676574230194,
      "learning_rate": 7.89999883959163e-05,
      "loss": 0.048,
      "step": 3380
    },
    {
      "epoch": 0.8214199176157014,
      "grad_norm": 0.19805888831615448,
      "learning_rate": 7.886513566917687e-05,
      "loss": 0.0452,
      "step": 3390
    },
    {
      "epoch": 0.8238429852192877,
      "grad_norm": 0.21451659500598907,
      "learning_rate": 7.872996727793838e-05,
| "loss": 0.0444, | |
| "step": 3400 | |
| }, | |
| { | |
| "epoch": 0.8262660528228738, | |
| "grad_norm": 0.17402143776416779, | |
| "learning_rate": 7.859448470038069e-05, | |
| "loss": 0.0431, | |
| "step": 3410 | |
| }, | |
| { | |
| "epoch": 0.8286891204264599, | |
| "grad_norm": 0.14573906362056732, | |
| "learning_rate": 7.845868941811956e-05, | |
| "loss": 0.0428, | |
| "step": 3420 | |
| }, | |
| { | |
| "epoch": 0.831112188030046, | |
| "grad_norm": 0.16346170008182526, | |
| "learning_rate": 7.832258291619043e-05, | |
| "loss": 0.0404, | |
| "step": 3430 | |
| }, | |
| { | |
| "epoch": 0.8335352556336322, | |
| "grad_norm": 0.14306640625, | |
| "learning_rate": 7.81861666830322e-05, | |
| "loss": 0.0423, | |
| "step": 3440 | |
| }, | |
| { | |
| "epoch": 0.8359583232372183, | |
| "grad_norm": 0.15425516664981842, | |
| "learning_rate": 7.804944221047097e-05, | |
| "loss": 0.0372, | |
| "step": 3450 | |
| }, | |
| { | |
| "epoch": 0.8383813908408044, | |
| "grad_norm": 0.3661347031593323, | |
| "learning_rate": 7.791241099370364e-05, | |
| "loss": 0.0444, | |
| "step": 3460 | |
| }, | |
| { | |
| "epoch": 0.8408044584443906, | |
| "grad_norm": 0.22419315576553345, | |
| "learning_rate": 7.777507453128163e-05, | |
| "loss": 0.0394, | |
| "step": 3470 | |
| }, | |
| { | |
| "epoch": 0.8432275260479767, | |
| "grad_norm": 0.31667467951774597, | |
| "learning_rate": 7.763743432509451e-05, | |
| "loss": 0.043, | |
| "step": 3480 | |
| }, | |
| { | |
| "epoch": 0.8456505936515629, | |
| "grad_norm": 0.17802435159683228, | |
| "learning_rate": 7.749949188035353e-05, | |
| "loss": 0.042, | |
| "step": 3490 | |
| }, | |
| { | |
| "epoch": 0.848073661255149, | |
| "grad_norm": 0.1798791140317917, | |
| "learning_rate": 7.736124870557516e-05, | |
| "loss": 0.0516, | |
| "step": 3500 | |
| }, | |
| { | |
| "epoch": 0.8504967288587352, | |
| "grad_norm": 0.17469356954097748, | |
| "learning_rate": 7.722270631256459e-05, | |
| "loss": 0.0472, | |
| "step": 3510 | |
| }, | |
| { | |
| "epoch": 0.8529197964623213, | |
| "grad_norm": 0.1313830316066742, | |
| "learning_rate": 7.708386621639925e-05, | |
| "loss": 0.0363, | |
| "step": 3520 | |
| }, | |
| { | |
| "epoch": 0.8553428640659074, | |
| "grad_norm": 0.3296225666999817, | |
| "learning_rate": 7.694472993541219e-05, | |
| "loss": 0.0386, | |
| "step": 3530 | |
| }, | |
| { | |
| "epoch": 0.8577659316694936, | |
| "grad_norm": 0.17370855808258057, | |
| "learning_rate": 7.680529899117547e-05, | |
| "loss": 0.0456, | |
| "step": 3540 | |
| }, | |
| { | |
| "epoch": 0.8601889992730797, | |
| "grad_norm": 0.18722471594810486, | |
| "learning_rate": 7.666557490848358e-05, | |
| "loss": 0.0441, | |
| "step": 3550 | |
| }, | |
| { | |
| "epoch": 0.8626120668766658, | |
| "grad_norm": 0.1968887597322464, | |
| "learning_rate": 7.65255592153367e-05, | |
| "loss": 0.0417, | |
| "step": 3560 | |
| }, | |
| { | |
| "epoch": 0.865035134480252, | |
| "grad_norm": 0.20979289710521698, | |
| "learning_rate": 7.638525344292402e-05, | |
| "loss": 0.0456, | |
| "step": 3570 | |
| }, | |
| { | |
| "epoch": 0.8674582020838382, | |
| "grad_norm": 0.2972024083137512, | |
| "learning_rate": 7.624465912560697e-05, | |
| "loss": 0.0408, | |
| "step": 3580 | |
| }, | |
| { | |
| "epoch": 0.8698812696874243, | |
| "grad_norm": 0.2382773458957672, | |
| "learning_rate": 7.610377780090249e-05, | |
| "loss": 0.0385, | |
| "step": 3590 | |
| }, | |
| { | |
| "epoch": 0.8723043372910104, | |
| "grad_norm": 0.20507784187793732, | |
| "learning_rate": 7.596261100946618e-05, | |
| "loss": 0.0438, | |
| "step": 3600 | |
| }, | |
| { | |
| "epoch": 0.8747274048945965, | |
| "grad_norm": 0.16031067073345184, | |
| "learning_rate": 7.582116029507542e-05, | |
| "loss": 0.0373, | |
| "step": 3610 | |
| }, | |
| { | |
| "epoch": 0.8771504724981827, | |
| "grad_norm": 0.15577581524848938, | |
| "learning_rate": 7.56794272046126e-05, | |
| "loss": 0.0394, | |
| "step": 3620 | |
| }, | |
| { | |
| "epoch": 0.8795735401017688, | |
| "grad_norm": 0.17504993081092834, | |
| "learning_rate": 7.55374132880481e-05, | |
| "loss": 0.0448, | |
| "step": 3630 | |
| }, | |
| { | |
| "epoch": 0.8819966077053549, | |
| "grad_norm": 0.17116758227348328, | |
| "learning_rate": 7.539512009842333e-05, | |
| "loss": 0.0424, | |
| "step": 3640 | |
| }, | |
| { | |
| "epoch": 0.8844196753089412, | |
| "grad_norm": 0.1967484951019287, | |
| "learning_rate": 7.525254919183382e-05, | |
| "loss": 0.0454, | |
| "step": 3650 | |
| }, | |
| { | |
| "epoch": 0.8868427429125273, | |
| "grad_norm": 0.19744765758514404, | |
| "learning_rate": 7.510970212741215e-05, | |
| "loss": 0.0328, | |
| "step": 3660 | |
| }, | |
| { | |
| "epoch": 0.8892658105161134, | |
| "grad_norm": 0.1328544318675995, | |
| "learning_rate": 7.496658046731096e-05, | |
| "loss": 0.0411, | |
| "step": 3670 | |
| }, | |
| { | |
| "epoch": 0.8916888781196995, | |
| "grad_norm": 0.12274576723575592, | |
| "learning_rate": 7.482318577668578e-05, | |
| "loss": 0.0434, | |
| "step": 3680 | |
| }, | |
| { | |
| "epoch": 0.8941119457232857, | |
| "grad_norm": 0.21458220481872559, | |
| "learning_rate": 7.467951962367796e-05, | |
| "loss": 0.0461, | |
| "step": 3690 | |
| }, | |
| { | |
| "epoch": 0.8965350133268718, | |
| "grad_norm": 0.16265733540058136, | |
| "learning_rate": 7.453558357939755e-05, | |
| "loss": 0.0391, | |
| "step": 3700 | |
| }, | |
| { | |
| "epoch": 0.8989580809304579, | |
| "grad_norm": 0.11289467662572861, | |
| "learning_rate": 7.439137921790606e-05, | |
| "loss": 0.0441, | |
| "step": 3710 | |
| }, | |
| { | |
| "epoch": 0.901381148534044, | |
| "grad_norm": 0.2518782615661621, | |
| "learning_rate": 7.42469081161993e-05, | |
| "loss": 0.0435, | |
| "step": 3720 | |
| }, | |
| { | |
| "epoch": 0.9038042161376303, | |
| "grad_norm": 0.15403211116790771, | |
| "learning_rate": 7.410217185419006e-05, | |
| "loss": 0.0352, | |
| "step": 3730 | |
| }, | |
| { | |
| "epoch": 0.9062272837412164, | |
| "grad_norm": 0.14967775344848633, | |
| "learning_rate": 7.395717201469095e-05, | |
| "loss": 0.0419, | |
| "step": 3740 | |
| }, | |
| { | |
| "epoch": 0.9086503513448025, | |
| "grad_norm": 0.18926765024662018, | |
| "learning_rate": 7.381191018339696e-05, | |
| "loss": 0.0414, | |
| "step": 3750 | |
| }, | |
| { | |
| "epoch": 0.9110734189483887, | |
| "grad_norm": 0.13593383133411407, | |
| "learning_rate": 7.36663879488682e-05, | |
| "loss": 0.0388, | |
| "step": 3760 | |
| }, | |
| { | |
| "epoch": 0.9134964865519748, | |
| "grad_norm": 0.27778002619743347, | |
| "learning_rate": 7.352060690251254e-05, | |
| "loss": 0.0509, | |
| "step": 3770 | |
| }, | |
| { | |
| "epoch": 0.9159195541555609, | |
| "grad_norm": 0.13119299709796906, | |
| "learning_rate": 7.337456863856811e-05, | |
| "loss": 0.0392, | |
| "step": 3780 | |
| }, | |
| { | |
| "epoch": 0.918342621759147, | |
| "grad_norm": 0.19899316132068634, | |
| "learning_rate": 7.3228274754086e-05, | |
| "loss": 0.0351, | |
| "step": 3790 | |
| }, | |
| { | |
| "epoch": 0.9207656893627332, | |
| "grad_norm": 0.23209546506404877, | |
| "learning_rate": 7.308172684891267e-05, | |
| "loss": 0.0417, | |
| "step": 3800 | |
| }, | |
| { | |
| "epoch": 0.9231887569663194, | |
| "grad_norm": 0.1809903234243393, | |
| "learning_rate": 7.293492652567255e-05, | |
| "loss": 0.0357, | |
| "step": 3810 | |
| }, | |
| { | |
| "epoch": 0.9256118245699055, | |
| "grad_norm": 0.24020709097385406, | |
| "learning_rate": 7.278787538975043e-05, | |
| "loss": 0.0421, | |
| "step": 3820 | |
| }, | |
| { | |
| "epoch": 0.9280348921734917, | |
| "grad_norm": 0.13872717320919037, | |
| "learning_rate": 7.2640575049274e-05, | |
| "loss": 0.0434, | |
| "step": 3830 | |
| }, | |
| { | |
| "epoch": 0.9304579597770778, | |
| "grad_norm": 0.12468786537647247, | |
| "learning_rate": 7.249302711509616e-05, | |
| "loss": 0.0375, | |
| "step": 3840 | |
| }, | |
| { | |
| "epoch": 0.9328810273806639, | |
| "grad_norm": 0.2610887885093689, | |
| "learning_rate": 7.23452332007775e-05, | |
| "loss": 0.0368, | |
| "step": 3850 | |
| }, | |
| { | |
| "epoch": 0.93530409498425, | |
| "grad_norm": 0.14304566383361816, | |
| "learning_rate": 7.219719492256858e-05, | |
| "loss": 0.044, | |
| "step": 3860 | |
| }, | |
| { | |
| "epoch": 0.9377271625878362, | |
| "grad_norm": 0.16474686563014984, | |
| "learning_rate": 7.20489138993923e-05, | |
| "loss": 0.0406, | |
| "step": 3870 | |
| }, | |
| { | |
| "epoch": 0.9401502301914223, | |
| "grad_norm": 0.15889941155910492, | |
| "learning_rate": 7.190039175282614e-05, | |
| "loss": 0.0366, | |
| "step": 3880 | |
| }, | |
| { | |
| "epoch": 0.9425732977950085, | |
| "grad_norm": 0.20691543817520142, | |
| "learning_rate": 7.175163010708455e-05, | |
| "loss": 0.0369, | |
| "step": 3890 | |
| }, | |
| { | |
| "epoch": 0.9449963653985947, | |
| "grad_norm": 0.2763959467411041, | |
| "learning_rate": 7.1602630589001e-05, | |
| "loss": 0.0427, | |
| "step": 3900 | |
| }, | |
| { | |
| "epoch": 0.9474194330021808, | |
| "grad_norm": 0.28597092628479004, | |
| "learning_rate": 7.14533948280104e-05, | |
| "loss": 0.0405, | |
| "step": 3910 | |
| }, | |
| { | |
| "epoch": 0.9498425006057669, | |
| "grad_norm": 0.10694515705108643, | |
| "learning_rate": 7.130392445613109e-05, | |
| "loss": 0.0377, | |
| "step": 3920 | |
| }, | |
| { | |
| "epoch": 0.952265568209353, | |
| "grad_norm": 0.23446550965309143, | |
| "learning_rate": 7.115422110794711e-05, | |
| "loss": 0.0384, | |
| "step": 3930 | |
| }, | |
| { | |
| "epoch": 0.9546886358129392, | |
| "grad_norm": 0.20818932354450226, | |
| "learning_rate": 7.100428642059033e-05, | |
| "loss": 0.0447, | |
| "step": 3940 | |
| }, | |
| { | |
| "epoch": 0.9571117034165253, | |
| "grad_norm": 0.22103236615657806, | |
| "learning_rate": 7.08541220337224e-05, | |
| "loss": 0.0438, | |
| "step": 3950 | |
| }, | |
| { | |
| "epoch": 0.9595347710201114, | |
| "grad_norm": 0.29493871331214905, | |
| "learning_rate": 7.070372958951706e-05, | |
| "loss": 0.037, | |
| "step": 3960 | |
| }, | |
| { | |
| "epoch": 0.9619578386236975, | |
| "grad_norm": 0.22487111389636993, | |
| "learning_rate": 7.055311073264194e-05, | |
| "loss": 0.044, | |
| "step": 3970 | |
| }, | |
| { | |
| "epoch": 0.9643809062272838, | |
| "grad_norm": 0.21388165652751923, | |
| "learning_rate": 7.040226711024077e-05, | |
| "loss": 0.0352, | |
| "step": 3980 | |
| }, | |
| { | |
| "epoch": 0.9668039738308699, | |
| "grad_norm": 0.13826294243335724, | |
| "learning_rate": 7.02512003719152e-05, | |
| "loss": 0.0342, | |
| "step": 3990 | |
| }, | |
| { | |
| "epoch": 0.969227041434456, | |
| "grad_norm": 0.20242105424404144, | |
| "learning_rate": 7.00999121697069e-05, | |
| "loss": 0.0395, | |
| "step": 4000 | |
| }, | |
| { | |
| "epoch": 0.9716501090380422, | |
| "grad_norm": 0.16864188015460968, | |
| "learning_rate": 6.99484041580794e-05, | |
| "loss": 0.0365, | |
| "step": 4010 | |
| }, | |
| { | |
| "epoch": 0.9740731766416283, | |
| "grad_norm": 0.17126575112342834, | |
| "learning_rate": 6.979667799390004e-05, | |
| "loss": 0.0396, | |
| "step": 4020 | |
| }, | |
| { | |
| "epoch": 0.9764962442452144, | |
| "grad_norm": 0.21484066545963287, | |
| "learning_rate": 6.964473533642185e-05, | |
| "loss": 0.0418, | |
| "step": 4030 | |
| }, | |
| { | |
| "epoch": 0.9789193118488005, | |
| "grad_norm": 0.29061970114707947, | |
| "learning_rate": 6.949257784726539e-05, | |
| "loss": 0.0438, | |
| "step": 4040 | |
| }, | |
| { | |
| "epoch": 0.9813423794523867, | |
| "grad_norm": 0.16736122965812683, | |
| "learning_rate": 6.934020719040056e-05, | |
| "loss": 0.0329, | |
| "step": 4050 | |
| }, | |
| { | |
| "epoch": 0.9837654470559729, | |
| "grad_norm": 0.13058723509311676, | |
| "learning_rate": 6.918762503212848e-05, | |
| "loss": 0.0439, | |
| "step": 4060 | |
| }, | |
| { | |
| "epoch": 0.986188514659559, | |
| "grad_norm": 0.2493453025817871, | |
| "learning_rate": 6.903483304106319e-05, | |
| "loss": 0.0386, | |
| "step": 4070 | |
| }, | |
| { | |
| "epoch": 0.9886115822631452, | |
| "grad_norm": 0.21018049120903015, | |
| "learning_rate": 6.888183288811341e-05, | |
| "loss": 0.0359, | |
| "step": 4080 | |
| }, | |
| { | |
| "epoch": 0.9910346498667313, | |
| "grad_norm": 0.14713706076145172, | |
| "learning_rate": 6.87286262464643e-05, | |
| "loss": 0.0358, | |
| "step": 4090 | |
| }, | |
| { | |
| "epoch": 0.9934577174703174, | |
| "grad_norm": 0.13290318846702576, | |
| "learning_rate": 6.857521479155915e-05, | |
| "loss": 0.0454, | |
| "step": 4100 | |
| }, | |
| { | |
| "epoch": 0.9958807850739035, | |
| "grad_norm": 0.1758163571357727, | |
| "learning_rate": 6.842160020108104e-05, | |
| "loss": 0.0388, | |
| "step": 4110 | |
| }, | |
| { | |
| "epoch": 0.9983038526774897, | |
| "grad_norm": 0.2236786186695099, | |
| "learning_rate": 6.826778415493455e-05, | |
| "loss": 0.0375, | |
| "step": 4120 | |
| }, | |
| { | |
| "epoch": 1.0007269202810758, | |
| "grad_norm": 0.18666422367095947, | |
| "learning_rate": 6.811376833522729e-05, | |
| "loss": 0.0449, | |
| "step": 4130 | |
| }, | |
| { | |
| "epoch": 1.003149987884662, | |
| "grad_norm": 0.24746845662593842, | |
| "learning_rate": 6.795955442625159e-05, | |
| "loss": 0.0417, | |
| "step": 4140 | |
| }, | |
| { | |
| "epoch": 1.005573055488248, | |
| "grad_norm": 0.18281657993793488, | |
| "learning_rate": 6.780514411446608e-05, | |
| "loss": 0.0349, | |
| "step": 4150 | |
| }, | |
| { | |
| "epoch": 1.0079961230918342, | |
| "grad_norm": 0.14395718276500702, | |
| "learning_rate": 6.765053908847716e-05, | |
| "loss": 0.0279, | |
| "step": 4160 | |
| }, | |
| { | |
| "epoch": 1.0104191906954203, | |
| "grad_norm": 0.19150447845458984, | |
| "learning_rate": 6.749574103902064e-05, | |
| "loss": 0.0426, | |
| "step": 4170 | |
| }, | |
| { | |
| "epoch": 1.0128422582990066, | |
| "grad_norm": 0.17420750856399536, | |
| "learning_rate": 6.734075165894317e-05, | |
| "loss": 0.0412, | |
| "step": 4180 | |
| }, | |
| { | |
| "epoch": 1.0152653259025928, | |
| "grad_norm": 0.2508639395236969, | |
| "learning_rate": 6.71855726431838e-05, | |
| "loss": 0.0406, | |
| "step": 4190 | |
| }, | |
| { | |
| "epoch": 1.017688393506179, | |
| "grad_norm": 0.18227313458919525, | |
| "learning_rate": 6.703020568875538e-05, | |
| "loss": 0.0358, | |
| "step": 4200 | |
| }, | |
| { | |
| "epoch": 1.020111461109765, | |
| "grad_norm": 0.13005763292312622, | |
| "learning_rate": 6.687465249472603e-05, | |
| "loss": 0.0344, | |
| "step": 4210 | |
| }, | |
| { | |
| "epoch": 1.0225345287133512, | |
| "grad_norm": 0.2406199723482132, | |
| "learning_rate": 6.671891476220055e-05, | |
| "loss": 0.035, | |
| "step": 4220 | |
| }, | |
| { | |
| "epoch": 1.0249575963169373, | |
| "grad_norm": 0.13731837272644043, | |
| "learning_rate": 6.656299419430183e-05, | |
| "loss": 0.0341, | |
| "step": 4230 | |
| }, | |
| { | |
| "epoch": 1.0273806639205234, | |
| "grad_norm": 0.21183118224143982, | |
| "learning_rate": 6.640689249615223e-05, | |
| "loss": 0.0392, | |
| "step": 4240 | |
| }, | |
| { | |
| "epoch": 1.0298037315241095, | |
| "grad_norm": 0.3791130483150482, | |
| "learning_rate": 6.625061137485491e-05, | |
| "loss": 0.0383, | |
| "step": 4250 | |
| }, | |
| { | |
| "epoch": 1.0322267991276957, | |
| "grad_norm": 0.20733681321144104, | |
| "learning_rate": 6.609415253947517e-05, | |
| "loss": 0.0444, | |
| "step": 4260 | |
| }, | |
| { | |
| "epoch": 1.0346498667312818, | |
| "grad_norm": 0.18938347697257996, | |
| "learning_rate": 6.593751770102178e-05, | |
| "loss": 0.0325, | |
| "step": 4270 | |
| }, | |
| { | |
| "epoch": 1.037072934334868, | |
| "grad_norm": 0.15026292204856873, | |
| "learning_rate": 6.578070857242823e-05, | |
| "loss": 0.0343, | |
| "step": 4280 | |
| }, | |
| { | |
| "epoch": 1.039496001938454, | |
| "grad_norm": 0.12519890069961548, | |
| "learning_rate": 6.562372686853402e-05, | |
| "loss": 0.0345, | |
| "step": 4290 | |
| }, | |
| { | |
| "epoch": 1.0419190695420402, | |
| "grad_norm": 0.1458505094051361, | |
| "learning_rate": 6.546657430606593e-05, | |
| "loss": 0.0411, | |
| "step": 4300 | |
| }, | |
| { | |
| "epoch": 1.0443421371456263, | |
| "grad_norm": 0.18468616902828217, | |
| "learning_rate": 6.530925260361918e-05, | |
| "loss": 0.033, | |
| "step": 4310 | |
| }, | |
| { | |
| "epoch": 1.0467652047492124, | |
| "grad_norm": 0.154633030295372, | |
| "learning_rate": 6.515176348163871e-05, | |
| "loss": 0.0372, | |
| "step": 4320 | |
| }, | |
| { | |
| "epoch": 1.0491882723527985, | |
| "grad_norm": 0.1589757353067398, | |
| "learning_rate": 6.499410866240032e-05, | |
| "loss": 0.0305, | |
| "step": 4330 | |
| }, | |
| { | |
| "epoch": 1.0516113399563847, | |
| "grad_norm": 0.21719059348106384, | |
| "learning_rate": 6.48362898699919e-05, | |
| "loss": 0.0328, | |
| "step": 4340 | |
| }, | |
| { | |
| "epoch": 1.054034407559971, | |
| "grad_norm": 0.20215091109275818, | |
| "learning_rate": 6.467830883029443e-05, | |
| "loss": 0.0366, | |
| "step": 4350 | |
| }, | |
| { | |
| "epoch": 1.0564574751635571, | |
| "grad_norm": 0.15874029695987701, | |
| "learning_rate": 6.452016727096326e-05, | |
| "loss": 0.0301, | |
| "step": 4360 | |
| }, | |
| { | |
| "epoch": 1.0588805427671433, | |
| "grad_norm": 0.1615975797176361, | |
| "learning_rate": 6.436186692140916e-05, | |
| "loss": 0.0342, | |
| "step": 4370 | |
| }, | |
| { | |
| "epoch": 1.0613036103707294, | |
| "grad_norm": 0.16992174088954926, | |
| "learning_rate": 6.420340951277938e-05, | |
| "loss": 0.0365, | |
| "step": 4380 | |
| }, | |
| { | |
| "epoch": 1.0637266779743155, | |
| "grad_norm": 0.1354915052652359, | |
| "learning_rate": 6.404479677793874e-05, | |
| "loss": 0.0361, | |
| "step": 4390 | |
| }, | |
| { | |
| "epoch": 1.0661497455779017, | |
| "grad_norm": 0.24680233001708984, | |
| "learning_rate": 6.388603045145075e-05, | |
| "loss": 0.0466, | |
| "step": 4400 | |
| }, | |
| { | |
| "epoch": 1.0685728131814878, | |
| "grad_norm": 0.20606473088264465, | |
| "learning_rate": 6.372711226955843e-05, | |
| "loss": 0.0396, | |
| "step": 4410 | |
| }, | |
| { | |
| "epoch": 1.070995880785074, | |
| "grad_norm": 0.16568884253501892, | |
| "learning_rate": 6.356804397016564e-05, | |
| "loss": 0.0404, | |
| "step": 4420 | |
| }, | |
| { | |
| "epoch": 1.07341894838866, | |
| "grad_norm": 0.1599683314561844, | |
| "learning_rate": 6.340882729281779e-05, | |
| "loss": 0.0384, | |
| "step": 4430 | |
| }, | |
| { | |
| "epoch": 1.0758420159922462, | |
| "grad_norm": 0.2357213795185089, | |
| "learning_rate": 6.324946397868294e-05, | |
| "loss": 0.0435, | |
| "step": 4440 | |
| }, | |
| { | |
| "epoch": 1.0782650835958323, | |
| "grad_norm": 0.2821190357208252, | |
| "learning_rate": 6.308995577053276e-05, | |
| "loss": 0.0359, | |
| "step": 4450 | |
| }, | |
| { | |
| "epoch": 1.0806881511994184, | |
| "grad_norm": 0.35420918464660645, | |
| "learning_rate": 6.293030441272347e-05, | |
| "loss": 0.0394, | |
| "step": 4460 | |
| }, | |
| { | |
| "epoch": 1.0831112188030045, | |
| "grad_norm": 0.17902354896068573, | |
| "learning_rate": 6.277051165117677e-05, | |
| "loss": 0.0383, | |
| "step": 4470 | |
| }, | |
| { | |
| "epoch": 1.0855342864065907, | |
| "grad_norm": 0.21981704235076904, | |
| "learning_rate": 6.261057923336064e-05, | |
| "loss": 0.0385, | |
| "step": 4480 | |
| }, | |
| { | |
| "epoch": 1.0879573540101768, | |
| "grad_norm": 0.14618508517742157, | |
| "learning_rate": 6.245050890827042e-05, | |
| "loss": 0.0385, | |
| "step": 4490 | |
| }, | |
| { | |
| "epoch": 1.0903804216137631, | |
| "grad_norm": 0.1590287983417511, | |
| "learning_rate": 6.229030242640952e-05, | |
| "loss": 0.0376, | |
| "step": 4500 | |
| }, | |
| { | |
| "epoch": 1.0928034892173493, | |
| "grad_norm": 0.12258179485797882, | |
| "learning_rate": 6.212996153977037e-05, | |
| "loss": 0.0411, | |
| "step": 4510 | |
| }, | |
| { | |
| "epoch": 1.0952265568209354, | |
| "grad_norm": 0.24019291996955872, | |
| "learning_rate": 6.196948800181523e-05, | |
| "loss": 0.0366, | |
| "step": 4520 | |
| }, | |
| { | |
| "epoch": 1.0976496244245215, | |
| "grad_norm": 0.19612370431423187, | |
| "learning_rate": 6.180888356745695e-05, | |
| "loss": 0.0382, | |
| "step": 4530 | |
| }, | |
| { | |
| "epoch": 1.1000726920281076, | |
| "grad_norm": 0.13913317024707794, | |
| "learning_rate": 6.164814999303995e-05, | |
| "loss": 0.0327, | |
| "step": 4540 | |
| }, | |
| { | |
| "epoch": 1.1024957596316938, | |
| "grad_norm": 0.11045411229133606, | |
| "learning_rate": 6.148728903632081e-05, | |
| "loss": 0.0333, | |
| "step": 4550 | |
| }, | |
| { | |
| "epoch": 1.10491882723528, | |
| "grad_norm": 0.22665664553642273, | |
| "learning_rate": 6.132630245644921e-05, | |
| "loss": 0.0304, | |
| "step": 4560 | |
| }, | |
| { | |
| "epoch": 1.107341894838866, | |
| "grad_norm": 0.14154298603534698, | |
| "learning_rate": 6.116519201394857e-05, | |
| "loss": 0.0386, | |
| "step": 4570 | |
| }, | |
| { | |
| "epoch": 1.1097649624424522, | |
| "grad_norm": 0.1647247076034546, | |
| "learning_rate": 6.10039594706969e-05, | |
| "loss": 0.0381, | |
| "step": 4580 | |
| }, | |
| { | |
| "epoch": 1.1121880300460383, | |
| "grad_norm": 0.18806114792823792, | |
| "learning_rate": 6.084260658990744e-05, | |
| "loss": 0.0374, | |
| "step": 4590 | |
| }, | |
| { | |
| "epoch": 1.1146110976496244, | |
| "grad_norm": 0.1477152705192566, | |
| "learning_rate": 6.068113513610943e-05, | |
| "loss": 0.0305, | |
| "step": 4600 | |
| }, | |
| { | |
| "epoch": 1.1170341652532105, | |
| "grad_norm": 0.1263800710439682, | |
| "learning_rate": 6.0519546875128876e-05, | |
| "loss": 0.0388, | |
| "step": 4610 | |
| }, | |
| { | |
| "epoch": 1.1194572328567967, | |
| "grad_norm": 0.15480294823646545, | |
| "learning_rate": 6.035784357406906e-05, | |
| "loss": 0.0406, | |
| "step": 4620 | |
| }, | |
| { | |
| "epoch": 1.1218803004603828, | |
| "grad_norm": 0.17695686221122742, | |
| "learning_rate": 6.01960270012914e-05, | |
| "loss": 0.0338, | |
| "step": 4630 | |
| }, | |
| { | |
| "epoch": 1.124303368063969, | |
| "grad_norm": 0.19480949640274048, | |
| "learning_rate": 6.003409892639599e-05, | |
| "loss": 0.0382, | |
| "step": 4640 | |
| }, | |
| { | |
| "epoch": 1.126726435667555, | |
| "grad_norm": 0.20768029987812042, | |
| "learning_rate": 5.9872061120202336e-05, | |
| "loss": 0.0338, | |
| "step": 4650 | |
| }, | |
| { | |
| "epoch": 1.1291495032711412, | |
| "grad_norm": 0.11325472593307495, | |
| "learning_rate": 5.9709915354729914e-05, | |
| "loss": 0.0481, | |
| "step": 4660 | |
| }, | |
| { | |
| "epoch": 1.1315725708747273, | |
| "grad_norm": 0.17669686675071716, | |
| "learning_rate": 5.9547663403178824e-05, | |
| "loss": 0.0392, | |
| "step": 4670 | |
| }, | |
| { | |
| "epoch": 1.1339956384783136, | |
| "grad_norm": 0.17881830036640167, | |
| "learning_rate": 5.9385307039910445e-05, | |
| "loss": 0.0318, | |
| "step": 4680 | |
| }, | |
| { | |
| "epoch": 1.1364187060818998, | |
| "grad_norm": 0.21821770071983337, | |
| "learning_rate": 5.922284804042792e-05, | |
| "loss": 0.0395, | |
| "step": 4690 | |
| }, | |
| { | |
| "epoch": 1.138841773685486, | |
| "grad_norm": 0.18453213572502136, | |
| "learning_rate": 5.906028818135687e-05, | |
| "loss": 0.0425, | |
| "step": 4700 | |
| }, | |
| { | |
| "epoch": 1.141264841289072, | |
| "grad_norm": 0.16833944618701935, | |
| "learning_rate": 5.889762924042585e-05, | |
| "loss": 0.0319, | |
| "step": 4710 | |
| }, | |
| { | |
| "epoch": 1.1436879088926581, | |
| "grad_norm": 0.16403602063655853, | |
| "learning_rate": 5.873487299644699e-05, | |
| "loss": 0.0382, | |
| "step": 4720 | |
| }, | |
| { | |
| "epoch": 1.1461109764962443, | |
| "grad_norm": 0.20726455748081207, | |
| "learning_rate": 5.857202122929649e-05, | |
| "loss": 0.0355, | |
| "step": 4730 | |
| }, | |
| { | |
| "epoch": 1.1485340440998304, | |
| "grad_norm": 0.16238965094089508, | |
| "learning_rate": 5.840907571989518e-05, | |
| "loss": 0.0334, | |
| "step": 4740 | |
| }, | |
| { | |
| "epoch": 1.1509571117034165, | |
| "grad_norm": 0.16916604340076447, | |
| "learning_rate": 5.824603825018904e-05, | |
| "loss": 0.0338, | |
| "step": 4750 | |
| }, | |
| { | |
| "epoch": 1.1533801793070027, | |
| "grad_norm": 0.21727706491947174, | |
| "learning_rate": 5.808291060312975e-05, | |
| "loss": 0.0383, | |
| "step": 4760 | |
| }, | |
| { | |
| "epoch": 1.1558032469105888, | |
| "grad_norm": 0.1924837827682495, | |
| "learning_rate": 5.7919694562655083e-05, | |
| "loss": 0.0367, | |
| "step": 4770 | |
| }, | |
| { | |
| "epoch": 1.158226314514175, | |
| "grad_norm": 0.13621880114078522, | |
| "learning_rate": 5.775639191366954e-05, | |
| "loss": 0.0375, | |
| "step": 4780 | |
| }, | |
| { | |
| "epoch": 1.160649382117761, | |
| "grad_norm": 0.177626371383667, | |
| "learning_rate": 5.75930044420247e-05, | |
| "loss": 0.0342, | |
| "step": 4790 | |
| }, | |
| { | |
| "epoch": 1.1630724497213472, | |
| "grad_norm": 0.1389589011669159, | |
| "learning_rate": 5.74295339344998e-05, | |
| "loss": 0.0401, | |
| "step": 4800 | |
| }, | |
| { | |
| "epoch": 1.1654955173249333, | |
| "grad_norm": 0.21249249577522278, | |
| "learning_rate": 5.726598217878211e-05, | |
| "loss": 0.0346, | |
| "step": 4810 | |
| }, | |
| { | |
| "epoch": 1.1679185849285194, | |
| "grad_norm": 0.22570757567882538, | |
| "learning_rate": 5.71023509634474e-05, | |
| "loss": 0.0302, | |
| "step": 4820 | |
| }, | |
| { | |
| "epoch": 1.1703416525321058, | |
| "grad_norm": 0.25085312128067017, | |
| "learning_rate": 5.693864207794049e-05, | |
| "loss": 0.0382, | |
| "step": 4830 | |
| }, | |
| { | |
| "epoch": 1.1727647201356919, | |
| "grad_norm": 0.1335025131702423, | |
| "learning_rate": 5.677485731255545e-05, | |
| "loss": 0.0461, | |
| "step": 4840 | |
| }, | |
| { | |
| "epoch": 1.175187787739278, | |
| "grad_norm": 0.16283638775348663, | |
| "learning_rate": 5.6610998458416296e-05, | |
| "loss": 0.0332, | |
| "step": 4850 | |
| }, | |
| { | |
| "epoch": 1.1776108553428641, | |
| "grad_norm": 0.20627540349960327, | |
| "learning_rate": 5.644706730745716e-05, | |
| "loss": 0.034, | |
| "step": 4860 | |
| }, | |
| { | |
| "epoch": 1.1800339229464503, | |
| "grad_norm": 0.13472777605056763, | |
| "learning_rate": 5.628306565240287e-05, | |
| "loss": 0.0351, | |
| "step": 4870 | |
| }, | |
| { | |
| "epoch": 1.1824569905500364, | |
| "grad_norm": 0.12939082086086273, | |
| "learning_rate": 5.611899528674923e-05, | |
| "loss": 0.0393, | |
| "step": 4880 | |
| }, | |
| { | |
| "epoch": 1.1848800581536225, | |
| "grad_norm": 0.24731796979904175, | |
| "learning_rate": 5.595485800474349e-05, | |
| "loss": 0.0363, | |
| "step": 4890 | |
| }, | |
| { | |
| "epoch": 1.1873031257572086, | |
| "grad_norm": 0.1261483132839203, | |
| "learning_rate": 5.579065560136467e-05, | |
| "loss": 0.0403, | |
| "step": 4900 | |
| }, | |
| { | |
| "epoch": 1.1897261933607948, | |
| "grad_norm": 0.18301163613796234, | |
| "learning_rate": 5.562638987230392e-05, | |
| "loss": 0.0362, | |
| "step": 4910 | |
| }, | |
| { | |
| "epoch": 1.192149260964381, | |
| "grad_norm": 0.28962644934654236, | |
| "learning_rate": 5.546206261394498e-05, | |
| "loss": 0.0304, | |
| "step": 4920 | |
| }, | |
| { | |
| "epoch": 1.194572328567967, | |
| "grad_norm": 0.19975295662879944, | |
| "learning_rate": 5.529767562334437e-05, | |
| "loss": 0.0385, | |
| "step": 4930 | |
| }, | |
| { | |
| "epoch": 1.1969953961715531, | |
| "grad_norm": 0.17144648730754852, | |
| "learning_rate": 5.5133230698211926e-05, | |
| "loss": 0.0334, | |
| "step": 4940 | |
| }, | |
| { | |
| "epoch": 1.1994184637751393, | |
| "grad_norm": 0.1319882720708847, | |
| "learning_rate": 5.496872963689096e-05, | |
| "loss": 0.0375, | |
| "step": 4950 | |
| }, | |
| { | |
| "epoch": 1.2018415313787254, | |
| "grad_norm": 0.1552356332540512, | |
| "learning_rate": 5.4804174238338756e-05, | |
| "loss": 0.0314, | |
| "step": 4960 | |
| }, | |
| { | |
| "epoch": 1.2042645989823115, | |
| "grad_norm": 0.18072573840618134, | |
| "learning_rate": 5.463956630210678e-05, | |
| "loss": 0.0436, | |
| "step": 4970 | |
| }, | |
| { | |
| "epoch": 1.2066876665858977, | |
| "grad_norm": 0.18687626719474792, | |
| "learning_rate": 5.4474907628321046e-05, | |
| "loss": 0.0391, | |
| "step": 4980 | |
| }, | |
| { | |
| "epoch": 1.2091107341894838, | |
| "grad_norm": 0.25423336029052734, | |
| "learning_rate": 5.431020001766244e-05, | |
| "loss": 0.0332, | |
| "step": 4990 | |
| }, | |
| { | |
| "epoch": 1.21153380179307, | |
| "grad_norm": 0.27445152401924133, | |
| "learning_rate": 5.4145445271346986e-05, | |
| "loss": 0.0332, | |
| "step": 5000 | |
| }, | |
| { | |
| "epoch": 1.2139568693966563, | |
| "grad_norm": 0.21275608241558075, | |
| "learning_rate": 5.398064519110622e-05, | |
| "loss": 0.0368, | |
| "step": 5010 | |
| }, | |
| { | |
| "epoch": 1.2163799370002424, | |
| "grad_norm": 0.15624688565731049, | |
| "learning_rate": 5.3815801579167394e-05, | |
| "loss": 0.0357, | |
| "step": 5020 | |
| }, | |
| { | |
| "epoch": 1.2188030046038285, | |
| "grad_norm": 0.11576736718416214, | |
| "learning_rate": 5.365091623823382e-05, | |
| "loss": 0.0363, | |
| "step": 5030 | |
| }, | |
| { | |
| "epoch": 1.2212260722074146, | |
| "grad_norm": 0.1762501299381256, | |
| "learning_rate": 5.348599097146521e-05, | |
| "loss": 0.0338, | |
| "step": 5040 | |
| }, | |
| { | |
| "epoch": 1.2236491398110008, | |
| "grad_norm": 0.2148737758398056, | |
| "learning_rate": 5.3321027582457836e-05, | |
| "loss": 0.0348, | |
| "step": 5050 | |
| }, | |
| { | |
| "epoch": 1.226072207414587, | |
| "grad_norm": 0.20553722977638245, | |
| "learning_rate": 5.315602787522491e-05, | |
| "loss": 0.035, | |
| "step": 5060 | |
| }, | |
| { | |
| "epoch": 1.228495275018173, | |
| "grad_norm": 0.20314088463783264, | |
| "learning_rate": 5.299099365417678e-05, | |
| "loss": 0.0269, | |
| "step": 5070 | |
| }, | |
| { | |
| "epoch": 1.2309183426217591, | |
| "grad_norm": 0.14651304483413696, | |
| "learning_rate": 5.2825926724101236e-05, | |
| "loss": 0.035, | |
| "step": 5080 | |
| }, | |
| { | |
| "epoch": 1.2333414102253453, | |
| "grad_norm": 0.18988747894763947, | |
| "learning_rate": 5.26608288901438e-05, | |
| "loss": 0.0399, | |
| "step": 5090 | |
| }, | |
| { | |
| "epoch": 1.2357644778289314, | |
| "grad_norm": 0.1370977759361267, | |
| "learning_rate": 5.24957019577879e-05, | |
| "loss": 0.0326, | |
| "step": 5100 | |
| }, | |
| { | |
| "epoch": 1.2381875454325175, | |
| "grad_norm": 0.1975657343864441, | |
| "learning_rate": 5.2330547732835266e-05, | |
| "loss": 0.0344, | |
| "step": 5110 | |
| }, | |
| { | |
| "epoch": 1.2406106130361036, | |
| "grad_norm": 0.1782601922750473, | |
| "learning_rate": 5.2165368021385996e-05, | |
| "loss": 0.044, | |
| "step": 5120 | |
| }, | |
| { | |
| "epoch": 1.2430336806396898, | |
| "grad_norm": 0.1720588505268097, | |
| "learning_rate": 5.200016462981897e-05, | |
| "loss": 0.0293, | |
| "step": 5130 | |
| }, | |
| { | |
| "epoch": 1.245456748243276, | |
| "grad_norm": 0.2118971198797226, | |
| "learning_rate": 5.1834939364772015e-05, | |
| "loss": 0.0307, | |
| "step": 5140 | |
| }, | |
| { | |
| "epoch": 1.2478798158468623, | |
| "grad_norm": 0.17877288162708282, | |
| "learning_rate": 5.166969403312214e-05, | |
| "loss": 0.0307, | |
| "step": 5150 | |
| }, | |
| { | |
| "epoch": 1.2503028834504484, | |
| "grad_norm": 0.18469521403312683, | |
| "learning_rate": 5.1504430441965844e-05, | |
| "loss": 0.0368, | |
| "step": 5160 | |
| }, | |
| { | |
| "epoch": 1.2527259510540345, | |
| "grad_norm": 0.1583702713251114, | |
| "learning_rate": 5.133915039859923e-05, | |
| "loss": 0.0292, | |
| "step": 5170 | |
| }, | |
| { | |
| "epoch": 1.2551490186576206, | |
| "grad_norm": 0.18019641935825348, | |
| "learning_rate": 5.1173855710498444e-05, | |
| "loss": 0.0309, | |
| "step": 5180 | |
| }, | |
| { | |
| "epoch": 1.2575720862612068, | |
| "grad_norm": 0.18975822627544403, | |
| "learning_rate": 5.100854818529967e-05, | |
| "loss": 0.034, | |
| "step": 5190 | |
| }, | |
| { | |
| "epoch": 1.2599951538647929, | |
| "grad_norm": 0.1740541160106659, | |
| "learning_rate": 5.084322963077951e-05, | |
| "loss": 0.0424, | |
| "step": 5200 | |
| }, | |
| { | |
| "epoch": 1.262418221468379, | |
| "grad_norm": 0.20315106213092804, | |
| "learning_rate": 5.067790185483522e-05, | |
| "loss": 0.0345, | |
| "step": 5210 | |
| }, | |
| { | |
| "epoch": 1.2648412890719651, | |
| "grad_norm": 0.2409934550523758, | |
| "learning_rate": 5.0512566665464844e-05, | |
| "loss": 0.0362, | |
| "step": 5220 | |
| }, | |
| { | |
| "epoch": 1.2672643566755513, | |
| "grad_norm": 0.08300124853849411, | |
| "learning_rate": 5.034722587074755e-05, | |
| "loss": 0.0331, | |
| "step": 5230 | |
| }, | |
| { | |
| "epoch": 1.2696874242791374, | |
| "grad_norm": 0.1453070491552353, | |
| "learning_rate": 5.018188127882375e-05, | |
| "loss": 0.0299, | |
| "step": 5240 | |
| }, | |
| { | |
| "epoch": 1.2721104918827235, | |
| "grad_norm": 0.1614101231098175, | |
| "learning_rate": 5.0016534697875417e-05, | |
| "loss": 0.0285, | |
| "step": 5250 | |
| }, | |
| { | |
| "epoch": 1.2745335594863096, | |
| "grad_norm": 0.13010790944099426, | |
| "learning_rate": 4.9851187936106294e-05, | |
| "loss": 0.0385, | |
| "step": 5260 | |
| }, | |
| { | |
| "epoch": 1.2769566270898958, | |
| "grad_norm": 0.19348230957984924, | |
| "learning_rate": 4.968584280172206e-05, | |
| "loss": 0.0369, | |
| "step": 5270 | |
| }, | |
| { | |
| "epoch": 1.279379694693482, | |
| "grad_norm": 0.18507100641727448, | |
| "learning_rate": 4.95205011029106e-05, | |
| "loss": 0.0354, | |
| "step": 5280 | |
| }, | |
| { | |
| "epoch": 1.281802762297068, | |
| "grad_norm": 0.16559474170207977, | |
| "learning_rate": 4.935516464782227e-05, | |
| "loss": 0.0365, | |
| "step": 5290 | |
| }, | |
| { | |
| "epoch": 1.2842258299006541, | |
| "grad_norm": 0.1510300487279892, | |
| "learning_rate": 4.918983524455003e-05, | |
| "loss": 0.0336, | |
| "step": 5300 | |
| }, | |
| { | |
| "epoch": 1.2866488975042403, | |
| "grad_norm": 0.17999106645584106, | |
| "learning_rate": 4.9024514701109766e-05, | |
| "loss": 0.035, | |
| "step": 5310 | |
| }, | |
| { | |
| "epoch": 1.2890719651078264, | |
| "grad_norm": 0.1714041531085968, | |
| "learning_rate": 4.885920482542043e-05, | |
| "loss": 0.0364, | |
| "step": 5320 | |
| }, | |
| { | |
| "epoch": 1.2914950327114125, | |
| "grad_norm": 0.12049345672130585, | |
| "learning_rate": 4.869390742528438e-05, | |
| "loss": 0.032, | |
| "step": 5330 | |
| }, | |
| { | |
| "epoch": 1.2939181003149987, | |
| "grad_norm": 0.12734355032444, | |
| "learning_rate": 4.852862430836744e-05, | |
| "loss": 0.0362, | |
| "step": 5340 | |
| }, | |
| { | |
| "epoch": 1.296341167918585, | |
| "grad_norm": 0.15307103097438812, | |
| "learning_rate": 4.836335728217933e-05, | |
| "loss": 0.0363, | |
| "step": 5350 | |
| }, | |
| { | |
| "epoch": 1.2987642355221711, | |
| "grad_norm": 0.1784488409757614, | |
| "learning_rate": 4.819810815405379e-05, | |
| "loss": 0.0426, | |
| "step": 5360 | |
| }, | |
| { | |
| "epoch": 1.3011873031257573, | |
| "grad_norm": 0.13317061960697174, | |
| "learning_rate": 4.803287873112877e-05, | |
| "loss": 0.0387, | |
| "step": 5370 | |
| }, | |
| { | |
| "epoch": 1.3036103707293434, | |
| "grad_norm": 0.16432419419288635, | |
| "learning_rate": 4.786767082032681e-05, | |
| "loss": 0.0349, | |
| "step": 5380 | |
| }, | |
| { | |
| "epoch": 1.3060334383329295, | |
| "grad_norm": 0.15296678245067596, | |
| "learning_rate": 4.77024862283351e-05, | |
| "loss": 0.0324, | |
| "step": 5390 | |
| }, | |
| { | |
| "epoch": 1.3084565059365156, | |
| "grad_norm": 0.2578441798686981, | |
| "learning_rate": 4.753732676158593e-05, | |
| "loss": 0.037, | |
| "step": 5400 | |
| }, | |
| { | |
| "epoch": 1.3108795735401018, | |
| "grad_norm": 0.16813112795352936, | |
| "learning_rate": 4.737219422623672e-05, | |
| "loss": 0.0349, | |
| "step": 5410 | |
| }, | |
| { | |
| "epoch": 1.3133026411436879, | |
| "grad_norm": 0.10465259104967117, | |
| "learning_rate": 4.720709042815044e-05, | |
| "loss": 0.027, | |
| "step": 5420 | |
| }, | |
| { | |
| "epoch": 1.315725708747274, | |
| "grad_norm": 0.15062761306762695, | |
| "learning_rate": 4.704201717287578e-05, | |
| "loss": 0.0333, | |
| "step": 5430 | |
| }, | |
| { | |
| "epoch": 1.3181487763508601, | |
| "grad_norm": 0.1879766434431076, | |
| "learning_rate": 4.6876976265627404e-05, | |
| "loss": 0.0355, | |
| "step": 5440 | |
| }, | |
| { | |
| "epoch": 1.3205718439544463, | |
| "grad_norm": 0.2238711267709732, | |
| "learning_rate": 4.671196951126626e-05, | |
| "loss": 0.0358, | |
| "step": 5450 | |
| }, | |
| { | |
| "epoch": 1.3229949115580324, | |
| "grad_norm": 0.3197612464427948, | |
| "learning_rate": 4.654699871427971e-05, | |
| "loss": 0.0356, | |
| "step": 5460 | |
| }, | |
| { | |
| "epoch": 1.3254179791616187, | |
| "grad_norm": 0.14447778463363647, | |
| "learning_rate": 4.6382065678762034e-05, | |
| "loss": 0.0301, | |
| "step": 5470 | |
| }, | |
| { | |
| "epoch": 1.3278410467652049, | |
| "grad_norm": 0.13374896347522736, | |
| "learning_rate": 4.6217172208394424e-05, | |
| "loss": 0.0288, | |
| "step": 5480 | |
| }, | |
| { | |
| "epoch": 1.330264114368791, | |
| "grad_norm": 0.15201738476753235, | |
| "learning_rate": 4.605232010642549e-05, | |
| "loss": 0.0288, | |
| "step": 5490 | |
| }, | |
| { | |
| "epoch": 1.3326871819723771, | |
| "grad_norm": 0.21614566445350647, | |
| "learning_rate": 4.588751117565142e-05, | |
| "loss": 0.0304, | |
| "step": 5500 | |
| }, | |
| { | |
| "epoch": 1.3351102495759632, | |
| "grad_norm": 0.1442033052444458, | |
| "learning_rate": 4.5722747218396214e-05, | |
| "loss": 0.0315, | |
| "step": 5510 | |
| }, | |
| { | |
| "epoch": 1.3375333171795494, | |
| "grad_norm": 0.18296325206756592, | |
| "learning_rate": 4.5558030036492194e-05, | |
| "loss": 0.029, | |
| "step": 5520 | |
| }, | |
| { | |
| "epoch": 1.3399563847831355, | |
| "grad_norm": 0.25480255484580994, | |
| "learning_rate": 4.539336143125999e-05, | |
| "loss": 0.0399, | |
| "step": 5530 | |
| }, | |
| { | |
| "epoch": 1.3423794523867216, | |
| "grad_norm": 0.18130719661712646, | |
| "learning_rate": 4.522874320348916e-05, | |
| "loss": 0.0331, | |
| "step": 5540 | |
| }, | |
| { | |
| "epoch": 1.3448025199903078, | |
| "grad_norm": 0.1488102525472641, | |
| "learning_rate": 4.506417715341821e-05, | |
| "loss": 0.0268, | |
| "step": 5550 | |
| }, | |
| { | |
| "epoch": 1.3472255875938939, | |
| "grad_norm": 0.11495231091976166, | |
| "learning_rate": 4.489966508071511e-05, | |
| "loss": 0.0327, | |
| "step": 5560 | |
| }, | |
| { | |
| "epoch": 1.34964865519748, | |
| "grad_norm": 0.17555421590805054, | |
| "learning_rate": 4.4735208784457575e-05, | |
| "loss": 0.0306, | |
| "step": 5570 | |
| }, | |
| { | |
| "epoch": 1.3520717228010661, | |
| "grad_norm": 0.17269304394721985, | |
| "learning_rate": 4.457081006311325e-05, | |
| "loss": 0.0322, | |
| "step": 5580 | |
| }, | |
| { | |
| "epoch": 1.3544947904046523, | |
| "grad_norm": 0.13520239293575287, | |
| "learning_rate": 4.440647071452027e-05, | |
| "loss": 0.0313, | |
| "step": 5590 | |
| }, | |
| { | |
| "epoch": 1.3569178580082384, | |
| "grad_norm": 0.20179545879364014, | |
| "learning_rate": 4.424219253586737e-05, | |
| "loss": 0.0355, | |
| "step": 5600 | |
| }, | |
| { | |
| "epoch": 1.3593409256118245, | |
| "grad_norm": 0.18395577371120453, | |
| "learning_rate": 4.407797732367443e-05, | |
| "loss": 0.0394, | |
| "step": 5610 | |
| }, | |
| { | |
| "epoch": 1.3617639932154106, | |
| "grad_norm": 0.18193255364894867, | |
| "learning_rate": 4.391382687377268e-05, | |
| "loss": 0.0326, | |
| "step": 5620 | |
| }, | |
| { | |
| "epoch": 1.3641870608189968, | |
| "grad_norm": 0.11756373196840286, | |
| "learning_rate": 4.374974298128512e-05, | |
| "loss": 0.0323, | |
| "step": 5630 | |
| }, | |
| { | |
| "epoch": 1.366610128422583, | |
| "grad_norm": 0.1866205334663391, | |
| "learning_rate": 4.358572744060699e-05, | |
| "loss": 0.0351, | |
| "step": 5640 | |
| }, | |
| { | |
| "epoch": 1.369033196026169, | |
| "grad_norm": 0.21558016538619995, | |
| "learning_rate": 4.342178204538588e-05, | |
| "loss": 0.029, | |
| "step": 5650 | |
| }, | |
| { | |
| "epoch": 1.3714562636297551, | |
| "grad_norm": 0.17946316301822662, | |
| "learning_rate": 4.325790858850241e-05, | |
| "loss": 0.0358, | |
| "step": 5660 | |
| }, | |
| { | |
| "epoch": 1.3738793312333413, | |
| "grad_norm": 0.19241704046726227, | |
| "learning_rate": 4.309410886205043e-05, | |
| "loss": 0.0344, | |
| "step": 5670 | |
| }, | |
| { | |
| "epoch": 1.3763023988369276, | |
| "grad_norm": 0.09524490684270859, | |
| "learning_rate": 4.293038465731752e-05, | |
| "loss": 0.0306, | |
| "step": 5680 | |
| }, | |
| { | |
| "epoch": 1.3787254664405137, | |
| "grad_norm": 0.20127789676189423, | |
| "learning_rate": 4.276673776476533e-05, | |
| "loss": 0.037, | |
| "step": 5690 | |
| }, | |
| { | |
| "epoch": 1.3811485340440999, | |
| "grad_norm": 0.22112242877483368, | |
| "learning_rate": 4.260316997401007e-05, | |
| "loss": 0.0315, | |
| "step": 5700 | |
| }, | |
| { | |
| "epoch": 1.383571601647686, | |
| "grad_norm": 0.21605165302753448, | |
| "learning_rate": 4.243968307380293e-05, | |
| "loss": 0.035, | |
| "step": 5710 | |
| }, | |
| { | |
| "epoch": 1.3859946692512721, | |
| "grad_norm": 0.16192230582237244, | |
| "learning_rate": 4.22762788520104e-05, | |
| "loss": 0.0358, | |
| "step": 5720 | |
| }, | |
| { | |
| "epoch": 1.3884177368548583, | |
| "grad_norm": 0.13257241249084473, | |
| "learning_rate": 4.211295909559491e-05, | |
| "loss": 0.0412, | |
| "step": 5730 | |
| }, | |
| { | |
| "epoch": 1.3908408044584444, | |
| "grad_norm": 0.1690298169851303, | |
| "learning_rate": 4.194972559059511e-05, | |
| "loss": 0.0298, | |
| "step": 5740 | |
| }, | |
| { | |
| "epoch": 1.3932638720620305, | |
| "grad_norm": 0.2850973308086395, | |
| "learning_rate": 4.178658012210651e-05, | |
| "loss": 0.0301, | |
| "step": 5750 | |
| }, | |
| { | |
| "epoch": 1.3956869396656166, | |
| "grad_norm": 0.11184634268283844, | |
| "learning_rate": 4.162352447426177e-05, | |
| "loss": 0.0342, | |
| "step": 5760 | |
| }, | |
| { | |
| "epoch": 1.3981100072692028, | |
| "grad_norm": 0.1616874635219574, | |
| "learning_rate": 4.146056043021135e-05, | |
| "loss": 0.0322, | |
| "step": 5770 | |
| }, | |
| { | |
| "epoch": 1.4005330748727889, | |
| "grad_norm": 0.1666002720594406, | |
| "learning_rate": 4.1297689772103944e-05, | |
| "loss": 0.036, | |
| "step": 5780 | |
| }, | |
| { | |
| "epoch": 1.402956142476375, | |
| "grad_norm": 0.10762447863817215, | |
| "learning_rate": 4.113491428106694e-05, | |
| "loss": 0.0318, | |
| "step": 5790 | |
| }, | |
| { | |
| "epoch": 1.4053792100799614, | |
| "grad_norm": 0.12863484025001526, | |
| "learning_rate": 4.0972235737187055e-05, | |
| "loss": 0.0315, | |
| "step": 5800 | |
| }, | |
| { | |
| "epoch": 1.4078022776835475, | |
| "grad_norm": 0.1334962099790573, | |
| "learning_rate": 4.080965591949076e-05, | |
| "loss": 0.0294, | |
| "step": 5810 | |
| }, | |
| { | |
| "epoch": 1.4102253452871336, | |
| "grad_norm": 0.15371330082416534, | |
| "learning_rate": 4.0647176605924924e-05, | |
| "loss": 0.0261, | |
| "step": 5820 | |
| }, | |
| { | |
| "epoch": 1.4126484128907197, | |
| "grad_norm": 0.13973890244960785, | |
| "learning_rate": 4.0484799573337255e-05, | |
| "loss": 0.038, | |
| "step": 5830 | |
| }, | |
| { | |
| "epoch": 1.4150714804943059, | |
| "grad_norm": 0.1423688679933548, | |
| "learning_rate": 4.032252659745699e-05, | |
| "loss": 0.034, | |
| "step": 5840 | |
| }, | |
| { | |
| "epoch": 1.417494548097892, | |
| "grad_norm": 0.29787349700927734, | |
| "learning_rate": 4.016035945287539e-05, | |
| "loss": 0.0421, | |
| "step": 5850 | |
| }, | |
| { | |
| "epoch": 1.4199176157014781, | |
| "grad_norm": 0.16662997007369995, | |
| "learning_rate": 3.999829991302635e-05, | |
| "loss": 0.0321, | |
| "step": 5860 | |
| }, | |
| { | |
| "epoch": 1.4223406833050642, | |
| "grad_norm": 0.13844655454158783, | |
| "learning_rate": 3.983634975016707e-05, | |
| "loss": 0.0317, | |
| "step": 5870 | |
| }, | |
| { | |
| "epoch": 1.4247637509086504, | |
| "grad_norm": 0.14252415299415588, | |
| "learning_rate": 3.967451073535854e-05, | |
| "loss": 0.0436, | |
| "step": 5880 | |
| }, | |
| { | |
| "epoch": 1.4271868185122365, | |
| "grad_norm": 0.23531639575958252, | |
| "learning_rate": 3.951278463844633e-05, | |
| "loss": 0.0297, | |
| "step": 5890 | |
| }, | |
| { | |
| "epoch": 1.4296098861158226, | |
| "grad_norm": 0.1339505910873413, | |
| "learning_rate": 3.935117322804111e-05, | |
| "loss": 0.034, | |
| "step": 5900 | |
| }, | |
| { | |
| "epoch": 1.4320329537194088, | |
| "grad_norm": 0.2176676243543625, | |
| "learning_rate": 3.918967827149938e-05, | |
| "loss": 0.0339, | |
| "step": 5910 | |
| }, | |
| { | |
| "epoch": 1.4344560213229949, | |
| "grad_norm": 0.228776216506958, | |
| "learning_rate": 3.9028301534904094e-05, | |
| "loss": 0.0349, | |
| "step": 5920 | |
| }, | |
| { | |
| "epoch": 1.436879088926581, | |
| "grad_norm": 0.1379360854625702, | |
| "learning_rate": 3.88670447830454e-05, | |
| "loss": 0.0296, | |
| "step": 5930 | |
| }, | |
| { | |
| "epoch": 1.4393021565301671, | |
| "grad_norm": 0.2305406630039215, | |
| "learning_rate": 3.870590977940132e-05, | |
| "loss": 0.0319, | |
| "step": 5940 | |
| }, | |
| { | |
| "epoch": 1.4417252241337533, | |
| "grad_norm": 0.15086810290813446, | |
| "learning_rate": 3.8544898286118404e-05, | |
| "loss": 0.0303, | |
| "step": 5950 | |
| }, | |
| { | |
| "epoch": 1.4441482917373394, | |
| "grad_norm": 0.22204427421092987, | |
| "learning_rate": 3.838401206399257e-05, | |
| "loss": 0.029, | |
| "step": 5960 | |
| }, | |
| { | |
| "epoch": 1.4465713593409255, | |
| "grad_norm": 0.12760701775550842, | |
| "learning_rate": 3.822325287244975e-05, | |
| "loss": 0.0284, | |
| "step": 5970 | |
| }, | |
| { | |
| "epoch": 1.4489944269445116, | |
| "grad_norm": 0.19484172761440277, | |
| "learning_rate": 3.8062622469526725e-05, | |
| "loss": 0.0301, | |
| "step": 5980 | |
| }, | |
| { | |
| "epoch": 1.4514174945480978, | |
| "grad_norm": 0.2005198746919632, | |
| "learning_rate": 3.790212261185183e-05, | |
| "loss": 0.0272, | |
| "step": 5990 | |
| }, | |
| { | |
| "epoch": 1.4538405621516841, | |
| "grad_norm": 0.11993716657161713, | |
| "learning_rate": 3.7741755054625794e-05, | |
| "loss": 0.0331, | |
| "step": 6000 | |
| }, | |
| { | |
| "epoch": 1.4562636297552702, | |
| "grad_norm": 0.17084021866321564, | |
| "learning_rate": 3.758152155160255e-05, | |
| "loss": 0.0264, | |
| "step": 6010 | |
| }, | |
| { | |
| "epoch": 1.4586866973588564, | |
| "grad_norm": 0.1599433720111847, | |
| "learning_rate": 3.742142385506999e-05, | |
| "loss": 0.0346, | |
| "step": 6020 | |
| }, | |
| { | |
| "epoch": 1.4611097649624425, | |
| "grad_norm": 0.19182421267032623, | |
| "learning_rate": 3.72614637158309e-05, | |
| "loss": 0.0272, | |
| "step": 6030 | |
| }, | |
| { | |
| "epoch": 1.4635328325660286, | |
| "grad_norm": 0.16607974469661713, | |
| "learning_rate": 3.710164288318371e-05, | |
| "loss": 0.038, | |
| "step": 6040 | |
| }, | |
| { | |
| "epoch": 1.4659559001696147, | |
| "grad_norm": 0.15567085146903992, | |
| "learning_rate": 3.694196310490345e-05, | |
| "loss": 0.0243, | |
| "step": 6050 | |
| }, | |
| { | |
| "epoch": 1.4683789677732009, | |
| "grad_norm": 0.16886258125305176, | |
| "learning_rate": 3.678242612722259e-05, | |
| "loss": 0.0333, | |
| "step": 6060 | |
| }, | |
| { | |
| "epoch": 1.470802035376787, | |
| "grad_norm": 0.1202966719865799, | |
| "learning_rate": 3.6623033694811953e-05, | |
| "loss": 0.0279, | |
| "step": 6070 | |
| }, | |
| { | |
| "epoch": 1.4732251029803731, | |
| "grad_norm": 0.1705106645822525, | |
| "learning_rate": 3.6463787550761665e-05, | |
| "loss": 0.0369, | |
| "step": 6080 | |
| }, | |
| { | |
| "epoch": 1.4756481705839593, | |
| "grad_norm": 0.12236841022968292, | |
| "learning_rate": 3.630468943656202e-05, | |
| "loss": 0.0293, | |
| "step": 6090 | |
| }, | |
| { | |
| "epoch": 1.4780712381875454, | |
| "grad_norm": 0.1756087988615036, | |
| "learning_rate": 3.6145741092084523e-05, | |
| "loss": 0.0233, | |
| "step": 6100 | |
| }, | |
| { | |
| "epoch": 1.4804943057911315, | |
| "grad_norm": 0.12164104729890823, | |
| "learning_rate": 3.598694425556278e-05, | |
| "loss": 0.0311, | |
| "step": 6110 | |
| }, | |
| { | |
| "epoch": 1.4829173733947176, | |
| "grad_norm": 0.22608721256256104, | |
| "learning_rate": 3.58283006635736e-05, | |
| "loss": 0.0343, | |
| "step": 6120 | |
| }, | |
| { | |
| "epoch": 1.485340440998304, | |
| "grad_norm": 0.18638530373573303, | |
| "learning_rate": 3.566981205101781e-05, | |
| "loss": 0.0329, | |
| "step": 6130 | |
| }, | |
| { | |
| "epoch": 1.48776350860189, | |
| "grad_norm": 0.1426989734172821, | |
| "learning_rate": 3.5511480151101556e-05, | |
| "loss": 0.0272, | |
| "step": 6140 | |
| }, | |
| { | |
| "epoch": 1.4901865762054762, | |
| "grad_norm": 0.11546658724546432, | |
| "learning_rate": 3.5353306695317104e-05, | |
| "loss": 0.0269, | |
| "step": 6150 | |
| }, | |
| { | |
| "epoch": 1.4926096438090624, | |
| "grad_norm": 0.1294058859348297, | |
| "learning_rate": 3.519529341342402e-05, | |
| "loss": 0.0378, | |
| "step": 6160 | |
| }, | |
| { | |
| "epoch": 1.4950327114126485, | |
| "grad_norm": 0.13045288622379303, | |
| "learning_rate": 3.503744203343026e-05, | |
| "loss": 0.0333, | |
| "step": 6170 | |
| }, | |
| { | |
| "epoch": 1.4974557790162346, | |
| "grad_norm": 0.16236844658851624, | |
| "learning_rate": 3.487975428157318e-05, | |
| "loss": 0.0337, | |
| "step": 6180 | |
| }, | |
| { | |
| "epoch": 1.4998788466198207, | |
| "grad_norm": 0.21645741164684296, | |
| "learning_rate": 3.472223188230083e-05, | |
| "loss": 0.0387, | |
| "step": 6190 | |
| }, | |
| { | |
| "epoch": 1.5023019142234069, | |
| "grad_norm": 0.14361968636512756, | |
| "learning_rate": 3.4564876558252866e-05, | |
| "loss": 0.0319, | |
| "step": 6200 | |
| }, | |
| { | |
| "epoch": 1.504724981826993, | |
| "grad_norm": 0.23078177869319916, | |
| "learning_rate": 3.440769003024195e-05, | |
| "loss": 0.0318, | |
| "step": 6210 | |
| }, | |
| { | |
| "epoch": 1.5071480494305791, | |
| "grad_norm": 0.1609354019165039, | |
| "learning_rate": 3.425067401723477e-05, | |
| "loss": 0.0331, | |
| "step": 6220 | |
| }, | |
| { | |
| "epoch": 1.5095711170341652, | |
| "grad_norm": 0.13488058745861053, | |
| "learning_rate": 3.409383023633325e-05, | |
| "loss": 0.0324, | |
| "step": 6230 | |
| }, | |
| { | |
| "epoch": 1.5119941846377514, | |
| "grad_norm": 0.12959341704845428, | |
| "learning_rate": 3.3937160402755894e-05, | |
| "loss": 0.0299, | |
| "step": 6240 | |
| }, | |
| { | |
| "epoch": 1.5144172522413375, | |
| "grad_norm": 0.14298292994499207, | |
| "learning_rate": 3.378066622981885e-05, | |
| "loss": 0.0328, | |
| "step": 6250 | |
| }, | |
| { | |
| "epoch": 1.5168403198449236, | |
| "grad_norm": 0.18947912752628326, | |
| "learning_rate": 3.362434942891738e-05, | |
| "loss": 0.0326, | |
| "step": 6260 | |
| }, | |
| { | |
| "epoch": 1.5192633874485098, | |
| "grad_norm": 0.17194828391075134, | |
| "learning_rate": 3.346821170950693e-05, | |
| "loss": 0.0319, | |
| "step": 6270 | |
| }, | |
| { | |
| "epoch": 1.5216864550520959, | |
| "grad_norm": 0.12723220884799957, | |
| "learning_rate": 3.3312254779084585e-05, | |
| "loss": 0.0341, | |
| "step": 6280 | |
| }, | |
| { | |
| "epoch": 1.524109522655682, | |
| "grad_norm": 0.2845194637775421, | |
| "learning_rate": 3.315648034317039e-05, | |
| "loss": 0.0371, | |
| "step": 6290 | |
| }, | |
| { | |
| "epoch": 1.5265325902592681, | |
| "grad_norm": 0.14137770235538483, | |
| "learning_rate": 3.3000890105288564e-05, | |
| "loss": 0.0246, | |
| "step": 6300 | |
| }, | |
| { | |
| "epoch": 1.5289556578628543, | |
| "grad_norm": 0.13384422659873962, | |
| "learning_rate": 3.284548576694908e-05, | |
| "loss": 0.0324, | |
| "step": 6310 | |
| }, | |
| { | |
| "epoch": 1.5313787254664404, | |
| "grad_norm": 0.23343564569950104, | |
| "learning_rate": 3.2690269027628815e-05, | |
| "loss": 0.0281, | |
| "step": 6320 | |
| }, | |
| { | |
| "epoch": 1.5338017930700265, | |
| "grad_norm": 0.20749473571777344, | |
| "learning_rate": 3.253524158475324e-05, | |
| "loss": 0.0292, | |
| "step": 6330 | |
| }, | |
| { | |
| "epoch": 1.5362248606736126, | |
| "grad_norm": 0.1536872684955597, | |
| "learning_rate": 3.238040513367757e-05, | |
| "loss": 0.0287, | |
| "step": 6340 | |
| }, | |
| { | |
| "epoch": 1.538647928277199, | |
| "grad_norm": 0.23201976716518402, | |
| "learning_rate": 3.222576136766843e-05, | |
| "loss": 0.0266, | |
| "step": 6350 | |
| }, | |
| { | |
| "epoch": 1.541070995880785, | |
| "grad_norm": 0.12788063287734985, | |
| "learning_rate": 3.2071311977885324e-05, | |
| "loss": 0.0348, | |
| "step": 6360 | |
| }, | |
| { | |
| "epoch": 1.5434940634843712, | |
| "grad_norm": 0.11397260427474976, | |
| "learning_rate": 3.191705865336197e-05, | |
| "loss": 0.0313, | |
| "step": 6370 | |
| }, | |
| { | |
| "epoch": 1.5459171310879574, | |
| "grad_norm": 0.24470633268356323, | |
| "learning_rate": 3.1763003080988075e-05, | |
| "loss": 0.0313, | |
| "step": 6380 | |
| }, | |
| { | |
| "epoch": 1.5483401986915435, | |
| "grad_norm": 0.14287936687469482, | |
| "learning_rate": 3.160914694549063e-05, | |
| "loss": 0.0339, | |
| "step": 6390 | |
| }, | |
| { | |
| "epoch": 1.5507632662951296, | |
| "grad_norm": 0.28447020053863525, | |
| "learning_rate": 3.145549192941573e-05, | |
| "loss": 0.0296, | |
| "step": 6400 | |
| }, | |
| { | |
| "epoch": 1.5531863338987157, | |
| "grad_norm": 0.1163477897644043, | |
| "learning_rate": 3.130203971310999e-05, | |
| "loss": 0.0328, | |
| "step": 6410 | |
| }, | |
| { | |
| "epoch": 1.5556094015023019, | |
| "grad_norm": 0.10433950275182724, | |
| "learning_rate": 3.114879197470225e-05, | |
| "loss": 0.0302, | |
| "step": 6420 | |
| }, | |
| { | |
| "epoch": 1.5580324691058882, | |
| "grad_norm": 0.13167643547058105, | |
| "learning_rate": 3.0995750390085285e-05, | |
| "loss": 0.0272, | |
| "step": 6430 | |
| }, | |
| { | |
| "epoch": 1.5604555367094743, | |
| "grad_norm": 0.13868680596351624, | |
| "learning_rate": 3.084291663289728e-05, | |
| "loss": 0.0307, | |
| "step": 6440 | |
| }, | |
| { | |
| "epoch": 1.5628786043130605, | |
| "grad_norm": 0.11900881677865982, | |
| "learning_rate": 3.069029237450375e-05, | |
| "loss": 0.0239, | |
| "step": 6450 | |
| }, | |
| { | |
| "epoch": 1.5653016719166466, | |
| "grad_norm": 0.30312401056289673, | |
| "learning_rate": 3.053787928397911e-05, | |
| "loss": 0.0327, | |
| "step": 6460 | |
| }, | |
| { | |
| "epoch": 1.5677247395202327, | |
| "grad_norm": 0.1915568709373474, | |
| "learning_rate": 3.0385679028088526e-05, | |
| "loss": 0.0347, | |
| "step": 6470 | |
| }, | |
| { | |
| "epoch": 1.5701478071238189, | |
| "grad_norm": 0.25118640065193176, | |
| "learning_rate": 3.023369327126959e-05, | |
| "loss": 0.0298, | |
| "step": 6480 | |
| }, | |
| { | |
| "epoch": 1.572570874727405, | |
| "grad_norm": 0.1886977255344391, | |
| "learning_rate": 3.0081923675614198e-05, | |
| "loss": 0.0303, | |
| "step": 6490 | |
| }, | |
| { | |
| "epoch": 1.574993942330991, | |
| "grad_norm": 0.16155610978603363, | |
| "learning_rate": 2.993037190085034e-05, | |
| "loss": 0.0274, | |
| "step": 6500 | |
| }, | |
| { | |
| "epoch": 1.5774170099345772, | |
| "grad_norm": 0.19294053316116333, | |
| "learning_rate": 2.977903960432392e-05, | |
| "loss": 0.0289, | |
| "step": 6510 | |
| }, | |
| { | |
| "epoch": 1.5798400775381634, | |
| "grad_norm": 0.19525209069252014, | |
| "learning_rate": 2.9627928440980722e-05, | |
| "loss": 0.0318, | |
| "step": 6520 | |
| }, | |
| { | |
| "epoch": 1.5822631451417495, | |
| "grad_norm": 0.12274599075317383, | |
| "learning_rate": 2.9477040063348183e-05, | |
| "loss": 0.0354, | |
| "step": 6530 | |
| }, | |
| { | |
| "epoch": 1.5846862127453356, | |
| "grad_norm": 0.17518219351768494, | |
| "learning_rate": 2.9326376121517456e-05, | |
| "loss": 0.031, | |
| "step": 6540 | |
| }, | |
| { | |
| "epoch": 1.5871092803489217, | |
| "grad_norm": 0.17310017347335815, | |
| "learning_rate": 2.9175938263125236e-05, | |
| "loss": 0.0325, | |
| "step": 6550 | |
| }, | |
| { | |
| "epoch": 1.5895323479525079, | |
| "grad_norm": 0.0949157178401947, | |
| "learning_rate": 2.9025728133335873e-05, | |
| "loss": 0.0344, | |
| "step": 6560 | |
| }, | |
| { | |
| "epoch": 1.591955415556094, | |
| "grad_norm": 0.1574525684118271, | |
| "learning_rate": 2.8875747374823288e-05, | |
| "loss": 0.0341, | |
| "step": 6570 | |
| }, | |
| { | |
| "epoch": 1.5943784831596801, | |
| "grad_norm": 0.16615572571754456, | |
| "learning_rate": 2.872599762775298e-05, | |
| "loss": 0.0428, | |
| "step": 6580 | |
| }, | |
| { | |
| "epoch": 1.5968015507632662, | |
| "grad_norm": 0.11884958297014236, | |
| "learning_rate": 2.857648052976425e-05, | |
| "loss": 0.028, | |
| "step": 6590 | |
| }, | |
| { | |
| "epoch": 1.5992246183668524, | |
| "grad_norm": 0.14729472994804382, | |
| "learning_rate": 2.8427197715952047e-05, | |
| "loss": 0.0321, | |
| "step": 6600 | |
| }, | |
| { | |
| "epoch": 1.6016476859704385, | |
| "grad_norm": 0.1668490469455719, | |
| "learning_rate": 2.8278150818849393e-05, | |
| "loss": 0.0309, | |
| "step": 6610 | |
| }, | |
| { | |
| "epoch": 1.6040707535740246, | |
| "grad_norm": 0.19977746903896332, | |
| "learning_rate": 2.812934146840922e-05, | |
| "loss": 0.0273, | |
| "step": 6620 | |
| }, | |
| { | |
| "epoch": 1.6064938211776107, | |
| "grad_norm": 0.2024213820695877, | |
| "learning_rate": 2.7980771291986764e-05, | |
| "loss": 0.0355, | |
| "step": 6630 | |
| }, | |
| { | |
| "epoch": 1.6089168887811969, | |
| "grad_norm": 0.15648718178272247, | |
| "learning_rate": 2.783244191432167e-05, | |
| "loss": 0.0344, | |
| "step": 6640 | |
| }, | |
| { | |
| "epoch": 1.611339956384783, | |
| "grad_norm": 0.1572267711162567, | |
| "learning_rate": 2.768435495752022e-05, | |
| "loss": 0.0255, | |
| "step": 6650 | |
| }, | |
| { | |
| "epoch": 1.6137630239883691, | |
| "grad_norm": 0.11957002431154251, | |
| "learning_rate": 2.753651204103771e-05, | |
| "loss": 0.0319, | |
| "step": 6660 | |
| }, | |
| { | |
| "epoch": 1.6161860915919553, | |
| "grad_norm": 0.21825970709323883, | |
| "learning_rate": 2.7388914781660523e-05, | |
| "loss": 0.0278, | |
| "step": 6670 | |
| }, | |
| { | |
| "epoch": 1.6186091591955416, | |
| "grad_norm": 0.16766905784606934, | |
| "learning_rate": 2.7241564793488693e-05, | |
| "loss": 0.037, | |
| "step": 6680 | |
| }, | |
| { | |
| "epoch": 1.6210322267991277, | |
| "grad_norm": 0.11007768660783768, | |
| "learning_rate": 2.7094463687918037e-05, | |
| "loss": 0.028, | |
| "step": 6690 | |
| }, | |
| { | |
| "epoch": 1.6234552944027139, | |
| "grad_norm": 0.23923490941524506, | |
| "learning_rate": 2.694761307362268e-05, | |
| "loss": 0.0317, | |
| "step": 6700 | |
| }, | |
| { | |
| "epoch": 1.6258783620063, | |
| "grad_norm": 0.11933652311563492, | |
| "learning_rate": 2.6801014556537467e-05, | |
| "loss": 0.0286, | |
| "step": 6710 | |
| }, | |
| { | |
| "epoch": 1.628301429609886, | |
| "grad_norm": 0.12866638600826263, | |
| "learning_rate": 2.6654669739840243e-05, | |
| "loss": 0.0327, | |
| "step": 6720 | |
| }, | |
| { | |
| "epoch": 1.6307244972134722, | |
| "grad_norm": 0.19149260222911835, | |
| "learning_rate": 2.650858022393451e-05, | |
| "loss": 0.0304, | |
| "step": 6730 | |
| }, | |
| { | |
| "epoch": 1.6331475648170584, | |
| "grad_norm": 0.2288963347673416, | |
| "learning_rate": 2.6362747606431747e-05, | |
| "loss": 0.0381, | |
| "step": 6740 | |
| }, | |
| { | |
| "epoch": 1.6355706324206445, | |
| "grad_norm": 0.16293670237064362, | |
| "learning_rate": 2.6217173482134172e-05, | |
| "loss": 0.0291, | |
| "step": 6750 | |
| }, | |
| { | |
| "epoch": 1.6379937000242308, | |
| "grad_norm": 0.1377790868282318, | |
| "learning_rate": 2.6071859443017044e-05, | |
| "loss": 0.0263, | |
| "step": 6760 | |
| }, | |
| { | |
| "epoch": 1.640416767627817, | |
| "grad_norm": 0.1738491654396057, | |
| "learning_rate": 2.5926807078211414e-05, | |
| "loss": 0.0257, | |
| "step": 6770 | |
| }, | |
| { | |
| "epoch": 1.642839835231403, | |
| "grad_norm": 0.22051261365413666, | |
| "learning_rate": 2.5782017973986728e-05, | |
| "loss": 0.0334, | |
| "step": 6780 | |
| }, | |
| { | |
| "epoch": 1.6452629028349892, | |
| "grad_norm": 0.1970164179801941, | |
| "learning_rate": 2.5637493713733374e-05, | |
| "loss": 0.03, | |
| "step": 6790 | |
| }, | |
| { | |
| "epoch": 1.6476859704385753, | |
| "grad_norm": 0.17463520169258118, | |
| "learning_rate": 2.549323587794559e-05, | |
| "loss": 0.0426, | |
| "step": 6800 | |
| }, | |
| { | |
| "epoch": 1.6501090380421615, | |
| "grad_norm": 0.08293083310127258, | |
| "learning_rate": 2.5349246044203895e-05, | |
| "loss": 0.0247, | |
| "step": 6810 | |
| }, | |
| { | |
| "epoch": 1.6525321056457476, | |
| "grad_norm": 0.16733765602111816, | |
| "learning_rate": 2.520552578715808e-05, | |
| "loss": 0.0256, | |
| "step": 6820 | |
| }, | |
| { | |
| "epoch": 1.6549551732493337, | |
| "grad_norm": 0.18957699835300446, | |
| "learning_rate": 2.506207667850981e-05, | |
| "loss": 0.0259, | |
| "step": 6830 | |
| }, | |
| { | |
| "epoch": 1.6573782408529198, | |
| "grad_norm": 0.1485845148563385, | |
| "learning_rate": 2.4918900286995555e-05, | |
| "loss": 0.0274, | |
| "step": 6840 | |
| }, | |
| { | |
| "epoch": 1.659801308456506, | |
| "grad_norm": 0.1344272792339325, | |
| "learning_rate": 2.4775998178369458e-05, | |
| "loss": 0.025, | |
| "step": 6850 | |
| }, | |
| { | |
| "epoch": 1.662224376060092, | |
| "grad_norm": 0.13799865543842316, | |
| "learning_rate": 2.4633371915386017e-05, | |
| "loss": 0.0291, | |
| "step": 6860 | |
| }, | |
| { | |
| "epoch": 1.6646474436636782, | |
| "grad_norm": 0.18789830803871155, | |
| "learning_rate": 2.4491023057783235e-05, | |
| "loss": 0.0358, | |
| "step": 6870 | |
| }, | |
| { | |
| "epoch": 1.6670705112672644, | |
| "grad_norm": 0.11653149127960205, | |
| "learning_rate": 2.4348953162265375e-05, | |
| "loss": 0.0347, | |
| "step": 6880 | |
| }, | |
| { | |
| "epoch": 1.6694935788708505, | |
| "grad_norm": 0.1139049306511879, | |
| "learning_rate": 2.420716378248607e-05, | |
| "loss": 0.0362, | |
| "step": 6890 | |
| }, | |
| { | |
| "epoch": 1.6719166464744366, | |
| "grad_norm": 0.2249351143836975, | |
| "learning_rate": 2.4065656469031266e-05, | |
| "loss": 0.028, | |
| "step": 6900 | |
| }, | |
| { | |
| "epoch": 1.6743397140780227, | |
| "grad_norm": 0.20994961261749268, | |
| "learning_rate": 2.3924432769402268e-05, | |
| "loss": 0.0319, | |
| "step": 6910 | |
| }, | |
| { | |
| "epoch": 1.6767627816816089, | |
| "grad_norm": 0.12746700644493103, | |
| "learning_rate": 2.3783494227998844e-05, | |
| "loss": 0.0361, | |
| "step": 6920 | |
| }, | |
| { | |
| "epoch": 1.679185849285195, | |
| "grad_norm": 0.20666435360908508, | |
| "learning_rate": 2.3642842386102264e-05, | |
| "loss": 0.0281, | |
| "step": 6930 | |
| }, | |
| { | |
| "epoch": 1.6816089168887811, | |
| "grad_norm": 0.14798720180988312, | |
| "learning_rate": 2.3502478781858567e-05, | |
| "loss": 0.0254, | |
| "step": 6940 | |
| }, | |
| { | |
| "epoch": 1.6840319844923672, | |
| "grad_norm": 0.13720859587192535, | |
| "learning_rate": 2.3362404950261628e-05, | |
| "loss": 0.0349, | |
| "step": 6950 | |
| }, | |
| { | |
| "epoch": 1.6864550520959534, | |
| "grad_norm": 0.1458396315574646, | |
| "learning_rate": 2.3222622423136458e-05, | |
| "loss": 0.0309, | |
| "step": 6960 | |
| }, | |
| { | |
| "epoch": 1.6888781196995395, | |
| "grad_norm": 0.11982893943786621, | |
| "learning_rate": 2.3083132729122332e-05, | |
| "loss": 0.0232, | |
| "step": 6970 | |
| }, | |
| { | |
| "epoch": 1.6913011873031256, | |
| "grad_norm": 0.14651520550251007, | |
| "learning_rate": 2.294393739365621e-05, | |
| "loss": 0.0316, | |
| "step": 6980 | |
| }, | |
| { | |
| "epoch": 1.6937242549067117, | |
| "grad_norm": 0.15192952752113342, | |
| "learning_rate": 2.2805037938956e-05, | |
| "loss": 0.0349, | |
| "step": 6990 | |
| }, | |
| { | |
| "epoch": 1.6961473225102979, | |
| "grad_norm": 0.13334538042545319, | |
| "learning_rate": 2.266643588400386e-05, | |
| "loss": 0.0284, | |
| "step": 7000 | |
| }, | |
| { | |
| "epoch": 1.6985703901138842, | |
| "grad_norm": 0.15819594264030457, | |
| "learning_rate": 2.252813274452969e-05, | |
| "loss": 0.0333, | |
| "step": 7010 | |
| }, | |
| { | |
| "epoch": 1.7009934577174703, | |
| "grad_norm": 0.15649054944515228, | |
| "learning_rate": 2.2390130032994427e-05, | |
| "loss": 0.0239, | |
| "step": 7020 | |
| }, | |
| { | |
| "epoch": 1.7034165253210565, | |
| "grad_norm": 0.1568889617919922, | |
| "learning_rate": 2.2252429258573633e-05, | |
| "loss": 0.0308, | |
| "step": 7030 | |
| }, | |
| { | |
| "epoch": 1.7058395929246426, | |
| "grad_norm": 0.15020939707756042, | |
| "learning_rate": 2.2115031927140904e-05, | |
| "loss": 0.0347, | |
| "step": 7040 | |
| }, | |
| { | |
| "epoch": 1.7082626605282287, | |
| "grad_norm": 0.13974468410015106, | |
| "learning_rate": 2.1977939541251463e-05, | |
| "loss": 0.0348, | |
| "step": 7050 | |
| }, | |
| { | |
| "epoch": 1.7106857281318149, | |
| "grad_norm": 0.12164398282766342, | |
| "learning_rate": 2.1841153600125684e-05, | |
| "loss": 0.0312, | |
| "step": 7060 | |
| }, | |
| { | |
| "epoch": 1.713108795735401, | |
| "grad_norm": 0.14998400211334229, | |
| "learning_rate": 2.170467559963267e-05, | |
| "loss": 0.0306, | |
| "step": 7070 | |
| }, | |
| { | |
| "epoch": 1.715531863338987, | |
| "grad_norm": 0.19442333281040192, | |
| "learning_rate": 2.1568507032273982e-05, | |
| "loss": 0.0278, | |
| "step": 7080 | |
| }, | |
| { | |
| "epoch": 1.7179549309425735, | |
| "grad_norm": 0.167693629860878, | |
| "learning_rate": 2.1432649387167264e-05, | |
| "loss": 0.0257, | |
| "step": 7090 | |
| }, | |
| { | |
| "epoch": 1.7203779985461596, | |
| "grad_norm": 0.3055407404899597, | |
| "learning_rate": 2.1297104150029973e-05, | |
| "loss": 0.0291, | |
| "step": 7100 | |
| }, | |
| { | |
| "epoch": 1.7228010661497457, | |
| "grad_norm": 0.12466944754123688, | |
| "learning_rate": 2.116187280316307e-05, | |
| "loss": 0.0306, | |
| "step": 7110 | |
| }, | |
| { | |
| "epoch": 1.7252241337533318, | |
| "grad_norm": 0.1483263224363327, | |
| "learning_rate": 2.1026956825434908e-05, | |
| "loss": 0.0317, | |
| "step": 7120 | |
| }, | |
| { | |
| "epoch": 1.727647201356918, | |
| "grad_norm": 0.11770248413085938, | |
| "learning_rate": 2.0892357692265017e-05, | |
| "loss": 0.0258, | |
| "step": 7130 | |
| }, | |
| { | |
| "epoch": 1.730070268960504, | |
| "grad_norm": 0.17324748635292053, | |
| "learning_rate": 2.0758076875607947e-05, | |
| "loss": 0.0338, | |
| "step": 7140 | |
| }, | |
| { | |
| "epoch": 1.7324933365640902, | |
| "grad_norm": 0.14247173070907593, | |
| "learning_rate": 2.0624115843937207e-05, | |
| "loss": 0.0316, | |
| "step": 7150 | |
| }, | |
| { | |
| "epoch": 1.7349164041676763, | |
| "grad_norm": 0.14349092543125153, | |
| "learning_rate": 2.0490476062229157e-05, | |
| "loss": 0.0332, | |
| "step": 7160 | |
| }, | |
| { | |
| "epoch": 1.7373394717712625, | |
| "grad_norm": 0.16288098692893982, | |
| "learning_rate": 2.035715899194704e-05, | |
| "loss": 0.0305, | |
| "step": 7170 | |
| }, | |
| { | |
| "epoch": 1.7397625393748486, | |
| "grad_norm": 0.16031399369239807, | |
| "learning_rate": 2.022416609102499e-05, | |
| "loss": 0.0302, | |
| "step": 7180 | |
| }, | |
| { | |
| "epoch": 1.7421856069784347, | |
| "grad_norm": 0.17212922871112823, | |
| "learning_rate": 2.009149881385205e-05, | |
| "loss": 0.035, | |
| "step": 7190 | |
| }, | |
| { | |
| "epoch": 1.7446086745820208, | |
| "grad_norm": 0.21562014520168304, | |
| "learning_rate": 1.995915861125634e-05, | |
| "loss": 0.0284, | |
| "step": 7200 | |
| }, | |
| { | |
| "epoch": 1.747031742185607, | |
| "grad_norm": 0.12171247601509094, | |
| "learning_rate": 1.9827146930489065e-05, | |
| "loss": 0.0228, | |
| "step": 7210 | |
| }, | |
| { | |
| "epoch": 1.749454809789193, | |
| "grad_norm": 0.17230695486068726, | |
| "learning_rate": 1.9695465215208848e-05, | |
| "loss": 0.0343, | |
| "step": 7220 | |
| }, | |
| { | |
| "epoch": 1.7518778773927792, | |
| "grad_norm": 0.1498602032661438, | |
| "learning_rate": 1.9564114905465813e-05, | |
| "loss": 0.0275, | |
| "step": 7230 | |
| }, | |
| { | |
| "epoch": 1.7543009449963654, | |
| "grad_norm": 0.1403890997171402, | |
| "learning_rate": 1.9433097437685936e-05, | |
| "loss": 0.0284, | |
| "step": 7240 | |
| }, | |
| { | |
| "epoch": 1.7567240125999515, | |
| "grad_norm": 0.14623567461967468, | |
| "learning_rate": 1.930241424465521e-05, | |
| "loss": 0.0252, | |
| "step": 7250 | |
| }, | |
| { | |
| "epoch": 1.7591470802035376, | |
| "grad_norm": 0.20230096578598022, | |
| "learning_rate": 1.9172066755504115e-05, | |
| "loss": 0.0302, | |
| "step": 7260 | |
| }, | |
| { | |
| "epoch": 1.7615701478071237, | |
| "grad_norm": 0.22372810542583466, | |
| "learning_rate": 1.9042056395691914e-05, | |
| "loss": 0.034, | |
| "step": 7270 | |
| }, | |
| { | |
| "epoch": 1.7639932154107099, | |
| "grad_norm": 0.17974606156349182, | |
| "learning_rate": 1.8912384586991066e-05, | |
| "loss": 0.029, | |
| "step": 7280 | |
| }, | |
| { | |
| "epoch": 1.766416283014296, | |
| "grad_norm": 0.1568254828453064, | |
| "learning_rate": 1.8783052747471717e-05, | |
| "loss": 0.0274, | |
| "step": 7290 | |
| }, | |
| { | |
| "epoch": 1.7688393506178821, | |
| "grad_norm": 0.14157868921756744, | |
| "learning_rate": 1.865406229148611e-05, | |
| "loss": 0.0355, | |
| "step": 7300 | |
| }, | |
| { | |
| "epoch": 1.7712624182214682, | |
| "grad_norm": 0.1507447361946106, | |
| "learning_rate": 1.8525414629653233e-05, | |
| "loss": 0.0307, | |
| "step": 7310 | |
| }, | |
| { | |
| "epoch": 1.7736854858250544, | |
| "grad_norm": 0.14527937769889832, | |
| "learning_rate": 1.8397111168843255e-05, | |
| "loss": 0.032, | |
| "step": 7320 | |
| }, | |
| { | |
| "epoch": 1.7761085534286407, | |
| "grad_norm": 0.18605710566043854, | |
| "learning_rate": 1.8269153312162323e-05, | |
| "loss": 0.034, | |
| "step": 7330 | |
| }, | |
| { | |
| "epoch": 1.7785316210322268, | |
| "grad_norm": 0.11971258372068405, | |
| "learning_rate": 1.8141542458937054e-05, | |
| "loss": 0.0284, | |
| "step": 7340 | |
| }, | |
| { | |
| "epoch": 1.780954688635813, | |
| "grad_norm": 0.16301608085632324, | |
| "learning_rate": 1.8014280004699268e-05, | |
| "loss": 0.0278, | |
| "step": 7350 | |
| }, | |
| { | |
| "epoch": 1.783377756239399, | |
| "grad_norm": 0.15355491638183594, | |
| "learning_rate": 1.788736734117078e-05, | |
| "loss": 0.0263, | |
| "step": 7360 | |
| }, | |
| { | |
| "epoch": 1.7858008238429852, | |
| "grad_norm": 0.12897683680057526, | |
| "learning_rate": 1.7760805856248152e-05, | |
| "loss": 0.0284, | |
| "step": 7370 | |
| }, | |
| { | |
| "epoch": 1.7882238914465713, | |
| "grad_norm": 0.19699160754680634, | |
| "learning_rate": 1.7634596933987518e-05, | |
| "loss": 0.0293, | |
| "step": 7380 | |
| }, | |
| { | |
| "epoch": 1.7906469590501575, | |
| "grad_norm": 0.14717955887317657, | |
| "learning_rate": 1.7508741954589404e-05, | |
| "loss": 0.0302, | |
| "step": 7390 | |
| }, | |
| { | |
| "epoch": 1.7930700266537436, | |
| "grad_norm": 0.15098115801811218, | |
| "learning_rate": 1.7383242294383717e-05, | |
| "loss": 0.0279, | |
| "step": 7400 | |
| }, | |
| { | |
| "epoch": 1.7954930942573297, | |
| "grad_norm": 0.2671264111995697, | |
| "learning_rate": 1.7258099325814632e-05, | |
| "loss": 0.0253, | |
| "step": 7410 | |
| }, | |
| { | |
| "epoch": 1.797916161860916, | |
| "grad_norm": 0.2414303421974182, | |
| "learning_rate": 1.7133314417425594e-05, | |
| "loss": 0.0323, | |
| "step": 7420 | |
| }, | |
| { | |
| "epoch": 1.8003392294645022, | |
| "grad_norm": 0.1499500572681427, | |
| "learning_rate": 1.7008888933844408e-05, | |
| "loss": 0.0296, | |
| "step": 7430 | |
| }, | |
| { | |
| "epoch": 1.8027622970680883, | |
| "grad_norm": 0.15585799515247345, | |
| "learning_rate": 1.6884824235768172e-05, | |
| "loss": 0.0293, | |
| "step": 7440 | |
| }, | |
| { | |
| "epoch": 1.8051853646716745, | |
| "grad_norm": 0.12220346927642822, | |
| "learning_rate": 1.6761121679948592e-05, | |
| "loss": 0.0261, | |
| "step": 7450 | |
| }, | |
| { | |
| "epoch": 1.8076084322752606, | |
| "grad_norm": 0.12463181465864182, | |
| "learning_rate": 1.663778261917695e-05, | |
| "loss": 0.0321, | |
| "step": 7460 | |
| }, | |
| { | |
| "epoch": 1.8100314998788467, | |
| "grad_norm": 0.1807471215724945, | |
| "learning_rate": 1.651480840226952e-05, | |
| "loss": 0.0299, | |
| "step": 7470 | |
| }, | |
| { | |
| "epoch": 1.8124545674824328, | |
| "grad_norm": 0.10121816396713257, | |
| "learning_rate": 1.639220037405258e-05, | |
| "loss": 0.0308, | |
| "step": 7480 | |
| }, | |
| { | |
| "epoch": 1.814877635086019, | |
| "grad_norm": 0.13459303975105286, | |
| "learning_rate": 1.6269959875347906e-05, | |
| "loss": 0.0343, | |
| "step": 7490 | |
| }, | |
| { | |
| "epoch": 1.817300702689605, | |
| "grad_norm": 0.1783873587846756, | |
| "learning_rate": 1.614808824295802e-05, | |
| "loss": 0.0281, | |
| "step": 7500 | |
| }, | |
| { | |
| "epoch": 1.8197237702931912, | |
| "grad_norm": 0.11666081100702286, | |
| "learning_rate": 1.602658680965152e-05, | |
| "loss": 0.0234, | |
| "step": 7510 | |
| }, | |
| { | |
| "epoch": 1.8221468378967773, | |
| "grad_norm": 0.1261976659297943, | |
| "learning_rate": 1.5905456904148686e-05, | |
| "loss": 0.0321, | |
| "step": 7520 | |
| }, | |
| { | |
| "epoch": 1.8245699055003635, | |
| "grad_norm": 0.12998758256435394, | |
| "learning_rate": 1.57846998511067e-05, | |
| "loss": 0.034, | |
| "step": 7530 | |
| }, | |
| { | |
| "epoch": 1.8269929731039496, | |
| "grad_norm": 0.10772717744112015, | |
| "learning_rate": 1.566431697110538e-05, | |
| "loss": 0.0273, | |
| "step": 7540 | |
| }, | |
| { | |
| "epoch": 1.8294160407075357, | |
| "grad_norm": 0.1000848188996315, | |
| "learning_rate": 1.554430958063259e-05, | |
| "loss": 0.0264, | |
| "step": 7550 | |
| }, | |
| { | |
| "epoch": 1.8318391083111218, | |
| "grad_norm": 0.16651250422000885, | |
| "learning_rate": 1.5424678992069912e-05, | |
| "loss": 0.0288, | |
| "step": 7560 | |
| }, | |
| { | |
| "epoch": 1.834262175914708, | |
| "grad_norm": 0.14536502957344055, | |
| "learning_rate": 1.5305426513678362e-05, | |
| "loss": 0.0278, | |
| "step": 7570 | |
| }, | |
| { | |
| "epoch": 1.836685243518294, | |
| "grad_norm": 0.18689019978046417, | |
| "learning_rate": 1.518655344958388e-05, | |
| "loss": 0.0354, | |
| "step": 7580 | |
| }, | |
| { | |
| "epoch": 1.8391083111218802, | |
| "grad_norm": 0.13664589822292328, | |
| "learning_rate": 1.5068061099763275e-05, | |
| "loss": 0.0313, | |
| "step": 7590 | |
| }, | |
| { | |
| "epoch": 1.8415313787254664, | |
| "grad_norm": 0.1341859996318817, | |
| "learning_rate": 1.494995076002988e-05, | |
| "loss": 0.0311, | |
| "step": 7600 | |
| }, | |
| { | |
| "epoch": 1.8439544463290525, | |
| "grad_norm": 0.13607800006866455, | |
| "learning_rate": 1.4832223722019456e-05, | |
| "loss": 0.0229, | |
| "step": 7610 | |
| }, | |
| { | |
| "epoch": 1.8463775139326386, | |
| "grad_norm": 0.12949807941913605, | |
| "learning_rate": 1.4714881273176035e-05, | |
| "loss": 0.03, | |
| "step": 7620 | |
| }, | |
| { | |
| "epoch": 1.8488005815362247, | |
| "grad_norm": 0.1493142992258072, | |
| "learning_rate": 1.4597924696737835e-05, | |
| "loss": 0.0303, | |
| "step": 7630 | |
| }, | |
| { | |
| "epoch": 1.8512236491398109, | |
| "grad_norm": 0.10541212558746338, | |
| "learning_rate": 1.4481355271723252e-05, | |
| "loss": 0.0292, | |
| "step": 7640 | |
| }, | |
| { | |
| "epoch": 1.853646716743397, | |
| "grad_norm": 0.17484702169895172, | |
| "learning_rate": 1.4365174272916809e-05, | |
| "loss": 0.0333, | |
| "step": 7650 | |
| }, | |
| { | |
| "epoch": 1.8560697843469833, | |
| "grad_norm": 0.09506268054246902, | |
| "learning_rate": 1.4249382970855319e-05, | |
| "loss": 0.028, | |
| "step": 7660 | |
| }, | |
| { | |
| "epoch": 1.8584928519505695, | |
| "grad_norm": 0.19987516105175018, | |
| "learning_rate": 1.4133982631813903e-05, | |
| "loss": 0.0292, | |
| "step": 7670 | |
| }, | |
| { | |
| "epoch": 1.8609159195541556, | |
| "grad_norm": 0.167899027466774, | |
| "learning_rate": 1.4018974517792194e-05, | |
| "loss": 0.027, | |
| "step": 7680 | |
| }, | |
| { | |
| "epoch": 1.8633389871577417, | |
| "grad_norm": 0.12362218648195267, | |
| "learning_rate": 1.390435988650048e-05, | |
| "loss": 0.0281, | |
| "step": 7690 | |
| }, | |
| { | |
| "epoch": 1.8657620547613278, | |
| "grad_norm": 0.19365519285202026, | |
| "learning_rate": 1.3790139991346006e-05, | |
| "loss": 0.026, | |
| "step": 7700 | |
| }, | |
| { | |
| "epoch": 1.868185122364914, | |
| "grad_norm": 0.13983801007270813, | |
| "learning_rate": 1.367631608141926e-05, | |
| "loss": 0.03, | |
| "step": 7710 | |
| }, | |
| { | |
| "epoch": 1.8706081899685, | |
| "grad_norm": 0.11064846068620682, | |
| "learning_rate": 1.3562889401480278e-05, | |
| "loss": 0.0259, | |
| "step": 7720 | |
| }, | |
| { | |
| "epoch": 1.8730312575720862, | |
| "grad_norm": 0.1765027642250061, | |
| "learning_rate": 1.3449861191945074e-05, | |
| "loss": 0.0386, | |
| "step": 7730 | |
| }, | |
| { | |
| "epoch": 1.8754543251756726, | |
| "grad_norm": 0.12223070114850998, | |
| "learning_rate": 1.3337232688872009e-05, | |
| "loss": 0.0265, | |
| "step": 7740 | |
| }, | |
| { | |
| "epoch": 1.8778773927792587, | |
| "grad_norm": 0.12448066473007202, | |
| "learning_rate": 1.3225005123948364e-05, | |
| "loss": 0.0293, | |
| "step": 7750 | |
| }, | |
| { | |
| "epoch": 1.8803004603828448, | |
| "grad_norm": 0.15871427953243256, | |
| "learning_rate": 1.311317972447681e-05, | |
| "loss": 0.0262, | |
| "step": 7760 | |
| }, | |
| { | |
| "epoch": 1.882723527986431, | |
| "grad_norm": 0.17256490886211395, | |
| "learning_rate": 1.3001757713361996e-05, | |
| "loss": 0.0222, | |
| "step": 7770 | |
| }, | |
| { | |
| "epoch": 1.885146595590017, | |
| "grad_norm": 0.155892476439476, | |
| "learning_rate": 1.2890740309097204e-05, | |
| "loss": 0.0329, | |
| "step": 7780 | |
| }, | |
| { | |
| "epoch": 1.8875696631936032, | |
| "grad_norm": 0.1817428171634674, | |
| "learning_rate": 1.2780128725750944e-05, | |
| "loss": 0.029, | |
| "step": 7790 | |
| }, | |
| { | |
| "epoch": 1.8899927307971893, | |
| "grad_norm": 0.13103091716766357, | |
| "learning_rate": 1.266992417295379e-05, | |
| "loss": 0.0273, | |
| "step": 7800 | |
| }, | |
| { | |
| "epoch": 1.8924157984007755, | |
| "grad_norm": 0.227295383810997, | |
| "learning_rate": 1.2560127855885073e-05, | |
| "loss": 0.0286, | |
| "step": 7810 | |
| }, | |
| { | |
| "epoch": 1.8948388660043616, | |
| "grad_norm": 0.15147389471530914, | |
| "learning_rate": 1.2450740975259745e-05, | |
| "loss": 0.033, | |
| "step": 7820 | |
| }, | |
| { | |
| "epoch": 1.8972619336079477, | |
| "grad_norm": 0.13875320553779602, | |
| "learning_rate": 1.234176472731517e-05, | |
| "loss": 0.0404, | |
| "step": 7830 | |
| }, | |
| { | |
| "epoch": 1.8996850012115338, | |
| "grad_norm": 0.1799732744693756, | |
| "learning_rate": 1.2233200303798158e-05, | |
| "loss": 0.0258, | |
| "step": 7840 | |
| }, | |
| { | |
| "epoch": 1.90210806881512, | |
| "grad_norm": 0.0799701139330864, | |
| "learning_rate": 1.2125048891951846e-05, | |
| "loss": 0.0319, | |
| "step": 7850 | |
| }, | |
| { | |
| "epoch": 1.904531136418706, | |
| "grad_norm": 0.20891959965229034, | |
| "learning_rate": 1.2017311674502745e-05, | |
| "loss": 0.0253, | |
| "step": 7860 | |
| }, | |
| { | |
| "epoch": 1.9069542040222922, | |
| "grad_norm": 0.19225963950157166, | |
| "learning_rate": 1.1909989829647822e-05, | |
| "loss": 0.0267, | |
| "step": 7870 | |
| }, | |
| { | |
| "epoch": 1.9093772716258783, | |
| "grad_norm": 0.12622466683387756, | |
| "learning_rate": 1.1803084531041553e-05, | |
| "loss": 0.0305, | |
| "step": 7880 | |
| }, | |
| { | |
| "epoch": 1.9118003392294645, | |
| "grad_norm": 0.1945401430130005, | |
| "learning_rate": 1.1696596947783162e-05, | |
| "loss": 0.0324, | |
| "step": 7890 | |
| }, | |
| { | |
| "epoch": 1.9142234068330506, | |
| "grad_norm": 0.10306015610694885, | |
| "learning_rate": 1.1590528244403803e-05, | |
| "loss": 0.0261, | |
| "step": 7900 | |
| }, | |
| { | |
| "epoch": 1.9166464744366367, | |
| "grad_norm": 0.11172715574502945, | |
| "learning_rate": 1.148487958085382e-05, | |
| "loss": 0.0344, | |
| "step": 7910 | |
| }, | |
| { | |
| "epoch": 1.9190695420402228, | |
| "grad_norm": 0.11810815334320068, | |
| "learning_rate": 1.1379652112490086e-05, | |
| "loss": 0.0303, | |
| "step": 7920 | |
| }, | |
| { | |
| "epoch": 1.921492609643809, | |
| "grad_norm": 0.10111381858587265, | |
| "learning_rate": 1.1274846990063315e-05, | |
| "loss": 0.0292, | |
| "step": 7930 | |
| }, | |
| { | |
| "epoch": 1.923915677247395, | |
| "grad_norm": 0.2581632137298584, | |
| "learning_rate": 1.117046535970554e-05, | |
| "loss": 0.0329, | |
| "step": 7940 | |
| }, | |
| { | |
| "epoch": 1.9263387448509812, | |
| "grad_norm": 0.12490931153297424, | |
| "learning_rate": 1.106650836291755e-05, | |
| "loss": 0.0365, | |
| "step": 7950 | |
| }, | |
| { | |
| "epoch": 1.9287618124545673, | |
| "grad_norm": 0.25791874527931213, | |
| "learning_rate": 1.0962977136556418e-05, | |
| "loss": 0.0399, | |
| "step": 7960 | |
| }, | |
| { | |
| "epoch": 1.9311848800581535, | |
| "grad_norm": 0.26325419545173645, | |
| "learning_rate": 1.0859872812823024e-05, | |
| "loss": 0.0331, | |
| "step": 7970 | |
| }, | |
| { | |
| "epoch": 1.9336079476617396, | |
| "grad_norm": 0.20180101692676544, | |
| "learning_rate": 1.0757196519249747e-05, | |
| "loss": 0.0375, | |
| "step": 7980 | |
| }, | |
| { | |
| "epoch": 1.936031015265326, | |
| "grad_norm": 0.20103274285793304, | |
| "learning_rate": 1.0654949378688077e-05, | |
| "loss": 0.0306, | |
| "step": 7990 | |
| }, | |
| { | |
| "epoch": 1.938454082868912, | |
| "grad_norm": 0.15941545367240906, | |
| "learning_rate": 1.0553132509296376e-05, | |
| "loss": 0.0289, | |
| "step": 8000 | |
| }, | |
| { | |
| "epoch": 1.9408771504724982, | |
| "grad_norm": 0.18129713833332062, | |
| "learning_rate": 1.0451747024527613e-05, | |
| "loss": 0.0332, | |
| "step": 8010 | |
| }, | |
| { | |
| "epoch": 1.9433002180760843, | |
| "grad_norm": 0.16238285601139069, | |
| "learning_rate": 1.0350794033117189e-05, | |
| "loss": 0.0316, | |
| "step": 8020 | |
| }, | |
| { | |
| "epoch": 1.9457232856796705, | |
| "grad_norm": 0.11229030787944794, | |
| "learning_rate": 1.0250274639070856e-05, | |
| "loss": 0.0285, | |
| "step": 8030 | |
| }, | |
| { | |
| "epoch": 1.9481463532832566, | |
| "grad_norm": 0.2848252058029175, | |
| "learning_rate": 1.0150189941652599e-05, | |
| "loss": 0.0252, | |
| "step": 8040 | |
| }, | |
| { | |
| "epoch": 1.9505694208868427, | |
| "grad_norm": 0.13457539677619934, | |
| "learning_rate": 1.0050541035372635e-05, | |
| "loss": 0.0225, | |
| "step": 8050 | |
| }, | |
| { | |
| "epoch": 1.9529924884904288, | |
| "grad_norm": 0.23321807384490967, | |
| "learning_rate": 9.951329009975458e-06, | |
| "loss": 0.0307, | |
| "step": 8060 | |
| }, | |
| { | |
| "epoch": 1.9554155560940152, | |
| "grad_norm": 0.09704259037971497, | |
| "learning_rate": 9.852554950427845e-06, | |
| "loss": 0.027, | |
| "step": 8070 | |
| }, | |
| { | |
| "epoch": 1.9578386236976013, | |
| "grad_norm": 0.15445855259895325, | |
| "learning_rate": 9.754219936907105e-06, | |
| "loss": 0.0277, | |
| "step": 8080 | |
| }, | |
| { | |
| "epoch": 1.9602616913011874, | |
| "grad_norm": 0.3068898320198059, | |
| "learning_rate": 9.656325044789194e-06, | |
| "loss": 0.0262, | |
| "step": 8090 | |
| }, | |
| { | |
| "epoch": 1.9626847589047736, | |
| "grad_norm": 0.1608743518590927, | |
| "learning_rate": 9.55887134463697e-06, | |
| "loss": 0.0229, | |
| "step": 8100 | |
| }, | |
| { | |
| "epoch": 1.9651078265083597, | |
| "grad_norm": 0.1477365642786026, | |
| "learning_rate": 9.461859902188475e-06, | |
| "loss": 0.0317, | |
| "step": 8110 | |
| }, | |
| { | |
| "epoch": 1.9675308941119458, | |
| "grad_norm": 0.10755207389593124, | |
| "learning_rate": 9.365291778345303e-06, | |
| "loss": 0.0323, | |
| "step": 8120 | |
| }, | |
| { | |
| "epoch": 1.969953961715532, | |
| "grad_norm": 0.22165447473526, | |
| "learning_rate": 9.269168029160991e-06, | |
| "loss": 0.0236, | |
| "step": 8130 | |
| }, | |
| { | |
| "epoch": 1.972377029319118, | |
| "grad_norm": 0.16507363319396973, | |
| "learning_rate": 9.173489705829447e-06, | |
| "loss": 0.0314, | |
| "step": 8140 | |
| }, | |
| { | |
| "epoch": 1.9748000969227042, | |
| "grad_norm": 0.177801251411438, | |
| "learning_rate": 9.078257854673516e-06, | |
| "loss": 0.03, | |
| "step": 8150 | |
| }, | |
| { | |
| "epoch": 1.9772231645262903, | |
| "grad_norm": 0.1621575504541397, | |
| "learning_rate": 8.983473517133429e-06, | |
| "loss": 0.0287, | |
| "step": 8160 | |
| }, | |
| { | |
| "epoch": 1.9796462321298764, | |
| "grad_norm": 0.11137279123067856, | |
| "learning_rate": 8.889137729755537e-06, | |
| "loss": 0.0274, | |
| "step": 8170 | |
| }, | |
| { | |
| "epoch": 1.9820692997334626, | |
| "grad_norm": 0.14686135947704315, | |
| "learning_rate": 8.79525152418087e-06, | |
| "loss": 0.0313, | |
| "step": 8180 | |
| }, | |
| { | |
| "epoch": 1.9844923673370487, | |
| "grad_norm": 0.17601804435253143, | |
| "learning_rate": 8.701815927133961e-06, | |
| "loss": 0.029, | |
| "step": 8190 | |
| }, | |
| { | |
| "epoch": 1.9869154349406348, | |
| "grad_norm": 0.17296448349952698, | |
| "learning_rate": 8.608831960411534e-06, | |
| "loss": 0.0313, | |
| "step": 8200 | |
| }, | |
| { | |
| "epoch": 1.989338502544221, | |
| "grad_norm": 0.09737856686115265, | |
| "learning_rate": 8.516300640871321e-06, | |
| "loss": 0.0283, | |
| "step": 8210 | |
| }, | |
| { | |
| "epoch": 1.991761570147807, | |
| "grad_norm": 0.17967349290847778, | |
| "learning_rate": 8.424222980421038e-06, | |
| "loss": 0.032, | |
| "step": 8220 | |
| }, | |
| { | |
| "epoch": 1.9941846377513932, | |
| "grad_norm": 0.14607755839824677, | |
| "learning_rate": 8.332599986007184e-06, | |
| "loss": 0.028, | |
| "step": 8230 | |
| }, | |
| { | |
| "epoch": 1.9966077053549793, | |
| "grad_norm": 0.13207517564296722, | |
| "learning_rate": 8.241432659604203e-06, | |
| "loss": 0.029, | |
| "step": 8240 | |
| }, | |
| { | |
| "epoch": 1.9990307729585655, | |
| "grad_norm": 0.1940346509218216, | |
| "learning_rate": 8.150721998203331e-06, | |
| "loss": 0.0249, | |
| "step": 8250 | |
| }, | |
| { | |
| "epoch": 2.0014538405621516, | |
| "grad_norm": 0.17563392221927643, | |
| "learning_rate": 8.06046899380184e-06, | |
| "loss": 0.0268, | |
| "step": 8260 | |
| }, | |
| { | |
| "epoch": 2.0038769081657377, | |
| "grad_norm": 0.12261688709259033, | |
| "learning_rate": 7.970674633392133e-06, | |
| "loss": 0.0352, | |
| "step": 8270 | |
| }, | |
| { | |
| "epoch": 2.006299975769324, | |
| "grad_norm": 0.1353401243686676, | |
| "learning_rate": 7.881339898950924e-06, | |
| "loss": 0.0307, | |
| "step": 8280 | |
| }, | |
| { | |
| "epoch": 2.00872304337291, | |
| "grad_norm": 0.1718779355287552, | |
| "learning_rate": 7.792465767428597e-06, | |
| "loss": 0.0298, | |
| "step": 8290 | |
| }, | |
| { | |
| "epoch": 2.011146110976496, | |
| "grad_norm": 0.18064215779304504, | |
| "learning_rate": 7.704053210738376e-06, | |
| "loss": 0.0255, | |
| "step": 8300 | |
| }, | |
| { | |
| "epoch": 2.013569178580082, | |
| "grad_norm": 0.13827283680438995, | |
| "learning_rate": 7.6161031957458494e-06, | |
| "loss": 0.029, | |
| "step": 8310 | |
| }, | |
| { | |
| "epoch": 2.0159922461836683, | |
| "grad_norm": 0.18655867874622345, | |
| "learning_rate": 7.5286166842582605e-06, | |
| "loss": 0.0327, | |
| "step": 8320 | |
| }, | |
| { | |
| "epoch": 2.0184153137872545, | |
| "grad_norm": 0.17226466536521912, | |
| "learning_rate": 7.4415946330140814e-06, | |
| "loss": 0.0291, | |
| "step": 8330 | |
| }, | |
| { | |
| "epoch": 2.0208383813908406, | |
| "grad_norm": 0.1455259919166565, | |
| "learning_rate": 7.3550379936725644e-06, | |
| "loss": 0.0278, | |
| "step": 8340 | |
| }, | |
| { | |
| "epoch": 2.0232614489944267, | |
| "grad_norm": 0.17325949668884277, | |
| "learning_rate": 7.2689477128032035e-06, | |
| "loss": 0.0296, | |
| "step": 8350 | |
| }, | |
| { | |
| "epoch": 2.0256845165980133, | |
| "grad_norm": 0.145994633436203, | |
| "learning_rate": 7.183324731875551e-06, | |
| "loss": 0.0253, | |
| "step": 8360 | |
| }, | |
| { | |
| "epoch": 2.0281075842015994, | |
| "grad_norm": 0.15007467567920685, | |
| "learning_rate": 7.098169987248782e-06, | |
| "loss": 0.0255, | |
| "step": 8370 | |
| }, | |
| { | |
| "epoch": 2.0305306518051855, | |
| "grad_norm": 0.19762636721134186, | |
| "learning_rate": 7.013484410161553e-06, | |
| "loss": 0.0293, | |
| "step": 8380 | |
| }, | |
| { | |
| "epoch": 2.0329537194087717, | |
| "grad_norm": 0.17374244332313538, | |
| "learning_rate": 6.92926892672176e-06, | |
| "loss": 0.0308, | |
| "step": 8390 | |
| }, | |
| { | |
| "epoch": 2.035376787012358, | |
| "grad_norm": 0.16125473380088806, | |
| "learning_rate": 6.845524457896446e-06, | |
| "loss": 0.0233, | |
| "step": 8400 | |
| }, | |
| { | |
| "epoch": 2.037799854615944, | |
| "grad_norm": 0.1847807615995407, | |
| "learning_rate": 6.7622519195017165e-06, | |
| "loss": 0.0278, | |
| "step": 8410 | |
| }, | |
| { | |
| "epoch": 2.04022292221953, | |
| "grad_norm": 0.16369697451591492, | |
| "learning_rate": 6.679452222192684e-06, | |
| "loss": 0.0307, | |
| "step": 8420 | |
| }, | |
| { | |
| "epoch": 2.042645989823116, | |
| "grad_norm": 0.12161785364151001, | |
| "learning_rate": 6.597126271453579e-06, | |
| "loss": 0.0235, | |
| "step": 8430 | |
| }, | |
| { | |
| "epoch": 2.0450690574267023, | |
| "grad_norm": 0.2299482375383377, | |
| "learning_rate": 6.51527496758782e-06, | |
| "loss": 0.0257, | |
| "step": 8440 | |
| }, | |
| { | |
| "epoch": 2.0474921250302884, | |
| "grad_norm": 0.1613558977842331, | |
| "learning_rate": 6.433899205708155e-06, | |
| "loss": 0.0275, | |
| "step": 8450 | |
| }, | |
| { | |
| "epoch": 2.0499151926338746, | |
| "grad_norm": 0.11119429022073746, | |
| "learning_rate": 6.352999875726856e-06, | |
| "loss": 0.0252, | |
| "step": 8460 | |
| }, | |
| { | |
| "epoch": 2.0523382602374607, | |
| "grad_norm": 0.1832219809293747, | |
| "learning_rate": 6.272577862346052e-06, | |
| "loss": 0.0264, | |
| "step": 8470 | |
| }, | |
| { | |
| "epoch": 2.054761327841047, | |
| "grad_norm": 0.13787959516048431, | |
| "learning_rate": 6.192634045047996e-06, | |
| "loss": 0.0314, | |
| "step": 8480 | |
| }, | |
| { | |
| "epoch": 2.057184395444633, | |
| "grad_norm": 0.12625069916248322, | |
| "learning_rate": 6.113169298085458e-06, | |
| "loss": 0.0316, | |
| "step": 8490 | |
| }, | |
| { | |
| "epoch": 2.059607463048219, | |
| "grad_norm": 0.14285360276699066, | |
| "learning_rate": 6.034184490472195e-06, | |
| "loss": 0.0228, | |
| "step": 8500 | |
| }, | |
| { | |
| "epoch": 2.062030530651805, | |
| "grad_norm": 0.1270981729030609, | |
| "learning_rate": 5.955680485973386e-06, | |
| "loss": 0.0292, | |
| "step": 8510 | |
| }, | |
| { | |
| "epoch": 2.0644535982553913, | |
| "grad_norm": 0.17200256884098053, | |
| "learning_rate": 5.877658143096265e-06, | |
| "loss": 0.0272, | |
| "step": 8520 | |
| }, | |
| { | |
| "epoch": 2.0668766658589774, | |
| "grad_norm": 0.1370762437582016, | |
| "learning_rate": 5.800118315080661e-06, | |
| "loss": 0.026, | |
| "step": 8530 | |
| }, | |
| { | |
| "epoch": 2.0692997334625636, | |
| "grad_norm": 0.14900153875350952, | |
| "learning_rate": 5.723061849889716e-06, | |
| "loss": 0.0238, | |
| "step": 8540 | |
| }, | |
| { | |
| "epoch": 2.0717228010661497, | |
| "grad_norm": 0.13018465042114258, | |
| "learning_rate": 5.646489590200604e-06, | |
| "loss": 0.0317, | |
| "step": 8550 | |
| }, | |
| { | |
| "epoch": 2.074145868669736, | |
| "grad_norm": 0.10926738381385803, | |
| "learning_rate": 5.570402373395256e-06, | |
| "loss": 0.0251, | |
| "step": 8560 | |
| }, | |
| { | |
| "epoch": 2.076568936273322, | |
| "grad_norm": 0.1418224722146988, | |
| "learning_rate": 5.494801031551305e-06, | |
| "loss": 0.0285, | |
| "step": 8570 | |
| }, | |
| { | |
| "epoch": 2.078992003876908, | |
| "grad_norm": 0.15521803498268127, | |
| "learning_rate": 5.41968639143291e-06, | |
| "loss": 0.0263, | |
| "step": 8580 | |
| }, | |
| { | |
| "epoch": 2.081415071480494, | |
| "grad_norm": 0.1519792526960373, | |
| "learning_rate": 5.345059274481751e-06, | |
| "loss": 0.0252, | |
| "step": 8590 | |
| }, | |
| { | |
| "epoch": 2.0838381390840803, | |
| "grad_norm": 0.16562305390834808, | |
| "learning_rate": 5.270920496808002e-06, | |
| "loss": 0.0322, | |
| "step": 8600 | |
| }, | |
| { | |
| "epoch": 2.0862612066876665, | |
| "grad_norm": 0.11985314637422562, | |
| "learning_rate": 5.1972708691814695e-06, | |
| "loss": 0.0264, | |
| "step": 8610 | |
| }, | |
| { | |
| "epoch": 2.0886842742912526, | |
| "grad_norm": 0.15222705900669098, | |
| "learning_rate": 5.124111197022674e-06, | |
| "loss": 0.036, | |
| "step": 8620 | |
| }, | |
| { | |
| "epoch": 2.0911073418948387, | |
| "grad_norm": 0.16215650737285614, | |
| "learning_rate": 5.051442280394081e-06, | |
| "loss": 0.0251, | |
| "step": 8630 | |
| }, | |
| { | |
| "epoch": 2.093530409498425, | |
| "grad_norm": 0.14035393297672272, | |
| "learning_rate": 4.979264913991322e-06, | |
| "loss": 0.0291, | |
| "step": 8640 | |
| }, | |
| { | |
| "epoch": 2.095953477102011, | |
| "grad_norm": 0.19924896955490112, | |
| "learning_rate": 4.907579887134489e-06, | |
| "loss": 0.0302, | |
| "step": 8650 | |
| }, | |
| { | |
| "epoch": 2.098376544705597, | |
| "grad_norm": 0.12468541413545609, | |
| "learning_rate": 4.836387983759572e-06, | |
| "loss": 0.0257, | |
| "step": 8660 | |
| }, | |
| { | |
| "epoch": 2.100799612309183, | |
| "grad_norm": 0.1507703959941864, | |
| "learning_rate": 4.765689982409816e-06, | |
| "loss": 0.026, | |
| "step": 8670 | |
| }, | |
| { | |
| "epoch": 2.1032226799127693, | |
| "grad_norm": 0.1744457632303238, | |
| "learning_rate": 4.695486656227233e-06, | |
| "loss": 0.034, | |
| "step": 8680 | |
| }, | |
| { | |
| "epoch": 2.105645747516356, | |
| "grad_norm": 0.17232468724250793, | |
| "learning_rate": 4.625778772944156e-06, | |
| "loss": 0.0268, | |
| "step": 8690 | |
| }, | |
| { | |
| "epoch": 2.108068815119942, | |
| "grad_norm": 0.10928317159414291, | |
| "learning_rate": 4.556567094874825e-06, | |
| "loss": 0.0279, | |
| "step": 8700 | |
| }, | |
| { | |
| "epoch": 2.110491882723528, | |
| "grad_norm": 0.11373521387577057, | |
| "learning_rate": 4.487852378907059e-06, | |
| "loss": 0.0213, | |
| "step": 8710 | |
| }, | |
| { | |
| "epoch": 2.1129149503271143, | |
| "grad_norm": 0.17275741696357727, | |
| "learning_rate": 4.419635376493986e-06, | |
| "loss": 0.0286, | |
| "step": 8720 | |
| }, | |
| { | |
| "epoch": 2.1153380179307004, | |
| "grad_norm": 0.13219864666461945, | |
| "learning_rate": 4.351916833645825e-06, | |
| "loss": 0.0278, | |
| "step": 8730 | |
| }, | |
| { | |
| "epoch": 2.1177610855342865, | |
| "grad_norm": 0.1773848533630371, | |
| "learning_rate": 4.284697490921691e-06, | |
| "loss": 0.0268, | |
| "step": 8740 | |
| }, | |
| { | |
| "epoch": 2.1201841531378727, | |
| "grad_norm": 0.14818203449249268, | |
| "learning_rate": 4.2179780834215585e-06, | |
| "loss": 0.0327, | |
| "step": 8750 | |
| }, | |
| { | |
| "epoch": 2.122607220741459, | |
| "grad_norm": 0.1522279679775238, | |
| "learning_rate": 4.151759340778178e-06, | |
| "loss": 0.0297, | |
| "step": 8760 | |
| }, | |
| { | |
| "epoch": 2.125030288345045, | |
| "grad_norm": 0.20476707816123962, | |
| "learning_rate": 4.086041987149109e-06, | |
| "loss": 0.0286, | |
| "step": 8770 | |
| }, | |
| { | |
| "epoch": 2.127453355948631, | |
| "grad_norm": 0.12033390998840332, | |
| "learning_rate": 4.020826741208811e-06, | |
| "loss": 0.0319, | |
| "step": 8780 | |
| }, | |
| { | |
| "epoch": 2.129876423552217, | |
| "grad_norm": 0.1066959798336029, | |
| "learning_rate": 3.956114316140746e-06, | |
| "loss": 0.0351, | |
| "step": 8790 | |
| }, | |
| { | |
| "epoch": 2.1322994911558033, | |
| "grad_norm": 0.14931227266788483, | |
| "learning_rate": 3.891905419629643e-06, | |
| "loss": 0.0244, | |
| "step": 8800 | |
| }, | |
| { | |
| "epoch": 2.1347225587593894, | |
| "grad_norm": 0.16468816995620728, | |
| "learning_rate": 3.8282007538536946e-06, | |
| "loss": 0.0375, | |
| "step": 8810 | |
| }, | |
| { | |
| "epoch": 2.1371456263629756, | |
| "grad_norm": 0.1545843929052353, | |
| "learning_rate": 3.7650010154769265e-06, | |
| "loss": 0.0239, | |
| "step": 8820 | |
| }, | |
| { | |
| "epoch": 2.1395686939665617, | |
| "grad_norm": 0.1869247555732727, | |
| "learning_rate": 3.7023068956415608e-06, | |
| "loss": 0.0348, | |
| "step": 8830 | |
| }, | |
| { | |
| "epoch": 2.141991761570148, | |
| "grad_norm": 0.1271735429763794, | |
| "learning_rate": 3.6401190799604303e-06, | |
| "loss": 0.0237, | |
| "step": 8840 | |
| }, | |
| { | |
| "epoch": 2.144414829173734, | |
| "grad_norm": 0.15469500422477722, | |
| "learning_rate": 3.578438248509536e-06, | |
| "loss": 0.0283, | |
| "step": 8850 | |
| }, | |
| { | |
| "epoch": 2.14683789677732, | |
| "grad_norm": 0.1609458029270172, | |
| "learning_rate": 3.5172650758205583e-06, | |
| "loss": 0.0331, | |
| "step": 8860 | |
| }, | |
| { | |
| "epoch": 2.149260964380906, | |
| "grad_norm": 0.16481876373291016, | |
| "learning_rate": 3.45660023087353e-06, | |
| "loss": 0.0242, | |
| "step": 8870 | |
| }, | |
| { | |
| "epoch": 2.1516840319844923, | |
| "grad_norm": 0.13567112386226654, | |
| "learning_rate": 3.3964443770894528e-06, | |
| "loss": 0.0282, | |
| "step": 8880 | |
| }, | |
| { | |
| "epoch": 2.1541070995880784, | |
| "grad_norm": 0.17066925764083862, | |
| "learning_rate": 3.3367981723231245e-06, | |
| "loss": 0.0276, | |
| "step": 8890 | |
| }, | |
| { | |
| "epoch": 2.1565301671916646, | |
| "grad_norm": 0.18471895158290863, | |
| "learning_rate": 3.2776622688558746e-06, | |
| "loss": 0.0289, | |
| "step": 8900 | |
| }, | |
| { | |
| "epoch": 2.1589532347952507, | |
| "grad_norm": 0.11638637632131577, | |
| "learning_rate": 3.2190373133884677e-06, | |
| "loss": 0.0276, | |
| "step": 8910 | |
| }, | |
| { | |
| "epoch": 2.161376302398837, | |
| "grad_norm": 0.1328156441450119, | |
| "learning_rate": 3.1609239470340446e-06, | |
| "loss": 0.0273, | |
| "step": 8920 | |
| }, | |
| { | |
| "epoch": 2.163799370002423, | |
| "grad_norm": 0.19720007479190826, | |
| "learning_rate": 3.1033228053110373e-06, | |
| "loss": 0.0253, | |
| "step": 8930 | |
| }, | |
| { | |
| "epoch": 2.166222437606009, | |
| "grad_norm": 0.10312143713235855, | |
| "learning_rate": 3.0462345181363314e-06, | |
| "loss": 0.0265, | |
| "step": 8940 | |
| }, | |
| { | |
| "epoch": 2.168645505209595, | |
| "grad_norm": 0.15365023910999298, | |
| "learning_rate": 2.9896597098182654e-06, | |
| "loss": 0.0315, | |
| "step": 8950 | |
| }, | |
| { | |
| "epoch": 2.1710685728131813, | |
| "grad_norm": 0.1724182665348053, | |
| "learning_rate": 2.933598999049891e-06, | |
| "loss": 0.0287, | |
| "step": 8960 | |
| }, | |
| { | |
| "epoch": 2.1734916404167675, | |
| "grad_norm": 0.13435471057891846, | |
| "learning_rate": 2.8780529989021697e-06, | |
| "loss": 0.0267, | |
| "step": 8970 | |
| }, | |
| { | |
| "epoch": 2.1759147080203536, | |
| "grad_norm": 0.14483821392059326, | |
| "learning_rate": 2.823022316817242e-06, | |
| "loss": 0.0293, | |
| "step": 8980 | |
| }, | |
| { | |
| "epoch": 2.1783377756239397, | |
| "grad_norm": 0.17233124375343323, | |
| "learning_rate": 2.7685075546018456e-06, | |
| "loss": 0.0281, | |
| "step": 8990 | |
| }, | |
| { | |
| "epoch": 2.1807608432275263, | |
| "grad_norm": 0.1296769380569458, | |
| "learning_rate": 2.7145093084206598e-06, | |
| "loss": 0.0316, | |
| "step": 9000 | |
| }, | |
| { | |
| "epoch": 2.1831839108311124, | |
| "grad_norm": 0.10365114361047745, | |
| "learning_rate": 2.661028168789892e-06, | |
| "loss": 0.0265, | |
| "step": 9010 | |
| }, | |
| { | |
| "epoch": 2.1856069784346985, | |
| "grad_norm": 0.16658321022987366, | |
| "learning_rate": 2.6080647205706855e-06, | |
| "loss": 0.0256, | |
| "step": 9020 | |
| }, | |
| { | |
| "epoch": 2.1880300460382847, | |
| "grad_norm": 0.143988236784935, | |
| "learning_rate": 2.555619542962834e-06, | |
| "loss": 0.0285, | |
| "step": 9030 | |
| }, | |
| { | |
| "epoch": 2.190453113641871, | |
| "grad_norm": 0.12926596403121948, | |
| "learning_rate": 2.503693209498409e-06, | |
| "loss": 0.0241, | |
| "step": 9040 | |
| }, | |
| { | |
| "epoch": 2.192876181245457, | |
| "grad_norm": 0.14821550250053406, | |
| "learning_rate": 2.452286288035449e-06, | |
| "loss": 0.0275, | |
| "step": 9050 | |
| }, | |
| { | |
| "epoch": 2.195299248849043, | |
| "grad_norm": 0.11625136435031891, | |
| "learning_rate": 2.4013993407518363e-06, | |
| "loss": 0.0294, | |
| "step": 9060 | |
| }, | |
| { | |
| "epoch": 2.197722316452629, | |
| "grad_norm": 0.14723770320415497, | |
| "learning_rate": 2.351032924139063e-06, | |
| "loss": 0.0252, | |
| "step": 9070 | |
| }, | |
| { | |
| "epoch": 2.2001453840562153, | |
| "grad_norm": 0.11719014495611191, | |
| "learning_rate": 2.30118758899619e-06, | |
| "loss": 0.0291, | |
| "step": 9080 | |
| }, | |
| { | |
| "epoch": 2.2025684516598014, | |
| "grad_norm": 0.1356019228696823, | |
| "learning_rate": 2.2518638804238157e-06, | |
| "loss": 0.0297, | |
| "step": 9090 | |
| }, | |
| { | |
| "epoch": 2.2049915192633875, | |
| "grad_norm": 0.10428967326879501, | |
| "learning_rate": 2.203062337818118e-06, | |
| "loss": 0.027, | |
| "step": 9100 | |
| }, | |
| { | |
| "epoch": 2.2074145868669737, | |
| "grad_norm": 0.21081489324569702, | |
| "learning_rate": 2.1547834948649483e-06, | |
| "loss": 0.0341, | |
| "step": 9110 | |
| }, | |
| { | |
| "epoch": 2.20983765447056, | |
| "grad_norm": 0.14455313980579376, | |
| "learning_rate": 2.1070278795340017e-06, | |
| "loss": 0.0311, | |
| "step": 9120 | |
| }, | |
| { | |
| "epoch": 2.212260722074146, | |
| "grad_norm": 0.15106725692749023, | |
| "learning_rate": 2.059796014073029e-06, | |
| "loss": 0.0231, | |
| "step": 9130 | |
| }, | |
| { | |
| "epoch": 2.214683789677732, | |
| "grad_norm": 0.14613407850265503, | |
| "learning_rate": 2.01308841500214e-06, | |
| "loss": 0.0248, | |
| "step": 9140 | |
| }, | |
| { | |
| "epoch": 2.217106857281318, | |
| "grad_norm": 0.12267711013555527, | |
| "learning_rate": 1.9669055931081704e-06, | |
| "loss": 0.0366, | |
| "step": 9150 | |
| }, | |
| { | |
| "epoch": 2.2195299248849043, | |
| "grad_norm": 0.17528045177459717, | |
| "learning_rate": 1.9212480534390507e-06, | |
| "loss": 0.0315, | |
| "step": 9160 | |
| }, | |
| { | |
| "epoch": 2.2219529924884904, | |
| "grad_norm": 0.1882876753807068, | |
| "learning_rate": 1.8761162952983246e-06, | |
| "loss": 0.0294, | |
| "step": 9170 | |
| }, | |
| { | |
| "epoch": 2.2243760600920766, | |
| "grad_norm": 0.13916727900505066, | |
| "learning_rate": 1.8315108122396618e-06, | |
| "loss": 0.0344, | |
| "step": 9180 | |
| }, | |
| { | |
| "epoch": 2.2267991276956627, | |
| "grad_norm": 0.17802543938159943, | |
| "learning_rate": 1.787432092061475e-06, | |
| "loss": 0.0257, | |
| "step": 9190 | |
| }, | |
| { | |
| "epoch": 2.229222195299249, | |
| "grad_norm": 0.12251824885606766, | |
| "learning_rate": 1.743880616801602e-06, | |
| "loss": 0.0308, | |
| "step": 9200 | |
| }, | |
| { | |
| "epoch": 2.231645262902835, | |
| "grad_norm": 0.14502032101154327, | |
| "learning_rate": 1.7008568627319865e-06, | |
| "loss": 0.0259, | |
| "step": 9210 | |
| }, | |
| { | |
| "epoch": 2.234068330506421, | |
| "grad_norm": 0.11981617659330368, | |
| "learning_rate": 1.6583613003535226e-06, | |
| "loss": 0.0341, | |
| "step": 9220 | |
| }, | |
| { | |
| "epoch": 2.236491398110007, | |
| "grad_norm": 0.11414606869220734, | |
| "learning_rate": 1.6163943943908522e-06, | |
| "loss": 0.0285, | |
| "step": 9230 | |
| }, | |
| { | |
| "epoch": 2.2389144657135933, | |
| "grad_norm": 0.16138747334480286, | |
| "learning_rate": 1.5749566037873476e-06, | |
| "loss": 0.0287, | |
| "step": 9240 | |
| }, | |
| { | |
| "epoch": 2.2413375333171794, | |
| "grad_norm": 0.10142087191343307, | |
| "learning_rate": 1.5340483817000428e-06, | |
| "loss": 0.0343, | |
| "step": 9250 | |
| }, | |
| { | |
| "epoch": 2.2437606009207656, | |
| "grad_norm": 0.11517345160245895, | |
| "learning_rate": 1.4936701754947101e-06, | |
| "loss": 0.0294, | |
| "step": 9260 | |
| }, | |
| { | |
| "epoch": 2.2461836685243517, | |
| "grad_norm": 0.12598855793476105, | |
| "learning_rate": 1.4538224267409361e-06, | |
| "loss": 0.0276, | |
| "step": 9270 | |
| }, | |
| { | |
| "epoch": 2.248606736127938, | |
| "grad_norm": 0.13687241077423096, | |
| "learning_rate": 1.414505571207314e-06, | |
| "loss": 0.0264, | |
| "step": 9280 | |
| }, | |
| { | |
| "epoch": 2.251029803731524, | |
| "grad_norm": 0.17203913629055023, | |
| "learning_rate": 1.3757200388566816e-06, | |
| "loss": 0.0248, | |
| "step": 9290 | |
| }, | |
| { | |
| "epoch": 2.25345287133511, | |
| "grad_norm": 0.15836012363433838, | |
| "learning_rate": 1.3374662538414074e-06, | |
| "loss": 0.0291, | |
| "step": 9300 | |
| }, | |
| { | |
| "epoch": 2.255875938938696, | |
| "grad_norm": 0.21492698788642883, | |
| "learning_rate": 1.2997446344987617e-06, | |
| "loss": 0.0259, | |
| "step": 9310 | |
| }, | |
| { | |
| "epoch": 2.2582990065422823, | |
| "grad_norm": 0.15109144151210785, | |
| "learning_rate": 1.262555593346315e-06, | |
| "loss": 0.0268, | |
| "step": 9320 | |
| }, | |
| { | |
| "epoch": 2.2607220741458685, | |
| "grad_norm": 0.170174703001976, | |
| "learning_rate": 1.2258995370774685e-06, | |
| "loss": 0.0311, | |
| "step": 9330 | |
| }, | |
| { | |
| "epoch": 2.2631451417494546, | |
| "grad_norm": 0.14161725342273712, | |
| "learning_rate": 1.1897768665569798e-06, | |
| "loss": 0.033, | |
| "step": 9340 | |
| }, | |
| { | |
| "epoch": 2.2655682093530407, | |
| "grad_norm": 0.1117384135723114, | |
| "learning_rate": 1.1541879768165954e-06, | |
| "loss": 0.0252, | |
| "step": 9350 | |
| }, | |
| { | |
| "epoch": 2.2679912769566273, | |
| "grad_norm": 0.1494186967611313, | |
| "learning_rate": 1.1191332570507085e-06, | |
| "loss": 0.0292, | |
| "step": 9360 | |
| }, | |
| { | |
| "epoch": 2.2704143445602134, | |
| "grad_norm": 0.12846499681472778, | |
| "learning_rate": 1.0846130906121132e-06, | |
| "loss": 0.0376, | |
| "step": 9370 | |
| }, | |
| { | |
| "epoch": 2.2728374121637995, | |
| "grad_norm": 0.15559671819210052, | |
| "learning_rate": 1.0506278550078131e-06, | |
| "loss": 0.0292, | |
| "step": 9380 | |
| }, | |
| { | |
| "epoch": 2.2752604797673857, | |
| "grad_norm": 0.14043958485126495, | |
| "learning_rate": 1.0171779218949185e-06, | |
| "loss": 0.0284, | |
| "step": 9390 | |
| }, | |
| { | |
| "epoch": 2.277683547370972, | |
| "grad_norm": 0.15847916901111603, | |
| "learning_rate": 9.842636570765174e-07, | |
| "loss": 0.0273, | |
| "step": 9400 | |
| }, | |
| { | |
| "epoch": 2.280106614974558, | |
| "grad_norm": 0.1476232260465622, | |
| "learning_rate": 9.518854204977612e-07, | |
| "loss": 0.0267, | |
| "step": 9410 | |
| }, | |
| { | |
| "epoch": 2.282529682578144, | |
| "grad_norm": 0.12031502276659012, | |
| "learning_rate": 9.200435662418349e-07, | |
| "loss": 0.0303, | |
| "step": 9420 | |
| }, | |
| { | |
| "epoch": 2.28495275018173, | |
| "grad_norm": 0.13865140080451965, | |
| "learning_rate": 8.887384425261658e-07, | |
| "loss": 0.0272, | |
| "step": 9430 | |
| }, | |
| { | |
| "epoch": 2.2873758177853163, | |
| "grad_norm": 0.1709819883108139, | |
| "learning_rate": 8.579703916985648e-07, | |
| "loss": 0.0356, | |
| "step": 9440 | |
| }, | |
| { | |
| "epoch": 2.2897988853889024, | |
| "grad_norm": 0.22925207018852234, | |
| "learning_rate": 8.277397502335194e-07, | |
| "loss": 0.0293, | |
| "step": 9450 | |
| }, | |
| { | |
| "epoch": 2.2922219529924885, | |
| "grad_norm": 0.13262705504894257, | |
| "learning_rate": 7.980468487284675e-07, | |
| "loss": 0.0262, | |
| "step": 9460 | |
| }, | |
| { | |
| "epoch": 2.2946450205960747, | |
| "grad_norm": 0.12649130821228027, | |
| "learning_rate": 7.688920119002297e-07, | |
| "loss": 0.0296, | |
| "step": 9470 | |
| }, | |
| { | |
| "epoch": 2.297068088199661, | |
| "grad_norm": 0.12093418836593628, | |
| "learning_rate": 7.402755585814269e-07, | |
| "loss": 0.0291, | |
| "step": 9480 | |
| }, | |
| { | |
| "epoch": 2.299491155803247, | |
| "grad_norm": 0.13731685280799866, | |
| "learning_rate": 7.121978017170073e-07, | |
| "loss": 0.0312, | |
| "step": 9490 | |
| }, | |
| { | |
| "epoch": 2.301914223406833, | |
| "grad_norm": 0.15991389751434326, | |
| "learning_rate": 6.846590483608306e-07, | |
| "loss": 0.0268, | |
| "step": 9500 | |
| }, | |
| { | |
| "epoch": 2.304337291010419, | |
| "grad_norm": 0.1384933590888977, | |
| "learning_rate": 6.576595996722834e-07, | |
| "loss": 0.0336, | |
| "step": 9510 | |
| }, | |
| { | |
| "epoch": 2.3067603586140053, | |
| "grad_norm": 0.14663700759410858, | |
| "learning_rate": 6.311997509130141e-07, | |
| "loss": 0.0319, | |
| "step": 9520 | |
| }, | |
| { | |
| "epoch": 2.3091834262175914, | |
| "grad_norm": 0.139607235789299, | |
| "learning_rate": 6.052797914436803e-07, | |
| "loss": 0.0293, | |
| "step": 9530 | |
| }, | |
| { | |
| "epoch": 2.3116064938211776, | |
| "grad_norm": 0.13558480143547058, | |
| "learning_rate": 5.799000047208181e-07, | |
| "loss": 0.0335, | |
| "step": 9540 | |
| }, | |
| { | |
| "epoch": 2.3140295614247637, | |
| "grad_norm": 0.16161589324474335, | |
| "learning_rate": 5.550606682937054e-07, | |
| "loss": 0.031, | |
| "step": 9550 | |
| }, | |
| { | |
| "epoch": 2.31645262902835, | |
| "grad_norm": 0.1968097984790802, | |
| "learning_rate": 5.307620538013481e-07, | |
| "loss": 0.0335, | |
| "step": 9560 | |
| }, | |
| { | |
| "epoch": 2.318875696631936, | |
| "grad_norm": 0.1712426096200943, | |
| "learning_rate": 5.070044269694874e-07, | |
| "loss": 0.0303, | |
| "step": 9570 | |
| }, | |
| { | |
| "epoch": 2.321298764235522, | |
| "grad_norm": 0.16822372376918793, | |
| "learning_rate": 4.837880476077417e-07, | |
| "loss": 0.0336, | |
| "step": 9580 | |
| }, | |
| { | |
| "epoch": 2.323721831839108, | |
| "grad_norm": 0.08741915971040726, | |
| "learning_rate": 4.6111316960670835e-07, | |
| "loss": 0.0226, | |
| "step": 9590 | |
| }, | |
| { | |
| "epoch": 2.3261448994426943, | |
| "grad_norm": 0.17877912521362305, | |
| "learning_rate": 4.389800409352218e-07, | |
| "loss": 0.0276, | |
| "step": 9600 | |
| }, | |
| { | |
| "epoch": 2.3285679670462804, | |
| "grad_norm": 0.11136100441217422, | |
| "learning_rate": 4.173889036376277e-07, | |
| "loss": 0.0324, | |
| "step": 9610 | |
| }, | |
| { | |
| "epoch": 2.3309910346498666, | |
| "grad_norm": 0.14391864836215973, | |
| "learning_rate": 3.963399938311463e-07, | |
| "loss": 0.0349, | |
| "step": 9620 | |
| }, | |
| { | |
| "epoch": 2.3334141022534527, | |
| "grad_norm": 0.09565794467926025, | |
| "learning_rate": 3.7583354170328545e-07, | |
| "loss": 0.0317, | |
| "step": 9630 | |
| }, | |
| { | |
| "epoch": 2.335837169857039, | |
| "grad_norm": 0.18041923642158508, | |
| "learning_rate": 3.558697715093207e-07, | |
| "loss": 0.0244, | |
| "step": 9640 | |
| }, | |
| { | |
| "epoch": 2.3382602374606254, | |
| "grad_norm": 0.11038856208324432, | |
| "learning_rate": 3.3644890156983576e-07, | |
| "loss": 0.0294, | |
| "step": 9650 | |
| }, | |
| { | |
| "epoch": 2.3406833050642115, | |
| "grad_norm": 0.15668071806430817, | |
| "learning_rate": 3.175711442683638e-07, | |
| "loss": 0.0286, | |
| "step": 9660 | |
| }, | |
| { | |
| "epoch": 2.3431063726677976, | |
| "grad_norm": 0.17571397125720978, | |
| "learning_rate": 2.9923670604902197e-07, | |
| "loss": 0.0298, | |
| "step": 9670 | |
| }, | |
| { | |
| "epoch": 2.3455294402713838, | |
| "grad_norm": 0.13867910206317902, | |
| "learning_rate": 2.814457874143028e-07, | |
| "loss": 0.0304, | |
| "step": 9680 | |
| }, | |
| { | |
| "epoch": 2.34795250787497, | |
| "grad_norm": 0.12321203202009201, | |
| "learning_rate": 2.641985829228366e-07, | |
| "loss": 0.0242, | |
| "step": 9690 | |
| }, | |
| { | |
| "epoch": 2.350375575478556, | |
| "grad_norm": 0.16788356006145477, | |
| "learning_rate": 2.474952811872877e-07, | |
| "loss": 0.0263, | |
| "step": 9700 | |
| }, | |
| { | |
| "epoch": 2.352798643082142, | |
| "grad_norm": 0.1099233627319336, | |
| "learning_rate": 2.3133606487228397e-07, | |
| "loss": 0.0294, | |
| "step": 9710 | |
| }, | |
| { | |
| "epoch": 2.3552217106857283, | |
| "grad_norm": 0.17972053587436676, | |
| "learning_rate": 2.157211106924295e-07, | |
| "loss": 0.0311, | |
| "step": 9720 | |
| }, | |
| { | |
| "epoch": 2.3576447782893144, | |
| "grad_norm": 0.20411260426044464, | |
| "learning_rate": 2.006505894103672e-07, | |
| "loss": 0.0288, | |
| "step": 9730 | |
| }, | |
| { | |
| "epoch": 2.3600678458929005, | |
| "grad_norm": 0.16144753992557526, | |
| "learning_rate": 1.8612466583489696e-07, | |
| "loss": 0.0316, | |
| "step": 9740 | |
| }, | |
| { | |
| "epoch": 2.3624909134964867, | |
| "grad_norm": 0.12185072153806686, | |
| "learning_rate": 1.7214349881918834e-07, | |
| "loss": 0.0279, | |
| "step": 9750 | |
| }, | |
| { | |
| "epoch": 2.364913981100073, | |
| "grad_norm": 0.14463242888450623, | |
| "learning_rate": 1.5870724125904845e-07, | |
| "loss": 0.0255, | |
| "step": 9760 | |
| }, | |
| { | |
| "epoch": 2.367337048703659, | |
| "grad_norm": 0.22074349224567413, | |
| "learning_rate": 1.4581604009124006e-07, | |
| "loss": 0.0277, | |
| "step": 9770 | |
| }, | |
| { | |
| "epoch": 2.369760116307245, | |
| "grad_norm": 0.15400734543800354, | |
| "learning_rate": 1.334700362918717e-07, | |
| "loss": 0.0261, | |
| "step": 9780 | |
| }, | |
| { | |
| "epoch": 2.372183183910831, | |
| "grad_norm": 0.10798148065805435, | |
| "learning_rate": 1.2166936487486015e-07, | |
| "loss": 0.0308, | |
| "step": 9790 | |
| }, | |
| { | |
| "epoch": 2.3746062515144173, | |
| "grad_norm": 0.1395770162343979, | |
| "learning_rate": 1.1041415489045914e-07, | |
| "loss": 0.0238, | |
| "step": 9800 | |
| }, | |
| { | |
| "epoch": 2.3770293191180034, | |
| "grad_norm": 0.11044436693191528, | |
| "learning_rate": 9.970452942384412e-08, | |
| "loss": 0.0274, | |
| "step": 9810 | |
| }, | |
| { | |
| "epoch": 2.3794523867215895, | |
| "grad_norm": 0.12932665646076202, | |
| "learning_rate": 8.954060559375754e-08, | |
| "loss": 0.0252, | |
| "step": 9820 | |
| }, | |
| { | |
| "epoch": 2.3818754543251757, | |
| "grad_norm": 0.15350565314292908, | |
| "learning_rate": 7.99224945512489e-08, | |
| "loss": 0.0319, | |
| "step": 9830 | |
| }, | |
| { | |
| "epoch": 2.384298521928762, | |
| "grad_norm": 0.13151058554649353, | |
| "learning_rate": 7.085030147843675e-08, | |
| "loss": 0.0306, | |
| "step": 9840 | |
| }, | |
| { | |
| "epoch": 2.386721589532348, | |
| "grad_norm": 0.10742009431123734, | |
| "learning_rate": 6.232412558736523e-08, | |
| "loss": 0.0318, | |
| "step": 9850 | |
| }, | |
| { | |
| "epoch": 2.389144657135934, | |
| "grad_norm": 0.22974562644958496, | |
| "learning_rate": 5.434406011893822e-08, | |
| "loss": 0.0338, | |
| "step": 9860 | |
| }, | |
| { | |
| "epoch": 2.39156772473952, | |
| "grad_norm": 0.13090413808822632, | |
| "learning_rate": 4.6910192341864664e-08, | |
| "loss": 0.0264, | |
| "step": 9870 | |
| }, | |
| { | |
| "epoch": 2.3939907923431063, | |
| "grad_norm": 0.14210020005702972, | |
| "learning_rate": 4.0022603551737035e-08, | |
| "loss": 0.0307, | |
| "step": 9880 | |
| }, | |
| { | |
| "epoch": 2.3964138599466924, | |
| "grad_norm": 0.15216712653636932, | |
| "learning_rate": 3.3681369070120985e-08, | |
| "loss": 0.0337, | |
| "step": 9890 | |
| }, | |
| { | |
| "epoch": 2.3988369275502786, | |
| "grad_norm": 0.13430491089820862, | |
| "learning_rate": 2.7886558243744866e-08, | |
| "loss": 0.0341, | |
| "step": 9900 | |
| }, | |
| { | |
| "epoch": 2.4012599951538647, | |
| "grad_norm": 0.09523482620716095, | |
| "learning_rate": 2.2638234443722596e-08, | |
| "loss": 0.0274, | |
| "step": 9910 | |
| }, | |
| { | |
| "epoch": 2.403683062757451, | |
| "grad_norm": 0.1761728823184967, | |
| "learning_rate": 1.7936455064887504e-08, | |
| "loss": 0.0352, | |
| "step": 9920 | |
| }, | |
| { | |
| "epoch": 2.406106130361037, | |
| "grad_norm": 0.15155237913131714, | |
| "learning_rate": 1.378127152514841e-08, | |
| "loss": 0.0232, | |
| "step": 9930 | |
| }, | |
| { | |
| "epoch": 2.408529197964623, | |
| "grad_norm": 0.1542092114686966, | |
| "learning_rate": 1.0172729264917857e-08, | |
| "loss": 0.0333, | |
| "step": 9940 | |
| }, | |
| { | |
| "epoch": 2.410952265568209, | |
| "grad_norm": 0.14661715924739838, | |
| "learning_rate": 7.1108677466458215e-09, | |
| "loss": 0.0312, | |
| "step": 9950 | |
| }, | |
| { | |
| "epoch": 2.4133753331717953, | |
| "grad_norm": 0.11580303311347961, | |
| "learning_rate": 4.595720454353414e-09, | |
| "loss": 0.0327, | |
| "step": 9960 | |
| }, | |
| { | |
| "epoch": 2.4157984007753814, | |
| "grad_norm": 0.1317577362060547, | |
| "learning_rate": 2.627314893294264e-09, | |
| "loss": 0.0281, | |
| "step": 9970 | |
| }, | |
| { | |
| "epoch": 2.4182214683789676, | |
| "grad_norm": 0.17735572159290314, | |
| "learning_rate": 1.2056725896270048e-09, | |
| "loss": 0.0283, | |
| "step": 9980 | |
| }, | |
| { | |
| "epoch": 2.4206445359825537, | |
| "grad_norm": 0.17191249132156372, | |
| "learning_rate": 3.308090902098826e-10, | |
| "loss": 0.0338, | |
| "step": 9990 | |
| }, | |
| { | |
| "epoch": 2.42306760358614, | |
| "grad_norm": 0.18826410174369812, | |
| "learning_rate": 2.7339624120159555e-12, | |
| "loss": 0.03, | |
| "step": 10000 | |
| }, | |
| { | |
| "epoch": 2.42306760358614, | |
| "step": 10000, | |
| "total_flos": 0.0, | |
| "train_loss": 0.06684367745965719, | |
| "train_runtime": 4605.2335, | |
| "train_samples_per_second": 69.486, | |
| "train_steps_per_second": 2.171 | |
| } | |
| ], | |
| "logging_steps": 10, | |
| "max_steps": 10000, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 3, | |
| "save_steps": 1000, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 0.0, | |
| "train_batch_size": 32, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |