{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 8.0,
  "eval_steps": 500,
  "global_step": 4576,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.017497812773403325,
      "grad_norm": 15.0,
      "learning_rate": 1.0465116279069768e-06,
      "loss": 1.1604,
      "step": 10
    },
    {
      "epoch": 0.03499562554680665,
      "grad_norm": 9.6875,
      "learning_rate": 2.2093023255813954e-06,
      "loss": 1.1391,
      "step": 20
    },
    {
      "epoch": 0.05249343832020997,
      "grad_norm": 4.09375,
      "learning_rate": 3.372093023255814e-06,
      "loss": 1.0272,
      "step": 30
    },
    {
      "epoch": 0.0699912510936133,
      "grad_norm": 2.3125,
      "learning_rate": 4.5348837209302326e-06,
      "loss": 0.8968,
      "step": 40
    },
    {
      "epoch": 0.08748906386701662,
      "grad_norm": 1.515625,
      "learning_rate": 5.697674418604652e-06,
      "loss": 0.8108,
      "step": 50
    },
    {
      "epoch": 0.10498687664041995,
      "grad_norm": 1.421875,
      "learning_rate": 6.86046511627907e-06,
      "loss": 0.8071,
      "step": 60
    },
    {
      "epoch": 0.12248468941382328,
      "grad_norm": 1.1953125,
      "learning_rate": 8.023255813953488e-06,
      "loss": 0.759,
      "step": 70
    },
    {
      "epoch": 0.1399825021872266,
      "grad_norm": 1.25,
      "learning_rate": 9.186046511627908e-06,
      "loss": 0.767,
      "step": 80
    },
    {
      "epoch": 0.15748031496062992,
      "grad_norm": 1.3046875,
      "learning_rate": 1.0348837209302327e-05,
      "loss": 0.7569,
      "step": 90
    },
    {
      "epoch": 0.17497812773403323,
      "grad_norm": 1.1171875,
      "learning_rate": 1.1511627906976746e-05,
      "loss": 0.7603,
      "step": 100
    },
    {
      "epoch": 0.19247594050743658,
      "grad_norm": 1.046875,
      "learning_rate": 1.2674418604651164e-05,
      "loss": 0.7424,
      "step": 110
    },
    {
      "epoch": 0.2099737532808399,
      "grad_norm": 1.3125,
      "learning_rate": 1.3837209302325583e-05,
      "loss": 0.7358,
      "step": 120
    },
    {
      "epoch": 0.2274715660542432,
      "grad_norm": 1.1015625,
      "learning_rate": 1.5000000000000002e-05,
      "loss": 0.7119,
      "step": 130
    },
    {
      "epoch": 0.24496937882764655,
      "grad_norm": 1.140625,
      "learning_rate": 1.616279069767442e-05,
      "loss": 0.7219,
      "step": 140
    },
    {
      "epoch": 0.26246719160104987,
      "grad_norm": 1.0859375,
      "learning_rate": 1.7325581395348837e-05,
      "loss": 0.7073,
      "step": 150
    },
    {
      "epoch": 0.2799650043744532,
      "grad_norm": 1.078125,
      "learning_rate": 1.8488372093023256e-05,
      "loss": 0.7135,
      "step": 160
    },
    {
      "epoch": 0.2974628171478565,
      "grad_norm": 1.046875,
      "learning_rate": 1.9651162790697676e-05,
      "loss": 0.72,
      "step": 170
    },
    {
      "epoch": 0.31496062992125984,
      "grad_norm": 0.96484375,
      "learning_rate": 1.9999929041918377e-05,
      "loss": 0.7111,
      "step": 180
    },
    {
      "epoch": 0.3324584426946632,
      "grad_norm": 1.0859375,
      "learning_rate": 1.999958149482438e-05,
      "loss": 0.7308,
      "step": 190
    },
    {
      "epoch": 0.34995625546806647,
      "grad_norm": 1.09375,
      "learning_rate": 1.9998944336771236e-05,
      "loss": 0.711,
      "step": 200
    },
    {
      "epoch": 0.3674540682414698,
      "grad_norm": 1.03125,
      "learning_rate": 1.9998017588263007e-05,
      "loss": 0.7096,
      "step": 210
    },
    {
      "epoch": 0.38495188101487315,
      "grad_norm": 1.15625,
      "learning_rate": 1.9996801279122917e-05,
      "loss": 0.7193,
      "step": 220
    },
    {
      "epoch": 0.40244969378827644,
      "grad_norm": 0.97265625,
      "learning_rate": 1.9995295448492383e-05,
      "loss": 0.7187,
      "step": 230
    },
    {
      "epoch": 0.4199475065616798,
      "grad_norm": 1.0546875,
      "learning_rate": 1.9993500144829784e-05,
      "loss": 0.7197,
      "step": 240
    },
    {
      "epoch": 0.4374453193350831,
      "grad_norm": 1.046875,
      "learning_rate": 1.9991415425908868e-05,
      "loss": 0.7117,
      "step": 250
    },
    {
      "epoch": 0.4549431321084864,
      "grad_norm": 1.1328125,
      "learning_rate": 1.9989041358816926e-05,
      "loss": 0.6958,
      "step": 260
    },
    {
      "epoch": 0.47244094488188976,
      "grad_norm": 1.0625,
      "learning_rate": 1.9986378019952595e-05,
      "loss": 0.7127,
      "step": 270
    },
    {
      "epoch": 0.4899387576552931,
      "grad_norm": 1.1640625,
      "learning_rate": 1.998342549502343e-05,
      "loss": 0.7068,
      "step": 280
    },
    {
      "epoch": 0.5074365704286964,
      "grad_norm": 1.0546875,
      "learning_rate": 1.998018387904314e-05,
      "loss": 0.6994,
      "step": 290
    },
    {
      "epoch": 0.5249343832020997,
      "grad_norm": 1.078125,
      "learning_rate": 1.997665327632852e-05,
      "loss": 0.713,
      "step": 300
    },
    {
      "epoch": 0.5424321959755031,
      "grad_norm": 1.2109375,
      "learning_rate": 1.9972833800496107e-05,
      "loss": 0.7119,
      "step": 310
    },
    {
      "epoch": 0.5599300087489064,
      "grad_norm": 1.0703125,
      "learning_rate": 1.99687255744585e-05,
      "loss": 0.6993,
      "step": 320
    },
    {
      "epoch": 0.5774278215223098,
      "grad_norm": 0.97265625,
      "learning_rate": 1.9964328730420445e-05,
      "loss": 0.706,
      "step": 330
    },
    {
      "epoch": 0.594925634295713,
      "grad_norm": 1.1484375,
      "learning_rate": 1.995964340987454e-05,
      "loss": 0.7167,
      "step": 340
    },
    {
      "epoch": 0.6124234470691163,
      "grad_norm": 1.109375,
      "learning_rate": 1.9954669763596708e-05,
      "loss": 0.7206,
      "step": 350
    },
    {
      "epoch": 0.6299212598425197,
      "grad_norm": 0.97265625,
      "learning_rate": 1.9949407951641325e-05,
      "loss": 0.6875,
      "step": 360
    },
    {
      "epoch": 0.647419072615923,
      "grad_norm": 1.0625,
      "learning_rate": 1.99438581433361e-05,
      "loss": 0.7256,
      "step": 370
    },
    {
      "epoch": 0.6649168853893264,
      "grad_norm": 1.1015625,
      "learning_rate": 1.9938020517276583e-05,
      "loss": 0.689,
      "step": 380
    },
    {
      "epoch": 0.6824146981627297,
      "grad_norm": 0.98828125,
      "learning_rate": 1.9931895261320463e-05,
      "loss": 0.7029,
      "step": 390
    },
    {
      "epoch": 0.6999125109361329,
      "grad_norm": 1.0078125,
      "learning_rate": 1.9925482572581477e-05,
      "loss": 0.7189,
      "step": 400
    },
    {
      "epoch": 0.7174103237095363,
      "grad_norm": 1.015625,
      "learning_rate": 1.991878265742311e-05,
      "loss": 0.7103,
      "step": 410
    },
    {
      "epoch": 0.7349081364829396,
      "grad_norm": 0.98046875,
      "learning_rate": 1.9911795731451928e-05,
      "loss": 0.7045,
      "step": 420
    },
    {
      "epoch": 0.752405949256343,
      "grad_norm": 1.1171875,
      "learning_rate": 1.9904522019510647e-05,
      "loss": 0.7112,
      "step": 430
    },
    {
      "epoch": 0.7699037620297463,
      "grad_norm": 1.03125,
      "learning_rate": 1.989696175567089e-05,
      "loss": 0.711,
      "step": 440
    },
    {
      "epoch": 0.7874015748031497,
      "grad_norm": 1.0546875,
      "learning_rate": 1.988911518322566e-05,
      "loss": 0.7174,
      "step": 450
    },
    {
      "epoch": 0.8048993875765529,
      "grad_norm": 1.078125,
      "learning_rate": 1.9880982554681534e-05,
      "loss": 0.7023,
      "step": 460
    },
    {
      "epoch": 0.8223972003499562,
      "grad_norm": 1.09375,
      "learning_rate": 1.9872564131750488e-05,
      "loss": 0.6956,
      "step": 470
    },
    {
      "epoch": 0.8398950131233596,
      "grad_norm": 1.0859375,
      "learning_rate": 1.9863860185341514e-05,
      "loss": 0.7003,
      "step": 480
    },
    {
      "epoch": 0.8573928258967629,
      "grad_norm": 1.03125,
      "learning_rate": 1.985487099555189e-05,
      "loss": 0.6849,
      "step": 490
    },
    {
      "epoch": 0.8748906386701663,
      "grad_norm": 0.9140625,
      "learning_rate": 1.9845596851658168e-05,
      "loss": 0.7017,
      "step": 500
    },
    {
      "epoch": 0.8923884514435696,
      "grad_norm": 0.9609375,
      "learning_rate": 1.9836038052106853e-05,
      "loss": 0.6809,
      "step": 510
    },
    {
      "epoch": 0.9098862642169728,
      "grad_norm": 0.96484375,
      "learning_rate": 1.9826194904504824e-05,
      "loss": 0.6961,
      "step": 520
    },
    {
      "epoch": 0.9273840769903762,
      "grad_norm": 0.97265625,
      "learning_rate": 1.9816067725609403e-05,
      "loss": 0.698,
      "step": 530
    },
    {
      "epoch": 0.9448818897637795,
      "grad_norm": 1.125,
      "learning_rate": 1.9805656841318198e-05,
      "loss": 0.7022,
      "step": 540
    },
    {
      "epoch": 0.9623797025371829,
      "grad_norm": 0.97265625,
      "learning_rate": 1.9794962586658582e-05,
      "loss": 0.6896,
      "step": 550
    },
    {
      "epoch": 0.9798775153105862,
      "grad_norm": 1.0390625,
      "learning_rate": 1.978398530577693e-05,
      "loss": 0.6915,
      "step": 560
    },
    {
      "epoch": 0.9973753280839895,
      "grad_norm": 0.99609375,
      "learning_rate": 1.977272535192754e-05,
      "loss": 0.6996,
      "step": 570
    },
    {
      "epoch": 1.0139982502187226,
      "grad_norm": 1.0390625,
      "learning_rate": 1.9761183087461264e-05,
      "loss": 0.6237,
      "step": 580
    },
    {
      "epoch": 1.031496062992126,
      "grad_norm": 1.1328125,
      "learning_rate": 1.974935888381385e-05,
      "loss": 0.5954,
      "step": 590
    },
    {
      "epoch": 1.0489938757655293,
      "grad_norm": 1.1484375,
      "learning_rate": 1.9737253121493994e-05,
      "loss": 0.6124,
      "step": 600
    },
    {
      "epoch": 1.0664916885389326,
      "grad_norm": 1.1640625,
      "learning_rate": 1.9724866190071075e-05,
      "loss": 0.6023,
      "step": 610
    },
    {
      "epoch": 1.083989501312336,
      "grad_norm": 1.1875,
      "learning_rate": 1.971219848816264e-05,
      "loss": 0.6128,
      "step": 620
    },
    {
      "epoch": 1.1014873140857393,
      "grad_norm": 1.2109375,
      "learning_rate": 1.9699250423421568e-05,
      "loss": 0.6167,
      "step": 630
    },
    {
      "epoch": 1.1189851268591426,
      "grad_norm": 1.203125,
      "learning_rate": 1.9686022412522953e-05,
      "loss": 0.6322,
      "step": 640
    },
    {
      "epoch": 1.136482939632546,
      "grad_norm": 0.94921875,
      "learning_rate": 1.967251488115069e-05,
      "loss": 0.6,
      "step": 650
    },
    {
      "epoch": 1.1539807524059493,
      "grad_norm": 1.0546875,
      "learning_rate": 1.9658728263983783e-05,
      "loss": 0.6204,
      "step": 660
    },
    {
      "epoch": 1.1714785651793527,
      "grad_norm": 1.140625,
      "learning_rate": 1.964466300468235e-05,
      "loss": 0.627,
      "step": 670
    },
    {
      "epoch": 1.188976377952756,
      "grad_norm": 1.0,
      "learning_rate": 1.9630319555873364e-05,
      "loss": 0.5988,
      "step": 680
    },
    {
      "epoch": 1.2064741907261591,
      "grad_norm": 1.1015625,
      "learning_rate": 1.961569837913605e-05,
      "loss": 0.6141,
      "step": 690
    },
    {
      "epoch": 1.2239720034995625,
      "grad_norm": 1.03125,
      "learning_rate": 1.9600799944987077e-05,
      "loss": 0.6171,
      "step": 700
    },
    {
      "epoch": 1.2414698162729658,
      "grad_norm": 1.1328125,
      "learning_rate": 1.958562473286538e-05,
      "loss": 0.613,
      "step": 710
    },
    {
      "epoch": 1.2589676290463692,
      "grad_norm": 1.109375,
      "learning_rate": 1.957017323111675e-05,
      "loss": 0.6169,
      "step": 720
    },
    {
      "epoch": 1.2764654418197725,
      "grad_norm": 1.1015625,
      "learning_rate": 1.955444593697811e-05,
      "loss": 0.6312,
      "step": 730
    },
    {
      "epoch": 1.2939632545931758,
      "grad_norm": 1.2109375,
      "learning_rate": 1.9538443356561528e-05,
      "loss": 0.6144,
      "step": 740
    },
    {
      "epoch": 1.3114610673665792,
      "grad_norm": 1.140625,
      "learning_rate": 1.9522166004837908e-05,
      "loss": 0.6096,
      "step": 750
    },
    {
      "epoch": 1.3289588801399825,
      "grad_norm": 1.1953125,
      "learning_rate": 1.9505614405620436e-05,
      "loss": 0.6225,
      "step": 760
    },
    {
      "epoch": 1.3464566929133859,
      "grad_norm": 1.1953125,
      "learning_rate": 1.9488789091547716e-05,
      "loss": 0.6156,
      "step": 770
    },
    {
      "epoch": 1.3639545056867892,
      "grad_norm": 1.078125,
      "learning_rate": 1.9471690604066633e-05,
      "loss": 0.644,
      "step": 780
    },
    {
      "epoch": 1.3814523184601923,
      "grad_norm": 1.140625,
      "learning_rate": 1.945431949341492e-05,
      "loss": 0.6221,
      "step": 790
    },
    {
      "epoch": 1.3989501312335957,
      "grad_norm": 1.046875,
      "learning_rate": 1.9436676318603465e-05,
      "loss": 0.6164,
      "step": 800
    },
    {
      "epoch": 1.416447944006999,
      "grad_norm": 1.0546875,
      "learning_rate": 1.941876164739831e-05,
      "loss": 0.6187,
      "step": 810
    },
    {
      "epoch": 1.4339457567804024,
      "grad_norm": 1.1953125,
      "learning_rate": 1.940057605630239e-05,
      "loss": 0.6041,
      "step": 820
    },
    {
      "epoch": 1.4514435695538057,
      "grad_norm": 1.0078125,
      "learning_rate": 1.938212013053697e-05,
      "loss": 0.6172,
      "step": 830
    },
    {
      "epoch": 1.468941382327209,
      "grad_norm": 1.125,
      "learning_rate": 1.9363394464022814e-05,
      "loss": 0.6107,
      "step": 840
    },
    {
      "epoch": 1.4864391951006124,
      "grad_norm": 1.1484375,
      "learning_rate": 1.934439965936109e-05,
      "loss": 0.6109,
      "step": 850
    },
    {
      "epoch": 1.5039370078740157,
      "grad_norm": 1.15625,
      "learning_rate": 1.9325136327813945e-05,
      "loss": 0.606,
      "step": 860
    },
    {
      "epoch": 1.521434820647419,
      "grad_norm": 1.1171875,
      "learning_rate": 1.9305605089284873e-05,
      "loss": 0.6264,
      "step": 870
    },
    {
      "epoch": 1.5389326334208224,
      "grad_norm": 1.140625,
      "learning_rate": 1.9285806572298726e-05,
      "loss": 0.6185,
      "step": 880
    },
    {
      "epoch": 1.5564304461942258,
      "grad_norm": 1.1953125,
      "learning_rate": 1.926574141398153e-05,
      "loss": 0.6243,
      "step": 890
    },
    {
      "epoch": 1.5739282589676291,
      "grad_norm": 1.0703125,
      "learning_rate": 1.924541026003994e-05,
      "loss": 0.6035,
      "step": 900
    },
    {
      "epoch": 1.5914260717410325,
      "grad_norm": 1.203125,
      "learning_rate": 1.9224813764740496e-05,
      "loss": 0.6297,
      "step": 910
    },
    {
      "epoch": 1.6089238845144358,
      "grad_norm": 1.078125,
      "learning_rate": 1.9203952590888547e-05,
      "loss": 0.6138,
      "step": 920
    },
    {
      "epoch": 1.6264216972878391,
      "grad_norm": 1.1640625,
      "learning_rate": 1.918282740980693e-05,
      "loss": 0.6211,
      "step": 930
    },
    {
      "epoch": 1.6439195100612425,
      "grad_norm": 1.140625,
      "learning_rate": 1.9161438901314352e-05,
      "loss": 0.6082,
      "step": 940
    },
    {
      "epoch": 1.6614173228346458,
      "grad_norm": 1.09375,
      "learning_rate": 1.9139787753703554e-05,
      "loss": 0.6023,
      "step": 950
    },
    {
      "epoch": 1.678915135608049,
      "grad_norm": 1.0859375,
      "learning_rate": 1.91178746637191e-05,
      "loss": 0.6152,
      "step": 960
    },
    {
      "epoch": 1.6964129483814523,
      "grad_norm": 1.296875,
      "learning_rate": 1.9095700336535017e-05,
      "loss": 0.6076,
      "step": 970
    },
    {
      "epoch": 1.7139107611548556,
      "grad_norm": 1.078125,
      "learning_rate": 1.907326548573205e-05,
      "loss": 0.6257,
      "step": 980
    },
    {
      "epoch": 1.731408573928259,
      "grad_norm": 1.21875,
      "learning_rate": 1.905057083327474e-05,
      "loss": 0.616,
      "step": 990
    },
    {
      "epoch": 1.7489063867016623,
      "grad_norm": 1.3203125,
      "learning_rate": 1.9027617109488164e-05,
      "loss": 0.6239,
      "step": 1000
    },
    {
      "epoch": 1.7664041994750657,
      "grad_norm": 1.125,
      "learning_rate": 1.9004405053034445e-05,
      "loss": 0.5979,
      "step": 1010
    },
    {
      "epoch": 1.7839020122484688,
      "grad_norm": 1.03125,
      "learning_rate": 1.8980935410888973e-05,
      "loss": 0.6177,
      "step": 1020
    },
    {
      "epoch": 1.8013998250218721,
      "grad_norm": 1.0859375,
      "learning_rate": 1.895720893831638e-05,
      "loss": 0.5923,
      "step": 1030
    },
    {
      "epoch": 1.8188976377952755,
      "grad_norm": 1.078125,
      "learning_rate": 1.893322639884622e-05,
      "loss": 0.6029,
      "step": 1040
    },
    {
      "epoch": 1.8363954505686788,
      "grad_norm": 1.1328125,
      "learning_rate": 1.8908988564248415e-05,
      "loss": 0.6273,
      "step": 1050
    },
    {
      "epoch": 1.8538932633420822,
      "grad_norm": 1.1953125,
      "learning_rate": 1.8884496214508398e-05,
      "loss": 0.6358,
      "step": 1060
    },
    {
      "epoch": 1.8713910761154855,
      "grad_norm": 1.09375,
      "learning_rate": 1.8859750137802037e-05,
      "loss": 0.6321,
      "step": 1070
    },
    {
      "epoch": 1.8888888888888888,
      "grad_norm": 1.1953125,
      "learning_rate": 1.8834751130470247e-05,
      "loss": 0.6118,
      "step": 1080
    },
    {
      "epoch": 1.9063867016622922,
      "grad_norm": 1.140625,
      "learning_rate": 1.8809499996993385e-05,
      "loss": 0.6095,
      "step": 1090
    },
    {
      "epoch": 1.9238845144356955,
      "grad_norm": 1.09375,
      "learning_rate": 1.8783997549965344e-05,
      "loss": 0.622,
      "step": 1100
    },
    {
      "epoch": 1.9413823272090989,
      "grad_norm": 1.2421875,
      "learning_rate": 1.875824461006741e-05,
      "loss": 0.636,
      "step": 1110
    },
    {
      "epoch": 1.9588801399825022,
      "grad_norm": 1.125,
      "learning_rate": 1.8732242006041864e-05,
      "loss": 0.6089,
      "step": 1120
    },
    {
      "epoch": 1.9763779527559056,
      "grad_norm": 1.1328125,
      "learning_rate": 1.8705990574665288e-05,
      "loss": 0.6169,
      "step": 1130
    },
    {
      "epoch": 1.993875765529309,
      "grad_norm": 1.1640625,
      "learning_rate": 1.8679491160721654e-05,
      "loss": 0.6174,
      "step": 1140
    },
    {
      "epoch": 2.010498687664042,
      "grad_norm": 1.640625,
      "learning_rate": 1.8652744616975142e-05,
      "loss": 0.566,
      "step": 1150
    },
    {
      "epoch": 2.027996500437445,
      "grad_norm": 1.6328125,
      "learning_rate": 1.8625751804142685e-05,
      "loss": 0.5181,
      "step": 1160
    },
    {
      "epoch": 2.0454943132108485,
      "grad_norm": 1.2890625,
      "learning_rate": 1.8598513590866278e-05,
      "loss": 0.5257,
      "step": 1170
    },
    {
      "epoch": 2.062992125984252,
      "grad_norm": 1.2890625,
      "learning_rate": 1.8571030853685016e-05,
      "loss": 0.5193,
      "step": 1180
    },
    {
      "epoch": 2.080489938757655,
      "grad_norm": 1.3125,
      "learning_rate": 1.8543304477006906e-05,
      "loss": 0.5252,
      "step": 1190
    },
    {
      "epoch": 2.0979877515310585,
      "grad_norm": 1.390625,
      "learning_rate": 1.851533535308038e-05,
      "loss": 0.5194,
      "step": 1200
    },
    {
      "epoch": 2.115485564304462,
      "grad_norm": 1.1875,
      "learning_rate": 1.848712438196561e-05,
      "loss": 0.5126,
      "step": 1210
    },
    {
      "epoch": 2.1329833770778652,
      "grad_norm": 1.2890625,
      "learning_rate": 1.8458672471505522e-05,
      "loss": 0.5007,
      "step": 1220
    },
    {
      "epoch": 2.1504811898512686,
      "grad_norm": 1.2421875,
      "learning_rate": 1.8429980537296587e-05,
      "loss": 0.5205,
      "step": 1230
    },
    {
      "epoch": 2.167979002624672,
      "grad_norm": 1.25,
      "learning_rate": 1.840104950265936e-05,
      "loss": 0.5018,
      "step": 1240
    },
    {
      "epoch": 2.1854768153980753,
      "grad_norm": 1.359375,
      "learning_rate": 1.8371880298608768e-05,
      "loss": 0.5216,
      "step": 1250
    },
    {
      "epoch": 2.2029746281714786,
      "grad_norm": 1.4921875,
      "learning_rate": 1.834247386382414e-05,
      "loss": 0.5109,
      "step": 1260
    },
    {
      "epoch": 2.220472440944882,
      "grad_norm": 1.359375,
      "learning_rate": 1.8312831144619024e-05,
      "loss": 0.512,
      "step": 1270
    },
    {
      "epoch": 2.2379702537182853,
      "grad_norm": 1.390625,
      "learning_rate": 1.828295309491069e-05,
      "loss": 0.5139,
      "step": 1280
    },
    {
      "epoch": 2.2554680664916886,
      "grad_norm": 1.3203125,
      "learning_rate": 1.8252840676189484e-05,
      "loss": 0.5277,
      "step": 1290
    },
    {
      "epoch": 2.272965879265092,
      "grad_norm": 1.4375,
      "learning_rate": 1.8222494857487845e-05,
      "loss": 0.5175,
      "step": 1300
    },
    {
      "epoch": 2.2904636920384953,
      "grad_norm": 1.4609375,
      "learning_rate": 1.819191661534915e-05,
      "loss": 0.5247,
      "step": 1310
    },
    {
      "epoch": 2.3079615048118987,
      "grad_norm": 1.3046875,
      "learning_rate": 1.8161106933796268e-05,
      "loss": 0.5173,
      "step": 1320
    },
    {
      "epoch": 2.325459317585302,
      "grad_norm": 2.0,
      "learning_rate": 1.813006680429991e-05,
      "loss": 0.5262,
      "step": 1330
    },
    {
      "epoch": 2.3429571303587053,
      "grad_norm": 1.328125,
      "learning_rate": 1.8098797225746703e-05,
      "loss": 0.5167,
      "step": 1340
    },
    {
      "epoch": 2.3604549431321082,
      "grad_norm": 1.2890625,
      "learning_rate": 1.8067299204407075e-05,
      "loss": 0.5254,
      "step": 1350
    },
    {
      "epoch": 2.377952755905512,
      "grad_norm": 1.40625,
      "learning_rate": 1.803557375390284e-05,
      "loss": 0.5234,
      "step": 1360
    },
    {
      "epoch": 2.395450568678915,
      "grad_norm": 1.4375,
      "learning_rate": 1.8003621895174603e-05,
      "loss": 0.5113,
      "step": 1370
    },
    {
      "epoch": 2.4129483814523183,
      "grad_norm": 1.296875,
      "learning_rate": 1.797144465644889e-05,
      "loss": 0.5177,
      "step": 1380
    },
    {
      "epoch": 2.4304461942257216,
      "grad_norm": 1.34375,
      "learning_rate": 1.7939043073205076e-05,
      "loss": 0.5289,
      "step": 1390
    },
    {
      "epoch": 2.447944006999125,
      "grad_norm": 1.3671875,
      "learning_rate": 1.7906418188142045e-05,
      "loss": 0.524,
      "step": 1400
    },
    {
      "epoch": 2.4654418197725283,
      "grad_norm": 1.34375,
      "learning_rate": 1.7873571051144644e-05,
      "loss": 0.5193,
      "step": 1410
    },
    {
      "epoch": 2.4829396325459316,
      "grad_norm": 1.421875,
      "learning_rate": 1.7840502719249898e-05,
      "loss": 0.5117,
      "step": 1420
    },
    {
      "epoch": 2.500437445319335,
      "grad_norm": 1.375,
      "learning_rate": 1.7807214256612995e-05,
      "loss": 0.5436,
      "step": 1430
    },
    {
      "epoch": 2.5179352580927383,
      "grad_norm": 1.3203125,
      "learning_rate": 1.777370673447303e-05,
      "loss": 0.5229,
      "step": 1440
    },
    {
      "epoch": 2.5354330708661417,
      "grad_norm": 1.375,
      "learning_rate": 1.7739981231118558e-05,
      "loss": 0.5243,
      "step": 1450
    },
    {
      "epoch": 2.552930883639545,
      "grad_norm": 1.3046875,
      "learning_rate": 1.770603883185286e-05,
      "loss": 0.5244,
      "step": 1460
    },
    {
      "epoch": 2.5704286964129484,
      "grad_norm": 1.375,
      "learning_rate": 1.7671880628959034e-05,
      "loss": 0.5284,
      "step": 1470
    },
    {
      "epoch": 2.5879265091863517,
      "grad_norm": 1.234375,
      "learning_rate": 1.763750772166486e-05,
      "loss": 0.5243,
      "step": 1480
    },
    {
      "epoch": 2.605424321959755,
      "grad_norm": 1.4375,
      "learning_rate": 1.7602921216107397e-05,
      "loss": 0.5299,
      "step": 1490
    },
    {
      "epoch": 2.6229221347331584,
      "grad_norm": 1.3046875,
      "learning_rate": 1.7568122225297407e-05,
      "loss": 0.5129,
      "step": 1500
    },
    {
      "epoch": 2.6404199475065617,
      "grad_norm": 1.40625,
      "learning_rate": 1.753311186908353e-05,
      "loss": 0.5222,
      "step": 1510
    },
    {
      "epoch": 2.657917760279965,
      "grad_norm": 1.3515625,
      "learning_rate": 1.7497891274116246e-05,
      "loss": 0.5314,
      "step": 1520
    },
    {
      "epoch": 2.6754155730533684,
      "grad_norm": 1.3203125,
      "learning_rate": 1.7462461573811632e-05,
      "loss": 0.5145,
      "step": 1530
    },
    {
      "epoch": 2.6929133858267718,
      "grad_norm": 1.3203125,
      "learning_rate": 1.742682390831487e-05,
      "loss": 0.5252,
      "step": 1540
    },
    {
      "epoch": 2.710411198600175,
      "grad_norm": 1.515625,
      "learning_rate": 1.739097942446356e-05,
      "loss": 0.5354,
      "step": 1550
    },
    {
      "epoch": 2.7279090113735784,
      "grad_norm": 1.328125,
      "learning_rate": 1.7354929275750832e-05,
      "loss": 0.5128,
      "step": 1560
    },
    {
      "epoch": 2.745406824146982,
      "grad_norm": 1.2890625,
      "learning_rate": 1.731867462228819e-05,
      "loss": 0.5337,
      "step": 1570
    },
    {
      "epoch": 2.7629046369203847,
      "grad_norm": 1.2890625,
      "learning_rate": 1.7282216630768223e-05,
      "loss": 0.51,
      "step": 1580
    },
    {
      "epoch": 2.7804024496937885,
      "grad_norm": 1.296875,
      "learning_rate": 1.7245556474427036e-05,
      "loss": 0.5161,
      "step": 1590
    },
    {
      "epoch": 2.7979002624671914,
      "grad_norm": 1.4765625,
      "learning_rate": 1.7208695333006483e-05,
      "loss": 0.5197,
      "step": 1600
    },
    {
      "epoch": 2.815398075240595,
      "grad_norm": 1.3046875,
      "learning_rate": 1.717163439271623e-05,
      "loss": 0.5267,
      "step": 1610
    },
    {
      "epoch": 2.832895888013998,
      "grad_norm": 1.3828125,
      "learning_rate": 1.7134374846195566e-05,
      "loss": 0.5144,
      "step": 1620
    },
    {
      "epoch": 2.850393700787402,
      "grad_norm": 1.3046875,
      "learning_rate": 1.7096917892475024e-05,
      "loss": 0.5255,
      "step": 1630
    },
    {
      "epoch": 2.8678915135608047,
      "grad_norm": 1.546875,
      "learning_rate": 1.7059264736937796e-05,
      "loss": 0.5197,
      "step": 1640
    },
    {
      "epoch": 2.885389326334208,
      "grad_norm": 1.3828125,
      "learning_rate": 1.702141659128095e-05,
      "loss": 0.5165,
      "step": 1650
    },
    {
      "epoch": 2.9028871391076114,
      "grad_norm": 1.296875,
      "learning_rate": 1.6983374673476428e-05,
      "loss": 0.5202,
      "step": 1660
    },
    {
      "epoch": 2.9203849518810148,
      "grad_norm": 1.328125,
      "learning_rate": 1.694514020773186e-05,
      "loss": 0.5227,
      "step": 1670
    },
    {
      "epoch": 2.937882764654418,
      "grad_norm": 1.328125,
      "learning_rate": 1.6906714424451154e-05,
      "loss": 0.5223,
      "step": 1680
    },
    {
      "epoch": 2.9553805774278215,
      "grad_norm": 1.3359375,
      "learning_rate": 1.6868098560194926e-05,
      "loss": 0.528,
      "step": 1690
    },
    {
      "epoch": 2.972878390201225,
      "grad_norm": 1.3046875,
      "learning_rate": 1.6829293857640675e-05,
      "loss": 0.518,
      "step": 1700
    },
    {
      "epoch": 2.990376202974628,
      "grad_norm": 1.3671875,
      "learning_rate": 1.679030156554282e-05,
      "loss": 0.5345,
      "step": 1710
    },
    {
      "epoch": 3.0069991251093615,
      "grad_norm": 1.6015625,
      "learning_rate": 1.6751122938692493e-05,
      "loss": 0.4821,
      "step": 1720
    },
    {
      "epoch": 3.024496937882765,
      "grad_norm": 1.6796875,
      "learning_rate": 1.6711759237877188e-05,
      "loss": 0.4391,
      "step": 1730
    },
    {
      "epoch": 3.041994750656168,
      "grad_norm": 1.890625,
      "learning_rate": 1.667221172984015e-05,
      "loss": 0.4258,
      "step": 1740
    },
    {
      "epoch": 3.059492563429571,
      "grad_norm": 1.6796875,
      "learning_rate": 1.663248168723964e-05,
      "loss": 0.4333,
      "step": 1750
    },
    {
      "epoch": 3.0769903762029744,
      "grad_norm": 1.7890625,
      "learning_rate": 1.6592570388607978e-05,
      "loss": 0.4318,
      "step": 1760
    },
    {
      "epoch": 3.094488188976378,
      "grad_norm": 1.703125,
      "learning_rate": 1.6552479118310378e-05,
      "loss": 0.4146,
      "step": 1770
    },
    {
      "epoch": 3.111986001749781,
      "grad_norm": 1.5078125,
      "learning_rate": 1.651220916650364e-05,
      "loss": 0.4209,
      "step": 1780
    },
    {
      "epoch": 3.1294838145231845,
      "grad_norm": 1.6171875,
      "learning_rate": 1.6471761829094623e-05,
      "loss": 0.4331,
      "step": 1790
    },
    {
      "epoch": 3.146981627296588,
      "grad_norm": 1.578125,
      "learning_rate": 1.643113840769854e-05,
      "loss": 0.428,
      "step": 1800
    },
    {
      "epoch": 3.164479440069991,
      "grad_norm": 1.9609375,
      "learning_rate": 1.6390340209597074e-05,
      "loss": 0.4195,
      "step": 1810
    },
    {
      "epoch": 3.1819772528433945,
      "grad_norm": 1.7109375,
      "learning_rate": 1.6349368547696297e-05,
      "loss": 0.4232,
      "step": 1820
    },
    {
      "epoch": 3.199475065616798,
      "grad_norm": 1.6171875,
      "learning_rate": 1.6308224740484456e-05,
      "loss": 0.4258,
      "step": 1830
    },
    {
      "epoch": 3.216972878390201,
      "grad_norm": 1.640625,
      "learning_rate": 1.6266910111989498e-05,
      "loss": 0.424,
      "step": 1840
    },
    {
      "epoch": 3.2344706911636045,
      "grad_norm": 1.5703125,
      "learning_rate": 1.622542599173649e-05,
      "loss": 0.4326,
      "step": 1850
    },
    {
      "epoch": 3.251968503937008,
      "grad_norm": 1.4921875,
      "learning_rate": 1.6183773714704824e-05,
      "loss": 0.436,
      "step": 1860
    },
    {
      "epoch": 3.269466316710411,
      "grad_norm": 1.671875,
      "learning_rate": 1.6141954621285267e-05,
      "loss": 0.4423,
      "step": 1870
    },
    {
      "epoch": 3.2869641294838146,
      "grad_norm": 1.71875,
      "learning_rate": 1.6099970057236808e-05,
      "loss": 0.4348,
      "step": 1880
    },
    {
      "epoch": 3.304461942257218,
      "grad_norm": 1.8125,
      "learning_rate": 1.605782137364338e-05,
      "loss": 0.4306,
      "step": 1890
    },
    {
      "epoch": 3.3219597550306212,
      "grad_norm": 1.609375,
      "learning_rate": 1.6015509926870343e-05,
      "loss": 0.4321,
      "step": 1900
    },
    {
      "epoch": 3.3394575678040246,
      "grad_norm": 1.8203125,
      "learning_rate": 1.597303707852087e-05,
      "loss": 0.446,
      "step": 1910
    },
    {
      "epoch": 3.356955380577428,
      "grad_norm": 1.6484375,
      "learning_rate": 1.5930404195392114e-05,
      "loss": 0.4246,
      "step": 1920
    },
    {
      "epoch": 3.3744531933508313,
      "grad_norm": 1.7734375,
      "learning_rate": 1.588761264943122e-05,
      "loss": 0.4184,
      "step": 1930
    },
    {
      "epoch": 3.3919510061242346,
      "grad_norm": 1.8828125,
      "learning_rate": 1.5844663817691187e-05,
      "loss": 0.4267,
      "step": 1940
    },
    {
      "epoch": 3.409448818897638,
      "grad_norm": 2.0,
      "learning_rate": 1.5801559082286546e-05,
      "loss": 0.4358,
      "step": 1950
    },
    {
      "epoch": 3.4269466316710413,
      "grad_norm": 1.671875,
      "learning_rate": 1.5758299830348884e-05,
      "loss": 0.4438,
      "step": 1960
    },
    {
      "epoch": 3.4444444444444446,
      "grad_norm": 1.578125,
      "learning_rate": 1.5714887453982204e-05,
      "loss": 0.4413,
      "step": 1970
    },
    {
      "epoch": 3.4619422572178475,
      "grad_norm": 1.7109375,
      "learning_rate": 1.5671323350218135e-05,
      "loss": 0.4425,
      "step": 1980
    },
    {
      "epoch": 3.4794400699912513,
      "grad_norm": 1.640625,
      "learning_rate": 1.562760892097096e-05,
      "loss": 0.4326,
      "step": 1990
    },
    {
      "epoch": 3.4969378827646542,
      "grad_norm": 1.59375,
      "learning_rate": 1.5583745572992518e-05,
      "loss": 0.4335,
      "step": 2000
    },
    {
      "epoch": 3.514435695538058,
      "grad_norm": 1.734375,
      "learning_rate": 1.553973471782692e-05,
      "loss": 0.4405,
| "step": 2010 | |
| }, | |
| { | |
| "epoch": 3.531933508311461, | |
| "grad_norm": 1.7265625, | |
| "learning_rate": 1.5495577771765134e-05, | |
| "loss": 0.4259, | |
| "step": 2020 | |
| }, | |
| { | |
| "epoch": 3.5494313210848643, | |
| "grad_norm": 1.5546875, | |
| "learning_rate": 1.5451276155799405e-05, | |
| "loss": 0.4218, | |
| "step": 2030 | |
| }, | |
| { | |
| "epoch": 3.5669291338582676, | |
| "grad_norm": 1.65625, | |
| "learning_rate": 1.540683129557752e-05, | |
| "loss": 0.4248, | |
| "step": 2040 | |
| }, | |
| { | |
| "epoch": 3.584426946631671, | |
| "grad_norm": 1.71875, | |
| "learning_rate": 1.5362244621356946e-05, | |
| "loss": 0.4276, | |
| "step": 2050 | |
| }, | |
| { | |
| "epoch": 3.6019247594050743, | |
| "grad_norm": 1.6640625, | |
| "learning_rate": 1.531751756795879e-05, | |
| "loss": 0.4408, | |
| "step": 2060 | |
| }, | |
| { | |
| "epoch": 3.6194225721784776, | |
| "grad_norm": 1.59375, | |
| "learning_rate": 1.5272651574721632e-05, | |
| "loss": 0.4264, | |
| "step": 2070 | |
| }, | |
| { | |
| "epoch": 3.636920384951881, | |
| "grad_norm": 1.7109375, | |
| "learning_rate": 1.5227648085455202e-05, | |
| "loss": 0.4295, | |
| "step": 2080 | |
| }, | |
| { | |
| "epoch": 3.6544181977252843, | |
| "grad_norm": 1.7734375, | |
| "learning_rate": 1.5182508548393917e-05, | |
| "loss": 0.427, | |
| "step": 2090 | |
| }, | |
| { | |
| "epoch": 3.6719160104986877, | |
| "grad_norm": 1.6953125, | |
| "learning_rate": 1.5137234416150288e-05, | |
| "loss": 0.4398, | |
| "step": 2100 | |
| }, | |
| { | |
| "epoch": 3.689413823272091, | |
| "grad_norm": 1.546875, | |
| "learning_rate": 1.5091827145668156e-05, | |
| "loss": 0.4411, | |
| "step": 2110 | |
| }, | |
| { | |
| "epoch": 3.7069116360454943, | |
| "grad_norm": 1.734375, | |
| "learning_rate": 1.504628819817582e-05, | |
| "loss": 0.4181, | |
| "step": 2120 | |
| }, | |
| { | |
| "epoch": 3.7244094488188977, | |
| "grad_norm": 1.6484375, | |
| "learning_rate": 1.5000619039139011e-05, | |
| "loss": 0.4384, | |
| "step": 2130 | |
| }, | |
| { | |
| "epoch": 3.741907261592301, | |
| "grad_norm": 1.7109375, | |
| "learning_rate": 1.495482113821373e-05, | |
| "loss": 0.4293, | |
| "step": 2140 | |
| }, | |
| { | |
| "epoch": 3.7594050743657044, | |
| "grad_norm": 1.7734375, | |
| "learning_rate": 1.4908895969198951e-05, | |
| "loss": 0.4298, | |
| "step": 2150 | |
| }, | |
| { | |
| "epoch": 3.7769028871391077, | |
| "grad_norm": 1.625, | |
| "learning_rate": 1.4862845009989211e-05, | |
| "loss": 0.4403, | |
| "step": 2160 | |
| }, | |
| { | |
| "epoch": 3.794400699912511, | |
| "grad_norm": 1.6640625, | |
| "learning_rate": 1.4816669742527018e-05, | |
| "loss": 0.4373, | |
| "step": 2170 | |
| }, | |
| { | |
| "epoch": 3.8118985126859144, | |
| "grad_norm": 1.546875, | |
| "learning_rate": 1.4770371652755203e-05, | |
| "loss": 0.4268, | |
| "step": 2180 | |
| }, | |
| { | |
| "epoch": 3.8293963254593177, | |
| "grad_norm": 1.8203125, | |
| "learning_rate": 1.4723952230569057e-05, | |
| "loss": 0.4274, | |
| "step": 2190 | |
| }, | |
| { | |
| "epoch": 3.846894138232721, | |
| "grad_norm": 1.6953125, | |
| "learning_rate": 1.4677412969768427e-05, | |
| "loss": 0.4313, | |
| "step": 2200 | |
| }, | |
| { | |
| "epoch": 3.864391951006124, | |
| "grad_norm": 1.6484375, | |
| "learning_rate": 1.4630755368009611e-05, | |
| "loss": 0.4465, | |
| "step": 2210 | |
| }, | |
| { | |
| "epoch": 3.8818897637795278, | |
| "grad_norm": 1.546875, | |
| "learning_rate": 1.4583980926757184e-05, | |
| "loss": 0.4454, | |
| "step": 2220 | |
| }, | |
| { | |
| "epoch": 3.8993875765529307, | |
| "grad_norm": 1.921875, | |
| "learning_rate": 1.4537091151235673e-05, | |
| "loss": 0.4412, | |
| "step": 2230 | |
| }, | |
| { | |
| "epoch": 3.9168853893263345, | |
| "grad_norm": 1.796875, | |
| "learning_rate": 1.449008755038112e-05, | |
| "loss": 0.4289, | |
| "step": 2240 | |
| }, | |
| { | |
| "epoch": 3.9343832020997374, | |
| "grad_norm": 1.8515625, | |
| "learning_rate": 1.4442971636792518e-05, | |
| "loss": 0.4311, | |
| "step": 2250 | |
| }, | |
| { | |
| "epoch": 3.9518810148731407, | |
| "grad_norm": 1.6640625, | |
| "learning_rate": 1.4395744926683142e-05, | |
| "loss": 0.4263, | |
| "step": 2260 | |
| }, | |
| { | |
| "epoch": 3.969378827646544, | |
| "grad_norm": 1.875, | |
| "learning_rate": 1.4348408939831758e-05, | |
| "loss": 0.4435, | |
| "step": 2270 | |
| }, | |
| { | |
| "epoch": 3.9868766404199474, | |
| "grad_norm": 1.65625, | |
| "learning_rate": 1.4300965199533696e-05, | |
| "loss": 0.433, | |
| "step": 2280 | |
| }, | |
| { | |
| "epoch": 4.003499562554681, | |
| "grad_norm": 2.015625, | |
| "learning_rate": 1.4253415232551861e-05, | |
| "loss": 0.4271, | |
| "step": 2290 | |
| }, | |
| { | |
| "epoch": 4.020997375328084, | |
| "grad_norm": 2.28125, | |
| "learning_rate": 1.4205760569067577e-05, | |
| "loss": 0.3527, | |
| "step": 2300 | |
| }, | |
| { | |
| "epoch": 4.038495188101487, | |
| "grad_norm": 1.7265625, | |
| "learning_rate": 1.4158002742631359e-05, | |
| "loss": 0.3734, | |
| "step": 2310 | |
| }, | |
| { | |
| "epoch": 4.05599300087489, | |
| "grad_norm": 2.296875, | |
| "learning_rate": 1.4110143290113546e-05, | |
| "loss": 0.3634, | |
| "step": 2320 | |
| }, | |
| { | |
| "epoch": 4.073490813648294, | |
| "grad_norm": 1.9921875, | |
| "learning_rate": 1.4062183751654868e-05, | |
| "loss": 0.3462, | |
| "step": 2330 | |
| }, | |
| { | |
| "epoch": 4.090988626421697, | |
| "grad_norm": 1.96875, | |
| "learning_rate": 1.4014125670616856e-05, | |
| "loss": 0.3487, | |
| "step": 2340 | |
| }, | |
| { | |
| "epoch": 4.108486439195101, | |
| "grad_norm": 1.9921875, | |
| "learning_rate": 1.3965970593532201e-05, | |
| "loss": 0.3794, | |
| "step": 2350 | |
| }, | |
| { | |
| "epoch": 4.125984251968504, | |
| "grad_norm": 1.890625, | |
| "learning_rate": 1.3917720070054965e-05, | |
| "loss": 0.3634, | |
| "step": 2360 | |
| }, | |
| { | |
| "epoch": 4.1434820647419075, | |
| "grad_norm": 1.9609375, | |
| "learning_rate": 1.386937565291073e-05, | |
| "loss": 0.3601, | |
| "step": 2370 | |
| }, | |
| { | |
| "epoch": 4.16097987751531, | |
| "grad_norm": 1.9453125, | |
| "learning_rate": 1.382093889784662e-05, | |
| "loss": 0.3654, | |
| "step": 2380 | |
| }, | |
| { | |
| "epoch": 4.178477690288714, | |
| "grad_norm": 2.078125, | |
| "learning_rate": 1.3772411363581238e-05, | |
| "loss": 0.3492, | |
| "step": 2390 | |
| }, | |
| { | |
| "epoch": 4.195975503062117, | |
| "grad_norm": 1.96875, | |
| "learning_rate": 1.3723794611754502e-05, | |
| "loss": 0.3683, | |
| "step": 2400 | |
| }, | |
| { | |
| "epoch": 4.213473315835521, | |
| "grad_norm": 1.859375, | |
| "learning_rate": 1.3675090206877399e-05, | |
| "loss": 0.3634, | |
| "step": 2410 | |
| }, | |
| { | |
| "epoch": 4.230971128608924, | |
| "grad_norm": 2.1875, | |
| "learning_rate": 1.3626299716281639e-05, | |
| "loss": 0.3623, | |
| "step": 2420 | |
| }, | |
| { | |
| "epoch": 4.248468941382328, | |
| "grad_norm": 1.890625, | |
| "learning_rate": 1.3577424710069202e-05, | |
| "loss": 0.3596, | |
| "step": 2430 | |
| }, | |
| { | |
| "epoch": 4.2659667541557305, | |
| "grad_norm": 2.25, | |
| "learning_rate": 1.3528466761061839e-05, | |
| "loss": 0.3623, | |
| "step": 2440 | |
| }, | |
| { | |
| "epoch": 4.283464566929134, | |
| "grad_norm": 1.9453125, | |
| "learning_rate": 1.3479427444750415e-05, | |
| "loss": 0.3535, | |
| "step": 2450 | |
| }, | |
| { | |
| "epoch": 4.300962379702537, | |
| "grad_norm": 2.09375, | |
| "learning_rate": 1.343030833924426e-05, | |
| "loss": 0.3592, | |
| "step": 2460 | |
| }, | |
| { | |
| "epoch": 4.318460192475941, | |
| "grad_norm": 1.9765625, | |
| "learning_rate": 1.338111102522035e-05, | |
| "loss": 0.3644, | |
| "step": 2470 | |
| }, | |
| { | |
| "epoch": 4.335958005249344, | |
| "grad_norm": 2.25, | |
| "learning_rate": 1.3331837085872444e-05, | |
| "loss": 0.3565, | |
| "step": 2480 | |
| }, | |
| { | |
| "epoch": 4.353455818022747, | |
| "grad_norm": 2.09375, | |
| "learning_rate": 1.3282488106860146e-05, | |
| "loss": 0.3613, | |
| "step": 2490 | |
| }, | |
| { | |
| "epoch": 4.3709536307961505, | |
| "grad_norm": 1.984375, | |
| "learning_rate": 1.323306567625788e-05, | |
| "loss": 0.3574, | |
| "step": 2500 | |
| }, | |
| { | |
| "epoch": 4.388451443569553, | |
| "grad_norm": 2.484375, | |
| "learning_rate": 1.3183571384503767e-05, | |
| "loss": 0.3507, | |
| "step": 2510 | |
| }, | |
| { | |
| "epoch": 4.405949256342957, | |
| "grad_norm": 1.8671875, | |
| "learning_rate": 1.3134006824348464e-05, | |
| "loss": 0.3583, | |
| "step": 2520 | |
| }, | |
| { | |
| "epoch": 4.42344706911636, | |
| "grad_norm": 2.0, | |
| "learning_rate": 1.3084373590803898e-05, | |
| "loss": 0.3567, | |
| "step": 2530 | |
| }, | |
| { | |
| "epoch": 4.440944881889764, | |
| "grad_norm": 1.96875, | |
| "learning_rate": 1.303467328109193e-05, | |
| "loss": 0.3538, | |
| "step": 2540 | |
| }, | |
| { | |
| "epoch": 4.458442694663167, | |
| "grad_norm": 2.046875, | |
| "learning_rate": 1.2984907494592983e-05, | |
| "loss": 0.3623, | |
| "step": 2550 | |
| }, | |
| { | |
| "epoch": 4.475940507436571, | |
| "grad_norm": 1.9375, | |
| "learning_rate": 1.2935077832794533e-05, | |
| "loss": 0.3649, | |
| "step": 2560 | |
| }, | |
| { | |
| "epoch": 4.4934383202099735, | |
| "grad_norm": 1.984375, | |
| "learning_rate": 1.2885185899239617e-05, | |
| "loss": 0.3551, | |
| "step": 2570 | |
| }, | |
| { | |
| "epoch": 4.510936132983377, | |
| "grad_norm": 2.140625, | |
| "learning_rate": 1.2835233299475192e-05, | |
| "loss": 0.354, | |
| "step": 2580 | |
| }, | |
| { | |
| "epoch": 4.52843394575678, | |
| "grad_norm": 2.109375, | |
| "learning_rate": 1.2785221641000487e-05, | |
| "loss": 0.3652, | |
| "step": 2590 | |
| }, | |
| { | |
| "epoch": 4.545931758530184, | |
| "grad_norm": 2.0625, | |
| "learning_rate": 1.2735152533215275e-05, | |
| "loss": 0.3597, | |
| "step": 2600 | |
| }, | |
| { | |
| "epoch": 4.563429571303587, | |
| "grad_norm": 1.9375, | |
| "learning_rate": 1.2685027587368072e-05, | |
| "loss": 0.3567, | |
| "step": 2610 | |
| }, | |
| { | |
| "epoch": 4.580927384076991, | |
| "grad_norm": 1.75, | |
| "learning_rate": 1.2634848416504289e-05, | |
| "loss": 0.3577, | |
| "step": 2620 | |
| }, | |
| { | |
| "epoch": 4.5984251968503935, | |
| "grad_norm": 1.9453125, | |
| "learning_rate": 1.2584616635414325e-05, | |
| "loss": 0.3576, | |
| "step": 2630 | |
| }, | |
| { | |
| "epoch": 4.615923009623797, | |
| "grad_norm": 1.9609375, | |
| "learning_rate": 1.2534333860581607e-05, | |
| "loss": 0.3609, | |
| "step": 2640 | |
| }, | |
| { | |
| "epoch": 4.6334208223972, | |
| "grad_norm": 2.109375, | |
| "learning_rate": 1.248400171013056e-05, | |
| "loss": 0.3525, | |
| "step": 2650 | |
| }, | |
| { | |
| "epoch": 4.650918635170604, | |
| "grad_norm": 1.7734375, | |
| "learning_rate": 1.2433621803774542e-05, | |
| "loss": 0.3567, | |
| "step": 2660 | |
| }, | |
| { | |
| "epoch": 4.668416447944007, | |
| "grad_norm": 1.921875, | |
| "learning_rate": 1.2383195762763718e-05, | |
| "loss": 0.3623, | |
| "step": 2670 | |
| }, | |
| { | |
| "epoch": 4.685914260717411, | |
| "grad_norm": 1.8046875, | |
| "learning_rate": 1.2332725209832892e-05, | |
| "loss": 0.3643, | |
| "step": 2680 | |
| }, | |
| { | |
| "epoch": 4.703412073490814, | |
| "grad_norm": 1.8828125, | |
| "learning_rate": 1.2282211769149283e-05, | |
| "loss": 0.3642, | |
| "step": 2690 | |
| }, | |
| { | |
| "epoch": 4.7209098862642165, | |
| "grad_norm": 1.96875, | |
| "learning_rate": 1.2231657066260261e-05, | |
| "loss": 0.348, | |
| "step": 2700 | |
| }, | |
| { | |
| "epoch": 4.73840769903762, | |
| "grad_norm": 2.03125, | |
| "learning_rate": 1.2181062728041029e-05, | |
| "loss": 0.3545, | |
| "step": 2710 | |
| }, | |
| { | |
| "epoch": 4.755905511811024, | |
| "grad_norm": 2.5625, | |
| "learning_rate": 1.2130430382642276e-05, | |
| "loss": 0.3624, | |
| "step": 2720 | |
| }, | |
| { | |
| "epoch": 4.773403324584427, | |
| "grad_norm": 1.984375, | |
| "learning_rate": 1.2079761659437781e-05, | |
| "loss": 0.3719, | |
| "step": 2730 | |
| }, | |
| { | |
| "epoch": 4.79090113735783, | |
| "grad_norm": 2.140625, | |
| "learning_rate": 1.202905818897198e-05, | |
| "loss": 0.362, | |
| "step": 2740 | |
| }, | |
| { | |
| "epoch": 4.808398950131234, | |
| "grad_norm": 2.15625, | |
| "learning_rate": 1.1978321602907497e-05, | |
| "loss": 0.3691, | |
| "step": 2750 | |
| }, | |
| { | |
| "epoch": 4.8258967629046365, | |
| "grad_norm": 1.953125, | |
| "learning_rate": 1.192755353397262e-05, | |
| "loss": 0.3611, | |
| "step": 2760 | |
| }, | |
| { | |
| "epoch": 4.84339457567804, | |
| "grad_norm": 2.21875, | |
| "learning_rate": 1.187675561590878e-05, | |
| "loss": 0.356, | |
| "step": 2770 | |
| }, | |
| { | |
| "epoch": 4.860892388451443, | |
| "grad_norm": 2.0, | |
| "learning_rate": 1.1825929483417975e-05, | |
| "loss": 0.349, | |
| "step": 2780 | |
| }, | |
| { | |
| "epoch": 4.878390201224847, | |
| "grad_norm": 2.265625, | |
| "learning_rate": 1.1775076772110136e-05, | |
| "loss": 0.3643, | |
| "step": 2790 | |
| }, | |
| { | |
| "epoch": 4.89588801399825, | |
| "grad_norm": 1.953125, | |
| "learning_rate": 1.1724199118450537e-05, | |
| "loss": 0.358, | |
| "step": 2800 | |
| }, | |
| { | |
| "epoch": 4.913385826771654, | |
| "grad_norm": 2.359375, | |
| "learning_rate": 1.1673298159707086e-05, | |
| "loss": 0.358, | |
| "step": 2810 | |
| }, | |
| { | |
| "epoch": 4.930883639545057, | |
| "grad_norm": 1.875, | |
| "learning_rate": 1.1622375533897683e-05, | |
| "loss": 0.3705, | |
| "step": 2820 | |
| }, | |
| { | |
| "epoch": 4.94838145231846, | |
| "grad_norm": 2.171875, | |
| "learning_rate": 1.157143287973747e-05, | |
| "loss": 0.3638, | |
| "step": 2830 | |
| }, | |
| { | |
| "epoch": 4.965879265091863, | |
| "grad_norm": 2.46875, | |
| "learning_rate": 1.1520471836586115e-05, | |
| "loss": 0.3417, | |
| "step": 2840 | |
| }, | |
| { | |
| "epoch": 4.983377077865267, | |
| "grad_norm": 2.21875, | |
| "learning_rate": 1.1469494044395055e-05, | |
| "loss": 0.3732, | |
| "step": 2850 | |
| }, | |
| { | |
| "epoch": 5.0, | |
| "grad_norm": 2.796875, | |
| "learning_rate": 1.1418501143654713e-05, | |
| "loss": 0.376, | |
| "step": 2860 | |
| }, | |
| { | |
| "epoch": 5.017497812773403, | |
| "grad_norm": 2.6875, | |
| "learning_rate": 1.1367494775341716e-05, | |
| "loss": 0.3109, | |
| "step": 2870 | |
| }, | |
| { | |
| "epoch": 5.034995625546807, | |
| "grad_norm": 2.265625, | |
| "learning_rate": 1.1316476580866091e-05, | |
| "loss": 0.3136, | |
| "step": 2880 | |
| }, | |
| { | |
| "epoch": 5.05249343832021, | |
| "grad_norm": 2.78125, | |
| "learning_rate": 1.1265448202018432e-05, | |
| "loss": 0.3137, | |
| "step": 2890 | |
| }, | |
| { | |
| "epoch": 5.069991251093613, | |
| "grad_norm": 2.71875, | |
| "learning_rate": 1.121441128091706e-05, | |
| "loss": 0.3082, | |
| "step": 2900 | |
| }, | |
| { | |
| "epoch": 5.087489063867016, | |
| "grad_norm": 2.109375, | |
| "learning_rate": 1.1163367459955208e-05, | |
| "loss": 0.3102, | |
| "step": 2910 | |
| }, | |
| { | |
| "epoch": 5.10498687664042, | |
| "grad_norm": 2.4375, | |
| "learning_rate": 1.1112318381748135e-05, | |
| "loss": 0.3092, | |
| "step": 2920 | |
| }, | |
| { | |
| "epoch": 5.122484689413823, | |
| "grad_norm": 2.953125, | |
| "learning_rate": 1.1061265689080296e-05, | |
| "loss": 0.3054, | |
| "step": 2930 | |
| }, | |
| { | |
| "epoch": 5.139982502187227, | |
| "grad_norm": 2.0625, | |
| "learning_rate": 1.1010211024852445e-05, | |
| "loss": 0.3165, | |
| "step": 2940 | |
| }, | |
| { | |
| "epoch": 5.15748031496063, | |
| "grad_norm": 2.078125, | |
| "learning_rate": 1.0959156032028789e-05, | |
| "loss": 0.3081, | |
| "step": 2950 | |
| }, | |
| { | |
| "epoch": 5.174978127734033, | |
| "grad_norm": 2.015625, | |
| "learning_rate": 1.0908102353584117e-05, | |
| "loss": 0.3047, | |
| "step": 2960 | |
| }, | |
| { | |
| "epoch": 5.192475940507436, | |
| "grad_norm": 2.0, | |
| "learning_rate": 1.0857051632450905e-05, | |
| "loss": 0.3016, | |
| "step": 2970 | |
| }, | |
| { | |
| "epoch": 5.20997375328084, | |
| "grad_norm": 2.1875, | |
| "learning_rate": 1.0806005511466478e-05, | |
| "loss": 0.306, | |
| "step": 2980 | |
| }, | |
| { | |
| "epoch": 5.227471566054243, | |
| "grad_norm": 2.5, | |
| "learning_rate": 1.075496563332011e-05, | |
| "loss": 0.3036, | |
| "step": 2990 | |
| }, | |
| { | |
| "epoch": 5.244969378827647, | |
| "grad_norm": 2.203125, | |
| "learning_rate": 1.0703933640500183e-05, | |
| "loss": 0.3041, | |
| "step": 3000 | |
| }, | |
| { | |
| "epoch": 5.26246719160105, | |
| "grad_norm": 2.515625, | |
| "learning_rate": 1.0652911175241336e-05, | |
| "loss": 0.3183, | |
| "step": 3010 | |
| }, | |
| { | |
| "epoch": 5.2799650043744535, | |
| "grad_norm": 2.515625, | |
| "learning_rate": 1.0601899879471593e-05, | |
| "loss": 0.2964, | |
| "step": 3020 | |
| }, | |
| { | |
| "epoch": 5.297462817147856, | |
| "grad_norm": 2.453125, | |
| "learning_rate": 1.0550901394759543e-05, | |
| "loss": 0.3006, | |
| "step": 3030 | |
| }, | |
| { | |
| "epoch": 5.31496062992126, | |
| "grad_norm": 2.25, | |
| "learning_rate": 1.0499917362261505e-05, | |
| "loss": 0.3146, | |
| "step": 3040 | |
| }, | |
| { | |
| "epoch": 5.332458442694663, | |
| "grad_norm": 2.421875, | |
| "learning_rate": 1.0448949422668729e-05, | |
| "loss": 0.3153, | |
| "step": 3050 | |
| }, | |
| { | |
| "epoch": 5.349956255468067, | |
| "grad_norm": 2.234375, | |
| "learning_rate": 1.039799921615458e-05, | |
| "loss": 0.3166, | |
| "step": 3060 | |
| }, | |
| { | |
| "epoch": 5.36745406824147, | |
| "grad_norm": 2.234375, | |
| "learning_rate": 1.0347068382321758e-05, | |
| "loss": 0.3069, | |
| "step": 3070 | |
| }, | |
| { | |
| "epoch": 5.3849518810148735, | |
| "grad_norm": 2.1875, | |
| "learning_rate": 1.0296158560149554e-05, | |
| "loss": 0.3031, | |
| "step": 3080 | |
| }, | |
| { | |
| "epoch": 5.402449693788276, | |
| "grad_norm": 2.75, | |
| "learning_rate": 1.0245271387941085e-05, | |
| "loss": 0.308, | |
| "step": 3090 | |
| }, | |
| { | |
| "epoch": 5.41994750656168, | |
| "grad_norm": 2.09375, | |
| "learning_rate": 1.0194408503270582e-05, | |
| "loss": 0.3081, | |
| "step": 3100 | |
| }, | |
| { | |
| "epoch": 5.437445319335083, | |
| "grad_norm": 2.25, | |
| "learning_rate": 1.0143571542930695e-05, | |
| "loss": 0.3052, | |
| "step": 3110 | |
| }, | |
| { | |
| "epoch": 5.454943132108486, | |
| "grad_norm": 2.140625, | |
| "learning_rate": 1.0092762142879803e-05, | |
| "loss": 0.3156, | |
| "step": 3120 | |
| }, | |
| { | |
| "epoch": 5.47244094488189, | |
| "grad_norm": 2.109375, | |
| "learning_rate": 1.0041981938189401e-05, | |
| "loss": 0.3076, | |
| "step": 3130 | |
| }, | |
| { | |
| "epoch": 5.489938757655293, | |
| "grad_norm": 2.171875, | |
| "learning_rate": 9.99123256299145e-06, | |
| "loss": 0.3164, | |
| "step": 3140 | |
| }, | |
| { | |
| "epoch": 5.5074365704286965, | |
| "grad_norm": 2.234375, | |
| "learning_rate": 9.94051565042581e-06, | |
| "loss": 0.3215, | |
| "step": 3150 | |
| }, | |
| { | |
| "epoch": 5.524934383202099, | |
| "grad_norm": 2.3125, | |
| "learning_rate": 9.889832832587667e-06, | |
| "loss": 0.3076, | |
| "step": 3160 | |
| }, | |
| { | |
| "epoch": 5.542432195975503, | |
| "grad_norm": 2.296875, | |
| "learning_rate": 9.83918574047503e-06, | |
| "loss": 0.288, | |
| "step": 3170 | |
| }, | |
| { | |
| "epoch": 5.559930008748906, | |
| "grad_norm": 2.25, | |
| "learning_rate": 9.788576003936239e-06, | |
| "loss": 0.3116, | |
| "step": 3180 | |
| }, | |
| { | |
| "epoch": 5.57742782152231, | |
| "grad_norm": 2.203125, | |
| "learning_rate": 9.738005251617508e-06, | |
| "loss": 0.3083, | |
| "step": 3190 | |
| }, | |
| { | |
| "epoch": 5.594925634295713, | |
| "grad_norm": 2.234375, | |
| "learning_rate": 9.68747511091052e-06, | |
| "loss": 0.3078, | |
| "step": 3200 | |
| }, | |
| { | |
| "epoch": 5.6124234470691166, | |
| "grad_norm": 2.421875, | |
| "learning_rate": 9.636987207900056e-06, | |
| "loss": 0.3131, | |
| "step": 3210 | |
| }, | |
| { | |
| "epoch": 5.6299212598425195, | |
| "grad_norm": 2.203125, | |
| "learning_rate": 9.58654316731167e-06, | |
| "loss": 0.3147, | |
| "step": 3220 | |
| }, | |
| { | |
| "epoch": 5.647419072615923, | |
| "grad_norm": 2.109375, | |
| "learning_rate": 9.536144612459399e-06, | |
| "loss": 0.3131, | |
| "step": 3230 | |
| }, | |
| { | |
| "epoch": 5.664916885389326, | |
| "grad_norm": 1.953125, | |
| "learning_rate": 9.485793165193531e-06, | |
| "loss": 0.3147, | |
| "step": 3240 | |
| }, | |
| { | |
| "epoch": 5.68241469816273, | |
| "grad_norm": 2.234375, | |
| "learning_rate": 9.435490445848403e-06, | |
| "loss": 0.3124, | |
| "step": 3250 | |
| }, | |
| { | |
| "epoch": 5.699912510936133, | |
| "grad_norm": 2.125, | |
| "learning_rate": 9.385238073190264e-06, | |
| "loss": 0.3091, | |
| "step": 3260 | |
| }, | |
| { | |
| "epoch": 5.717410323709537, | |
| "grad_norm": 2.59375, | |
| "learning_rate": 9.335037664365185e-06, | |
| "loss": 0.2988, | |
| "step": 3270 | |
| }, | |
| { | |
| "epoch": 5.7349081364829395, | |
| "grad_norm": 2.3125, | |
| "learning_rate": 9.284890834847022e-06, | |
| "loss": 0.306, | |
| "step": 3280 | |
| }, | |
| { | |
| "epoch": 5.752405949256343, | |
| "grad_norm": 2.515625, | |
| "learning_rate": 9.234799198385402e-06, | |
| "loss": 0.3214, | |
| "step": 3290 | |
| }, | |
| { | |
| "epoch": 5.769903762029746, | |
| "grad_norm": 2.046875, | |
| "learning_rate": 9.184764366953831e-06, | |
| "loss": 0.3178, | |
| "step": 3300 | |
| }, | |
| { | |
| "epoch": 5.78740157480315, | |
| "grad_norm": 2.046875, | |
| "learning_rate": 9.134787950697798e-06, | |
| "loss": 0.3061, | |
| "step": 3310 | |
| }, | |
| { | |
| "epoch": 5.804899387576553, | |
| "grad_norm": 2.25, | |
| "learning_rate": 9.084871557882957e-06, | |
| "loss": 0.3096, | |
| "step": 3320 | |
| }, | |
| { | |
| "epoch": 5.822397200349956, | |
| "grad_norm": 2.28125, | |
| "learning_rate": 9.035016794843383e-06, | |
| "loss": 0.3131, | |
| "step": 3330 | |
| }, | |
| { | |
| "epoch": 5.83989501312336, | |
| "grad_norm": 2.09375, | |
| "learning_rate": 8.985225265929872e-06, | |
| "loss": 0.307, | |
| "step": 3340 | |
| }, | |
| { | |
| "epoch": 5.857392825896763, | |
| "grad_norm": 2.390625, | |
| "learning_rate": 8.93549857345831e-06, | |
| "loss": 0.3107, | |
| "step": 3350 | |
| }, | |
| { | |
| "epoch": 5.874890638670166, | |
| "grad_norm": 2.09375, | |
| "learning_rate": 8.885838317658126e-06, | |
| "loss": 0.3041, | |
| "step": 3360 | |
| }, | |
| { | |
| "epoch": 5.892388451443569, | |
| "grad_norm": 1.9765625, | |
| "learning_rate": 8.83624609662078e-06, | |
| "loss": 0.3123, | |
| "step": 3370 | |
| }, | |
| { | |
| "epoch": 5.909886264216973, | |
| "grad_norm": 2.234375, | |
| "learning_rate": 8.786723506248334e-06, | |
| "loss": 0.316, | |
| "step": 3380 | |
| }, | |
| { | |
| "epoch": 5.927384076990376, | |
| "grad_norm": 2.625, | |
| "learning_rate": 8.737272140202105e-06, | |
| "loss": 0.3015, | |
| "step": 3390 | |
| }, | |
| { | |
| "epoch": 5.94488188976378, | |
| "grad_norm": 2.1875, | |
| "learning_rate": 8.687893589851375e-06, | |
| "loss": 0.3216, | |
| "step": 3400 | |
| }, | |
| { | |
| "epoch": 5.9623797025371825, | |
| "grad_norm": 2.21875, | |
| "learning_rate": 8.638589444222191e-06, | |
| "loss": 0.3143, | |
| "step": 3410 | |
| }, | |
| { | |
| "epoch": 5.979877515310586, | |
| "grad_norm": 2.40625, | |
| "learning_rate": 8.5893612899462e-06, | |
| "loss": 0.3089, | |
| "step": 3420 | |
| }, | |
| { | |
| "epoch": 5.997375328083989, | |
| "grad_norm": 2.171875, | |
| "learning_rate": 8.540210711209626e-06, | |
| "loss": 0.3137, | |
| "step": 3430 | |
| }, | |
| { | |
| "epoch": 6.013998250218723, | |
| "grad_norm": 2.40625, | |
| "learning_rate": 8.491139289702262e-06, | |
| "loss": 0.2933, | |
| "step": 3440 | |
| }, | |
| { | |
| "epoch": 6.031496062992126, | |
| "grad_norm": 2.28125, | |
| "learning_rate": 8.442148604566598e-06, | |
| "loss": 0.2787, | |
| "step": 3450 | |
| }, | |
| { | |
| "epoch": 6.04899387576553, | |
| "grad_norm": 2.46875, | |
| "learning_rate": 8.393240232346976e-06, | |
| "loss": 0.2763, | |
| "step": 3460 | |
| }, | |
| { | |
| "epoch": 6.066491688538933, | |
| "grad_norm": 2.328125, | |
| "learning_rate": 8.34441574693887e-06, | |
| "loss": 0.2706, | |
| "step": 3470 | |
| }, | |
| { | |
| "epoch": 6.083989501312336, | |
| "grad_norm": 2.21875, | |
| "learning_rate": 8.295676719538231e-06, | |
| "loss": 0.2732, | |
| "step": 3480 | |
| }, | |
| { | |
| "epoch": 6.101487314085739, | |
| "grad_norm": 2.25, | |
| "learning_rate": 8.247024718590941e-06, | |
| "loss": 0.2772, | |
| "step": 3490 | |
| }, | |
| { | |
| "epoch": 6.118985126859142, | |
| "grad_norm": 2.484375, | |
| "learning_rate": 8.19846130974232e-06, | |
| "loss": 0.2791, | |
| "step": 3500 | |
| }, | |
| { | |
| "epoch": 6.136482939632546, | |
| "grad_norm": 2.484375, | |
| "learning_rate": 8.14998805578675e-06, | |
| "loss": 0.2808, | |
| "step": 3510 | |
| }, | |
| { | |
| "epoch": 6.153980752405949, | |
| "grad_norm": 2.46875, | |
| "learning_rate": 8.101606516617384e-06, | |
| "loss": 0.2908, | |
| "step": 3520 | |
| }, | |
| { | |
| "epoch": 6.171478565179353, | |
| "grad_norm": 2.359375, | |
| "learning_rate": 8.053318249175955e-06, | |
| "loss": 0.2809, | |
| "step": 3530 | |
| }, | |
| { | |
| "epoch": 6.188976377952756, | |
| "grad_norm": 2.296875, | |
| "learning_rate": 8.005124807402657e-06, | |
| "loss": 0.2708, | |
| "step": 3540 | |
| }, | |
| { | |
| "epoch": 6.206474190726159, | |
| "grad_norm": 2.265625, | |
| "learning_rate": 7.957027742186153e-06, | |
| "loss": 0.2789, | |
| "step": 3550 | |
| }, | |
| { | |
| "epoch": 6.223972003499562, | |
| "grad_norm": 2.234375, | |
| "learning_rate": 7.909028601313658e-06, | |
| "loss": 0.2752, | |
| "step": 3560 | |
| }, | |
| { | |
| "epoch": 6.241469816272966, | |
| "grad_norm": 2.234375, | |
| "learning_rate": 7.861128929421133e-06, | |
| "loss": 0.2778, | |
| "step": 3570 | |
| }, | |
| { | |
| "epoch": 6.258967629046369, | |
| "grad_norm": 2.640625, | |
| "learning_rate": 7.813330267943586e-06, | |
| "loss": 0.2779, | |
| "step": 3580 | |
| }, | |
| { | |
| "epoch": 6.276465441819773, | |
| "grad_norm": 2.015625, | |
| "learning_rate": 7.765634155065451e-06, | |
| "loss": 0.2696, | |
| "step": 3590 | |
| }, | |
| { | |
| "epoch": 6.293963254593176, | |
| "grad_norm": 2.171875, | |
| "learning_rate": 7.718042125671102e-06, | |
| "loss": 0.2813, | |
| "step": 3600 | |
| }, | |
| { | |
| "epoch": 6.311461067366579, | |
| "grad_norm": 2.21875, | |
| "learning_rate": 7.670555711295446e-06, | |
| "loss": 0.2847, | |
| "step": 3610 | |
| }, | |
| { | |
| "epoch": 6.328958880139982, | |
| "grad_norm": 2.515625, | |
| "learning_rate": 7.623176440074667e-06, | |
| "loss": 0.2761, | |
| "step": 3620 | |
| }, | |
| { | |
| "epoch": 6.346456692913386, | |
| "grad_norm": 2.546875, | |
| "learning_rate": 7.575905836697008e-06, | |
| "loss": 0.2801, | |
| "step": 3630 | |
| }, | |
| { | |
| "epoch": 6.363954505686789, | |
| "grad_norm": 2.109375, | |
| "learning_rate": 7.528745422353745e-06, | |
| "loss": 0.2848, | |
| "step": 3640 | |
| }, | |
| { | |
| "epoch": 6.381452318460193, | |
| "grad_norm": 2.75, | |
| "learning_rate": 7.481696714690203e-06, | |
| "loss": 0.2834, | |
| "step": 3650 | |
| }, | |
| { | |
| "epoch": 6.398950131233596, | |
| "grad_norm": 2.140625, | |
| "learning_rate": 7.4347612277569345e-06, | |
| "loss": 0.2743, | |
| "step": 3660 | |
| }, | |
| { | |
| "epoch": 6.4164479440069995, | |
| "grad_norm": 2.015625, | |
| "learning_rate": 7.387940471961001e-06, | |
| "loss": 0.2794, | |
| "step": 3670 | |
| }, | |
| { | |
| "epoch": 6.433945756780402, | |
| "grad_norm": 2.234375, | |
| "learning_rate": 7.341235954017351e-06, | |
| "loss": 0.277, | |
| "step": 3680 | |
| }, | |
| { | |
| "epoch": 6.451443569553806, | |
| "grad_norm": 2.15625, | |
| "learning_rate": 7.294649176900344e-06, | |
| "loss": 0.2817, | |
| "step": 3690 | |
| }, | |
| { | |
| "epoch": 6.468941382327209, | |
| "grad_norm": 2.265625, | |
| "learning_rate": 7.248181639795384e-06, | |
| "loss": 0.2816, | |
| "step": 3700 | |
| }, | |
| { | |
| "epoch": 6.486439195100612, | |
| "grad_norm": 2.328125, | |
| "learning_rate": 7.201834838050668e-06, | |
| "loss": 0.2765, | |
| "step": 3710 | |
| }, | |
| { | |
| "epoch": 6.503937007874016, | |
| "grad_norm": 2.34375, | |
| "learning_rate": 7.155610263129082e-06, | |
| "loss": 0.2778, | |
| "step": 3720 | |
| }, | |
| { | |
| "epoch": 6.5214348206474195, | |
| "grad_norm": 2.328125, | |
| "learning_rate": 7.109509402560171e-06, | |
| "loss": 0.27, | |
| "step": 3730 | |
| }, | |
| { | |
| "epoch": 6.538932633420822, | |
| "grad_norm": 2.359375, | |
| "learning_rate": 7.063533739892312e-06, | |
| "loss": 0.2713, | |
| "step": 3740 | |
| }, | |
| { | |
| "epoch": 6.556430446194225, | |
| "grad_norm": 2.375, | |
| "learning_rate": 7.017684754644938e-06, | |
| "loss": 0.2801, | |
| "step": 3750 | |
| }, | |
| { | |
| "epoch": 6.573928258967629, | |
| "grad_norm": 2.234375, | |
| "learning_rate": 6.971963922260954e-06, | |
| "loss": 0.2893, | |
| "step": 3760 | |
| }, | |
| { | |
| "epoch": 6.591426071741032, | |
| "grad_norm": 2.71875, | |
| "learning_rate": 6.926372714059227e-06, | |
| "loss": 0.2785, | |
| "step": 3770 | |
| }, | |
| { | |
| "epoch": 6.608923884514436, | |
| "grad_norm": 2.28125, | |
| "learning_rate": 6.880912597187266e-06, | |
| "loss": 0.2762, | |
| "step": 3780 | |
| }, | |
| { | |
| "epoch": 6.626421697287839, | |
| "grad_norm": 2.421875, | |
| "learning_rate": 6.835585034573993e-06, | |
| "loss": 0.2791, | |
| "step": 3790 | |
| }, | |
| { | |
| "epoch": 6.6439195100612425, | |
| "grad_norm": 2.796875, | |
| "learning_rate": 6.790391484882665e-06, | |
| "loss": 0.2795, | |
| "step": 3800 | |
| }, | |
| { | |
| "epoch": 6.661417322834645, | |
| "grad_norm": 2.078125, | |
| "learning_rate": 6.745333402463944e-06, | |
| "loss": 0.2845, | |
| "step": 3810 | |
| }, | |
| { | |
| "epoch": 6.678915135608049, | |
| "grad_norm": 2.46875, | |
| "learning_rate": 6.700412237309084e-06, | |
| "loss": 0.2829, | |
| "step": 3820 | |
| }, | |
| { | |
| "epoch": 6.696412948381452, | |
| "grad_norm": 2.421875, | |
| "learning_rate": 6.655629435003274e-06, | |
| "loss": 0.2814, | |
| "step": 3830 | |
| }, | |
| { | |
| "epoch": 6.713910761154856, | |
| "grad_norm": 2.515625, | |
| "learning_rate": 6.6109864366791225e-06, | |
| "loss": 0.2703, | |
| "step": 3840 | |
| }, | |
| { | |
| "epoch": 6.731408573928259, | |
| "grad_norm": 2.1875, | |
| "learning_rate": 6.56648467897027e-06, | |
| "loss": 0.2856, | |
| "step": 3850 | |
| }, | |
| { | |
| "epoch": 6.7489063867016625, | |
| "grad_norm": 2.109375, | |
| "learning_rate": 6.522125593965171e-06, | |
| "loss": 0.2766, | |
| "step": 3860 | |
| }, | |
| { | |
| "epoch": 6.766404199475065, | |
| "grad_norm": 2.546875, | |
| "learning_rate": 6.477910609161004e-06, | |
| "loss": 0.2792, | |
| "step": 3870 | |
| }, | |
| { | |
| "epoch": 6.783902012248469, | |
| "grad_norm": 2.5, | |
| "learning_rate": 6.433841147417717e-06, | |
| "loss": 0.28, | |
| "step": 3880 | |
| }, | |
| { | |
| "epoch": 6.801399825021872, | |
| "grad_norm": 2.125, | |
| "learning_rate": 6.389918626912277e-06, | |
| "loss": 0.2773, | |
| "step": 3890 | |
| }, | |
| { | |
| "epoch": 6.818897637795276, | |
| "grad_norm": 2.3125, | |
| "learning_rate": 6.346144461092991e-06, | |
| "loss": 0.2761, | |
| "step": 3900 | |
| }, | |
| { | |
| "epoch": 6.836395450568679, | |
| "grad_norm": 2.484375, | |
| "learning_rate": 6.302520058634057e-06, | |
| "loss": 0.2765, | |
| "step": 3910 | |
| }, | |
| { | |
| "epoch": 6.853893263342083, | |
| "grad_norm": 2.125, | |
| "learning_rate": 6.259046823390184e-06, | |
| "loss": 0.2746, | |
| "step": 3920 | |
| }, | |
| { | |
| "epoch": 6.8713910761154855, | |
| "grad_norm": 2.140625, | |
| "learning_rate": 6.2157261543514825e-06, | |
| "loss": 0.2832, | |
| "step": 3930 | |
| }, | |
| { | |
| "epoch": 6.888888888888889, | |
| "grad_norm": 2.171875, | |
| "learning_rate": 6.172559445598385e-06, | |
| "loss": 0.2831, | |
| "step": 3940 | |
| }, | |
| { | |
| "epoch": 6.906386701662292, | |
| "grad_norm": 2.09375, | |
| "learning_rate": 6.129548086256816e-06, | |
| "loss": 0.2821, | |
| "step": 3950 | |
| }, | |
| { | |
| "epoch": 6.923884514435695, | |
| "grad_norm": 2.421875, | |
| "learning_rate": 6.086693460453466e-06, | |
| "loss": 0.2905, | |
| "step": 3960 | |
| }, | |
| { | |
| "epoch": 6.941382327209099, | |
| "grad_norm": 2.046875, | |
| "learning_rate": 6.0439969472712734e-06, | |
| "loss": 0.2772, | |
| "step": 3970 | |
| }, | |
| { | |
| "epoch": 6.958880139982503, | |
| "grad_norm": 2.28125, | |
| "learning_rate": 6.001459920705046e-06, | |
| "loss": 0.2814, | |
| "step": 3980 | |
| }, | |
| { | |
| "epoch": 6.9763779527559056, | |
| "grad_norm": 2.59375, | |
| "learning_rate": 5.95908374961721e-06, | |
| "loss": 0.2674, | |
| "step": 3990 | |
| }, | |
| { | |
| "epoch": 6.9938757655293085, | |
| "grad_norm": 2.234375, | |
| "learning_rate": 5.916869797693794e-06, | |
| "loss": 0.2746, | |
| "step": 4000 | |
| }, | |
| { | |
| "epoch": 7.010498687664042, | |
| "grad_norm": 2.4375, | |
| "learning_rate": 5.874819423400538e-06, | |
| "loss": 0.2766, | |
| "step": 4010 | |
| }, | |
| { | |
| "epoch": 7.027996500437445, | |
| "grad_norm": 2.1875, | |
| "learning_rate": 5.832933979939177e-06, | |
| "loss": 0.257, | |
| "step": 4020 | |
| }, | |
| { | |
| "epoch": 7.045494313210849, | |
| "grad_norm": 2.328125, | |
| "learning_rate": 5.791214815203874e-06, | |
| "loss": 0.2575, | |
| "step": 4030 | |
| }, | |
| { | |
| "epoch": 7.062992125984252, | |
| "grad_norm": 2.03125, | |
| "learning_rate": 5.749663271737873e-06, | |
| "loss": 0.2589, | |
| "step": 4040 | |
| }, | |
| { | |
| "epoch": 7.080489938757656, | |
| "grad_norm": 2.046875, | |
| "learning_rate": 5.708280686690284e-06, | |
| "loss": 0.2585, | |
| "step": 4050 | |
| }, | |
| { | |
| "epoch": 7.0979877515310585, | |
| "grad_norm": 2.109375, | |
| "learning_rate": 5.667068391773045e-06, | |
| "loss": 0.2696, | |
| "step": 4060 | |
| }, | |
| { | |
| "epoch": 7.115485564304462, | |
| "grad_norm": 2.703125, | |
| "learning_rate": 5.62602771321808e-06, | |
| "loss": 0.263, | |
| "step": 4070 | |
| }, | |
| { | |
| "epoch": 7.132983377077865, | |
| "grad_norm": 2.453125, | |
| "learning_rate": 5.5851599717346106e-06, | |
| "loss": 0.2598, | |
| "step": 4080 | |
| }, | |
| { | |
| "epoch": 7.150481189851269, | |
| "grad_norm": 2.421875, | |
| "learning_rate": 5.5444664824666594e-06, | |
| "loss": 0.2683, | |
| "step": 4090 | |
| }, | |
| { | |
| "epoch": 7.167979002624672, | |
| "grad_norm": 2.3125, | |
| "learning_rate": 5.503948554950727e-06, | |
| "loss": 0.2669, | |
| "step": 4100 | |
| }, | |
| { | |
| "epoch": 7.185476815398075, | |
| "grad_norm": 2.328125, | |
| "learning_rate": 5.4636074930736525e-06, | |
| "loss": 0.2546, | |
| "step": 4110 | |
| }, | |
| { | |
| "epoch": 7.202974628171479, | |
| "grad_norm": 2.328125, | |
| "learning_rate": 5.423444595030648e-06, | |
| "loss": 0.262, | |
| "step": 4120 | |
| }, | |
| { | |
| "epoch": 7.2204724409448815, | |
| "grad_norm": 2.21875, | |
| "learning_rate": 5.383461153283529e-06, | |
| "loss": 0.2772, | |
| "step": 4130 | |
| }, | |
| { | |
| "epoch": 7.237970253718285, | |
| "grad_norm": 2.203125, | |
| "learning_rate": 5.343658454519113e-06, | |
| "loss": 0.2584, | |
| "step": 4140 | |
| }, | |
| { | |
| "epoch": 7.255468066491688, | |
| "grad_norm": 2.3125, | |
| "learning_rate": 5.3040377796078295e-06, | |
| "loss": 0.2626, | |
| "step": 4150 | |
| }, | |
| { | |
| "epoch": 7.272965879265092, | |
| "grad_norm": 2.09375, | |
| "learning_rate": 5.264600403562482e-06, | |
| "loss": 0.2643, | |
| "step": 4160 | |
| }, | |
| { | |
| "epoch": 7.290463692038495, | |
| "grad_norm": 2.1875, | |
| "learning_rate": 5.225347595497234e-06, | |
| "loss": 0.2598, | |
| "step": 4170 | |
| }, | |
| { | |
| "epoch": 7.307961504811899, | |
| "grad_norm": 2.09375, | |
| "learning_rate": 5.186280618586752e-06, | |
| "loss": 0.2584, | |
| "step": 4180 | |
| }, | |
| { | |
| "epoch": 7.3254593175853016, | |
| "grad_norm": 2.421875, | |
| "learning_rate": 5.147400730025567e-06, | |
| "loss": 0.2551, | |
| "step": 4190 | |
| }, | |
| { | |
| "epoch": 7.342957130358705, | |
| "grad_norm": 2.03125, | |
| "learning_rate": 5.108709180987623e-06, | |
| "loss": 0.2573, | |
| "step": 4200 | |
| }, | |
| { | |
| "epoch": 7.360454943132108, | |
| "grad_norm": 2.09375, | |
| "learning_rate": 5.070207216586e-06, | |
| "loss": 0.2546, | |
| "step": 4210 | |
| }, | |
| { | |
| "epoch": 7.377952755905512, | |
| "grad_norm": 2.203125, | |
| "learning_rate": 5.031896075832846e-06, | |
| "loss": 0.2503, | |
| "step": 4220 | |
| }, | |
| { | |
| "epoch": 7.395450568678915, | |
| "grad_norm": 2.171875, | |
| "learning_rate": 4.993776991599511e-06, | |
| "loss": 0.2612, | |
| "step": 4230 | |
| }, | |
| { | |
| "epoch": 7.412948381452319, | |
| "grad_norm": 2.40625, | |
| "learning_rate": 4.955851190576886e-06, | |
| "loss": 0.2624, | |
| "step": 4240 | |
| }, | |
| { | |
| "epoch": 7.430446194225722, | |
| "grad_norm": 2.328125, | |
| "learning_rate": 4.918119893235894e-06, | |
| "loss": 0.2627, | |
| "step": 4250 | |
| }, | |
| { | |
| "epoch": 7.447944006999125, | |
| "grad_norm": 2.390625, | |
| "learning_rate": 4.880584313788245e-06, | |
| "loss": 0.2505, | |
| "step": 4260 | |
| }, | |
| { | |
| "epoch": 7.465441819772528, | |
| "grad_norm": 2.546875, | |
| "learning_rate": 4.843245660147346e-06, | |
| "loss": 0.2545, | |
| "step": 4270 | |
| }, | |
| { | |
| "epoch": 7.482939632545932, | |
| "grad_norm": 2.3125, | |
| "learning_rate": 4.806105133889444e-06, | |
| "loss": 0.265, | |
| "step": 4280 | |
| }, | |
| { | |
| "epoch": 7.500437445319335, | |
| "grad_norm": 2.046875, | |
| "learning_rate": 4.7691639302149365e-06, | |
| "loss": 0.2661, | |
| "step": 4290 | |
| }, | |
| { | |
| "epoch": 7.517935258092739, | |
| "grad_norm": 2.203125, | |
| "learning_rate": 4.732423237909929e-06, | |
| "loss": 0.2628, | |
| "step": 4300 | |
| }, | |
| { | |
| "epoch": 7.535433070866142, | |
| "grad_norm": 2.140625, | |
| "learning_rate": 4.695884239307972e-06, | |
| "loss": 0.2691, | |
| "step": 4310 | |
| }, | |
| { | |
| "epoch": 7.5529308836395455, | |
| "grad_norm": 2.296875, | |
| "learning_rate": 4.659548110252012e-06, | |
| "loss": 0.2632, | |
| "step": 4320 | |
| }, | |
| { | |
| "epoch": 7.570428696412948, | |
| "grad_norm": 2.71875, | |
| "learning_rate": 4.623416020056556e-06, | |
| "loss": 0.2625, | |
| "step": 4330 | |
| }, | |
| { | |
| "epoch": 7.587926509186351, | |
| "grad_norm": 2.296875, | |
| "learning_rate": 4.587489131470039e-06, | |
| "loss": 0.2514, | |
| "step": 4340 | |
| }, | |
| { | |
| "epoch": 7.605424321959755, | |
| "grad_norm": 2.21875, | |
| "learning_rate": 4.551768600637407e-06, | |
| "loss": 0.2686, | |
| "step": 4350 | |
| }, | |
| { | |
| "epoch": 7.622922134733159, | |
| "grad_norm": 2.515625, | |
| "learning_rate": 4.516255577062913e-06, | |
| "loss": 0.2701, | |
| "step": 4360 | |
| }, | |
| { | |
| "epoch": 7.640419947506562, | |
| "grad_norm": 2.265625, | |
| "learning_rate": 4.48095120357312e-06, | |
| "loss": 0.2617, | |
| "step": 4370 | |
| }, | |
| { | |
| "epoch": 7.657917760279965, | |
| "grad_norm": 2.09375, | |
| "learning_rate": 4.445856616280136e-06, | |
| "loss": 0.2597, | |
| "step": 4380 | |
| }, | |
| { | |
| "epoch": 7.675415573053368, | |
| "grad_norm": 2.140625, | |
| "learning_rate": 4.410972944545041e-06, | |
| "loss": 0.2695, | |
| "step": 4390 | |
| }, | |
| { | |
| "epoch": 7.692913385826771, | |
| "grad_norm": 2.21875, | |
| "learning_rate": 4.376301310941552e-06, | |
| "loss": 0.2632, | |
| "step": 4400 | |
| }, | |
| { | |
| "epoch": 7.710411198600175, | |
| "grad_norm": 2.484375, | |
| "learning_rate": 4.3418428312198835e-06, | |
| "loss": 0.2629, | |
| "step": 4410 | |
| }, | |
| { | |
| "epoch": 7.727909011373578, | |
| "grad_norm": 2.203125, | |
| "learning_rate": 4.307598614270871e-06, | |
| "loss": 0.2505, | |
| "step": 4420 | |
| }, | |
| { | |
| "epoch": 7.745406824146982, | |
| "grad_norm": 2.171875, | |
| "learning_rate": 4.273569762090261e-06, | |
| "loss": 0.2695, | |
| "step": 4430 | |
| }, | |
| { | |
| "epoch": 7.762904636920385, | |
| "grad_norm": 2.296875, | |
| "learning_rate": 4.239757369743248e-06, | |
| "loss": 0.2636, | |
| "step": 4440 | |
| }, | |
| { | |
| "epoch": 7.7804024496937885, | |
| "grad_norm": 2.5, | |
| "learning_rate": 4.2061625253292484e-06, | |
| "loss": 0.257, | |
| "step": 4450 | |
| }, | |
| { | |
| "epoch": 7.797900262467191, | |
| "grad_norm": 2.265625, | |
| "learning_rate": 4.172786309946885e-06, | |
| "loss": 0.2672, | |
| "step": 4460 | |
| }, | |
| { | |
| "epoch": 7.815398075240595, | |
| "grad_norm": 2.40625, | |
| "learning_rate": 4.139629797659188e-06, | |
| "loss": 0.2641, | |
| "step": 4470 | |
| }, | |
| { | |
| "epoch": 7.832895888013998, | |
| "grad_norm": 2.1875, | |
| "learning_rate": 4.106694055459023e-06, | |
| "loss": 0.2654, | |
| "step": 4480 | |
| }, | |
| { | |
| "epoch": 7.850393700787402, | |
| "grad_norm": 2.671875, | |
| "learning_rate": 4.073980143234777e-06, | |
| "loss": 0.2606, | |
| "step": 4490 | |
| }, | |
| { | |
| "epoch": 7.867891513560805, | |
| "grad_norm": 2.421875, | |
| "learning_rate": 4.041489113736244e-06, | |
| "loss": 0.2669, | |
| "step": 4500 | |
| }, | |
| { | |
| "epoch": 7.8853893263342085, | |
| "grad_norm": 2.125, | |
| "learning_rate": 4.009222012540725e-06, | |
| "loss": 0.274, | |
| "step": 4510 | |
| }, | |
| { | |
| "epoch": 7.902887139107611, | |
| "grad_norm": 2.125, | |
| "learning_rate": 3.977179878019412e-06, | |
| "loss": 0.259, | |
| "step": 4520 | |
| }, | |
| { | |
| "epoch": 7.920384951881015, | |
| "grad_norm": 2.53125, | |
| "learning_rate": 3.9453637413039536e-06, | |
| "loss": 0.2611, | |
| "step": 4530 | |
| }, | |
| { | |
| "epoch": 7.937882764654418, | |
| "grad_norm": 2.40625, | |
| "learning_rate": 3.913774626253279e-06, | |
| "loss": 0.2541, | |
| "step": 4540 | |
| }, | |
| { | |
| "epoch": 7.955380577427822, | |
| "grad_norm": 2.171875, | |
| "learning_rate": 3.882413549420649e-06, | |
| "loss": 0.2657, | |
| "step": 4550 | |
| }, | |
| { | |
| "epoch": 7.972878390201225, | |
| "grad_norm": 2.3125, | |
| "learning_rate": 3.851281520020941e-06, | |
| "loss": 0.2671, | |
| "step": 4560 | |
| }, | |
| { | |
| "epoch": 7.990376202974629, | |
| "grad_norm": 1.9921875, | |
| "learning_rate": 3.820379539898173e-06, | |
| "loss": 0.271, | |
| "step": 4570 | |
| } | |
| ], | |
| "logging_steps": 10, | |
| "max_steps": 5710, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 10, | |
| "save_steps": 500, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": false | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 1.2912807922064032e+19, | |
| "train_batch_size": 1, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |