{
  "best_metric": 4.180971145629883,
  "best_model_checkpoint": "/mmfs1/gscratch/stf/abhinavp/corpus-filtering/outputs/existential-there-quantifier/lstm/3/checkpoints/checkpoint-457920",
  "epoch": 1.0250006060157382,
  "eval_steps": 10,
  "global_step": 457920,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.0, "learning_rate": 4.999998362119627e-05, "loss": 10.8193, "step": 1 },
    { "epoch": 0.0, "learning_rate": 4.999161405248948e-05, "loss": 7.5528, "step": 512 },
    { "epoch": 0.0, "learning_rate": 4.998322810497896e-05, "loss": 7.0655, "step": 1024 },
    { "epoch": 0.0, "learning_rate": 4.997484215746844e-05, "loss": 7.0038, "step": 1536 },
    { "epoch": 0.0, "learning_rate": 4.996645620995792e-05, "loss": 6.9567, "step": 2048 },
    { "epoch": 0.0, "learning_rate": 4.99580702624474e-05, "loss": 6.9333, "step": 2560 },
    { "epoch": 0.0, "learning_rate": 4.994968431493688e-05, "loss": 6.7583, "step": 3072 },
    { "epoch": 0.0, "learning_rate": 4.994129836742636e-05, "loss": 6.6494, "step": 3584 },
    { "epoch": 0.0, "learning_rate": 4.993291241991584e-05, "loss": 6.5522, "step": 4096 },
    { "epoch": 0.0, "learning_rate": 4.992454285120906e-05, "loss": 6.4697, "step": 4608 },
    { "epoch": 0.0, "learning_rate": 4.991615690369854e-05, "loss": 6.4108, "step": 5120 },
    { "epoch": 0.0, "learning_rate": 4.990777095618801e-05, "loss": 6.3505, "step": 5632 },
    { "epoch": 0.0, "learning_rate": 4.989938500867749e-05, "loss": 6.279, "step": 6144 },
    { "epoch": 0.0, "learning_rate": 4.989099906116697e-05, "loss": 6.2115, "step": 6656 },
    { "epoch": 0.0, "learning_rate": 4.988261311365645e-05, "loss": 6.163, "step": 7168 },
    { "epoch": 0.0, "learning_rate": 4.987422716614593e-05, "loss": 6.1099, "step": 7680 },
    { "epoch": 0.0, "learning_rate": 4.986584121863541e-05, "loss": 6.064, "step": 8192 },
    { "epoch": 0.0, "learning_rate": 4.985745527112489e-05, "loss": 6.0229, "step": 8704 },
    { "epoch": 0.0, "learning_rate": 4.984906932361437e-05, "loss": 5.9858, "step": 9216 },
    { "epoch": 0.0, "learning_rate": 4.984068337610385e-05, "loss": 5.9451, "step": 9728 },
    { "epoch": 0.0, "learning_rate": 4.983229742859333e-05, "loss": 5.9127, "step": 10240 },
    { "epoch": 0.0, "learning_rate": 4.9823927859886547e-05, "loss": 5.8723, "step": 10752 },
    { "epoch": 0.0, "learning_rate": 4.9815541912376026e-05, "loss": 5.8504, "step": 11264 },
    { "epoch": 0.0, "learning_rate": 4.9807155964865506e-05, "loss": 5.8095, "step": 11776 },
    { "epoch": 0.0, "learning_rate": 4.9798770017354986e-05, "loss": 5.7896, "step": 12288 },
    { "epoch": 0.0, "learning_rate": 4.9790384069844466e-05, "loss": 5.7592, "step": 12800 },
    { "epoch": 0.0, "learning_rate": 4.9781998122333946e-05, "loss": 5.7363, "step": 13312 },
    { "epoch": 0.0, "learning_rate": 4.9773612174823426e-05, "loss": 5.7164, "step": 13824 },
    { "epoch": 0.0, "learning_rate": 4.9765242606116635e-05, "loss": 5.6864, "step": 14336 },
    { "epoch": 0.0, "learning_rate": 4.9756856658606115e-05, "loss": 5.661, "step": 14848 },
    { "epoch": 0.01, "learning_rate": 4.9748470711095595e-05, "loss": 5.6484, "step": 15360 },
    { "epoch": 0.01, "learning_rate": 4.974008476358507e-05, "loss": 5.6377, "step": 15872 },
    { "epoch": 0.01, "learning_rate": 4.973169881607455e-05, "loss": 5.6148, "step": 16384 },
    { "epoch": 0.01, "learning_rate": 4.9723329247367764e-05, "loss": 5.5928, "step": 16896 },
    { "epoch": 0.01, "learning_rate": 4.9714943299857244e-05, "loss": 5.5877, "step": 17408 },
    { "epoch": 0.01, "learning_rate": 4.9706557352346724e-05, "loss": 5.5604, "step": 17920 },
    { "epoch": 0.01, "learning_rate": 4.9698171404836204e-05, "loss": 5.5467, "step": 18432 },
    { "epoch": 0.01, "learning_rate": 4.9689785457325684e-05, "loss": 5.5138, "step": 18944 },
    { "epoch": 0.01, "learning_rate": 4.96814158886189e-05, "loss": 5.517, "step": 19456 },
    { "epoch": 0.01, "learning_rate": 4.967302994110837e-05, "loss": 5.495, "step": 19968 },
    { "epoch": 0.01, "learning_rate": 4.966464399359785e-05, "loss": 5.4865, "step": 20480 },
    { "epoch": 0.01, "learning_rate": 4.965625804608733e-05, "loss": 5.4727, "step": 20992 },
    { "epoch": 0.01, "learning_rate": 4.964787209857681e-05, "loss": 5.4673, "step": 21504 },
    { "epoch": 0.01, "learning_rate": 4.963950252987002e-05, "loss": 5.4368, "step": 22016 },
    { "epoch": 0.01, "learning_rate": 4.96311165823595e-05, "loss": 5.4292, "step": 22528 },
    { "epoch": 0.01, "learning_rate": 4.962273063484898e-05, "loss": 5.4252, "step": 23040 },
    { "epoch": 0.01, "learning_rate": 4.961434468733847e-05, "loss": 5.4094, "step": 23552 },
    { "epoch": 0.01, "learning_rate": 4.960595873982795e-05, "loss": 5.4115, "step": 24064 },
    { "epoch": 0.01, "learning_rate": 4.959758917112116e-05, "loss": 5.3897, "step": 24576 },
    { "epoch": 0.01, "learning_rate": 4.958920322361064e-05, "loss": 5.3689, "step": 25088 },
    { "epoch": 0.01, "learning_rate": 4.958081727610012e-05, "loss": 5.3782, "step": 25600 },
    { "epoch": 0.01, "learning_rate": 4.95724313285896e-05, "loss": 5.3554, "step": 26112 },
    { "epoch": 0.01, "learning_rate": 4.956404538107908e-05, "loss": 5.3494, "step": 26624 },
    { "epoch": 0.01, "learning_rate": 4.955565943356856e-05, "loss": 5.3231, "step": 27136 },
    { "epoch": 0.01, "learning_rate": 4.954727348605804e-05, "loss": 5.3342, "step": 27648 },
    { "epoch": 0.01, "learning_rate": 4.953888753854752e-05, "loss": 5.3097, "step": 28160 },
    { "epoch": 0.01, "learning_rate": 4.9530534348644456e-05, "loss": 5.337, "step": 28672 },
    { "epoch": 0.01, "learning_rate": 4.9522148401133936e-05, "loss": 5.2988, "step": 29184 },
    { "epoch": 0.01, "learning_rate": 4.951376245362342e-05, "loss": 5.2995, "step": 29696 },
    { "epoch": 0.01, "learning_rate": 4.95053765061129e-05, "loss": 5.2921, "step": 30208 },
    { "epoch": 0.01, "learning_rate": 4.949699055860238e-05, "loss": 5.2727, "step": 30720 },
    { "epoch": 0.01, "learning_rate": 4.948862098989559e-05, "loss": 5.2601, "step": 31232 },
    { "epoch": 0.01, "learning_rate": 4.948023504238507e-05, "loss": 5.2592, "step": 31744 },
    { "epoch": 0.01, "learning_rate": 4.947186547367828e-05, "loss": 5.2427, "step": 32256 },
    { "epoch": 0.01, "learning_rate": 4.946347952616776e-05, "loss": 5.2365, "step": 32768 },
    { "epoch": 0.01, "learning_rate": 4.945509357865724e-05, "loss": 5.2539, "step": 33280 },
    { "epoch": 0.01, "learning_rate": 4.944670763114672e-05, "loss": 5.2348, "step": 33792 },
    { "epoch": 0.01, "learning_rate": 4.94383216836362e-05, "loss": 5.2196, "step": 34304 },
    { "epoch": 0.01, "learning_rate": 4.942993573612568e-05, "loss": 5.1965, "step": 34816 },
    { "epoch": 0.01, "learning_rate": 4.942154978861516e-05, "loss": 5.1937, "step": 35328 },
    { "epoch": 0.01, "learning_rate": 4.941316384110464e-05, "loss": 5.2021, "step": 35840 },
    { "epoch": 0.01, "learning_rate": 4.940477789359412e-05, "loss": 5.199, "step": 36352 },
    { "epoch": 0.01, "learning_rate": 4.9396408324887336e-05, "loss": 5.1908, "step": 36864 },
    { "epoch": 0.01, "learning_rate": 4.9388022377376816e-05, "loss": 5.1877, "step": 37376 },
    { "epoch": 0.01, "learning_rate": 4.9379636429866296e-05, "loss": 5.1795, "step": 37888 },
    { "epoch": 0.01, "learning_rate": 4.9371250482355776e-05, "loss": 5.172, "step": 38400 },
    { "epoch": 0.01, "learning_rate": 4.9362864534845256e-05, "loss": 5.1582, "step": 38912 },
    { "epoch": 0.01, "learning_rate": 4.935447858733473e-05, "loss": 5.1481, "step": 39424 },
    { "epoch": 0.01, "learning_rate": 4.934609263982421e-05, "loss": 5.143, "step": 39936 },
    { "epoch": 0.01, "learning_rate": 4.933770669231369e-05, "loss": 5.1294, "step": 40448 },
    { "epoch": 0.01, "learning_rate": 4.9329337123606905e-05, "loss": 5.1368, "step": 40960 },
    { "epoch": 0.01, "learning_rate": 4.932095117609638e-05, "loss": 5.1283, "step": 41472 },
    { "epoch": 0.01, "learning_rate": 4.931256522858586e-05, "loss": 5.1304, "step": 41984 },
    { "epoch": 0.01, "learning_rate": 4.9304195659879074e-05, "loss": 5.1112, "step": 42496 },
    { "epoch": 0.01, "learning_rate": 4.9295809712368554e-05, "loss": 5.0917, "step": 43008 },
    { "epoch": 0.01, "learning_rate": 4.9287423764858034e-05, "loss": 5.106, "step": 43520 },
    { "epoch": 0.01, "learning_rate": 4.9279037817347514e-05, "loss": 5.1017, "step": 44032 },
    { "epoch": 0.01, "learning_rate": 4.9270651869836994e-05, "loss": 5.0989, "step": 44544 },
    { "epoch": 0.01, "learning_rate": 4.9262265922326474e-05, "loss": 5.0846, "step": 45056 },
    { "epoch": 0.01, "learning_rate": 4.9253879974815954e-05, "loss": 5.0866, "step": 45568 },
    { "epoch": 0.02, "learning_rate": 4.9245494027305433e-05, "loss": 5.0716, "step": 46080 },
    { "epoch": 0.02, "learning_rate": 4.9237108079794913e-05, "loss": 5.0666, "step": 46592 },
    { "epoch": 0.02, "learning_rate": 4.922873851108812e-05, "loss": 5.0659, "step": 47104 },
    { "epoch": 0.02, "learning_rate": 4.922036894238133e-05, "loss": 5.0585, "step": 47616 },
    { "epoch": 0.02, "learning_rate": 4.921198299487081e-05, "loss": 5.0629, "step": 48128 },
    { "epoch": 0.02, "learning_rate": 4.920359704736029e-05, "loss": 5.0555, "step": 48640 },
    { "epoch": 0.02, "learning_rate": 4.919521109984978e-05, "loss": 5.054, "step": 49152 },
    { "epoch": 0.02, "learning_rate": 4.918682515233926e-05, "loss": 5.0308, "step": 49664 },
    { "epoch": 0.02, "learning_rate": 4.917843920482874e-05, "loss": 5.0282, "step": 50176 },
    { "epoch": 0.02, "learning_rate": 4.917005325731822e-05, "loss": 5.0263, "step": 50688 },
    { "epoch": 0.02, "learning_rate": 4.916168368861143e-05, "loss": 5.0243, "step": 51200 },
    { "epoch": 0.02, "learning_rate": 4.915329774110091e-05, "loss": 5.0169, "step": 51712 },
    { "epoch": 0.02, "learning_rate": 4.914491179359039e-05, "loss": 5.0106, "step": 52224 },
    { "epoch": 0.02, "learning_rate": 4.913652584607987e-05, "loss": 4.9992, "step": 52736 },
    { "epoch": 0.02, "learning_rate": 4.912813989856935e-05, "loss": 5.0068, "step": 53248 },
    { "epoch": 0.02, "learning_rate": 4.911975395105883e-05, "loss": 4.9826, "step": 53760 },
    { "epoch": 0.02, "learning_rate": 4.9111384382352036e-05, "loss": 4.9882, "step": 54272 },
    { "epoch": 0.02, "learning_rate": 4.9102998434841516e-05, "loss": 4.9854, "step": 54784 },
    { "epoch": 0.02, "learning_rate": 4.9094612487330996e-05, "loss": 4.9772, "step": 55296 },
    { "epoch": 0.02, "learning_rate": 4.9086226539820476e-05, "loss": 4.972, "step": 55808 },
    { "epoch": 0.02, "learning_rate": 4.907784059230996e-05, "loss": 4.9762, "step": 56320 },
    { "epoch": 0.02, "learning_rate": 4.906945464479944e-05, "loss": 4.963, "step": 56832 },
    { "epoch": 0.02, "learning_rate": 4.906106869728892e-05, "loss": 4.9663, "step": 57344 },
    { "epoch": 0.02, "learning_rate": 4.9052682749778396e-05, "loss": 4.9583, "step": 57856 },
    { "epoch": 0.02, "learning_rate": 4.904431318107161e-05, "loss": 4.9476, "step": 58368 },
    { "epoch": 0.02, "learning_rate": 4.903592723356109e-05, "loss": 4.9528, "step": 58880 },
    { "epoch": 0.02, "learning_rate": 4.9027541286050565e-05, "loss": 4.9511, "step": 59392 },
    { "epoch": 0.02, "learning_rate": 4.9019155338540045e-05, "loss": 4.9449, "step": 59904 },
    { "epoch": 0.02, "learning_rate": 4.9010769391029525e-05, "loss": 4.9299, "step": 60416 },
    { "epoch": 0.02, "learning_rate": 4.900239982232274e-05, "loss": 4.936, "step": 60928 },
    { "epoch": 0.02, "learning_rate": 4.8994013874812214e-05, "loss": 4.937, "step": 61440 },
    { "epoch": 0.02, "learning_rate": 4.89856279273017e-05, "loss": 4.9266, "step": 61952 },
    { "epoch": 0.02, "learning_rate": 4.897724197979118e-05, "loss": 4.9132, "step": 62464 },
    { "epoch": 0.02, "learning_rate": 4.896885603228066e-05, "loss": 4.9205, "step": 62976 },
    { "epoch": 0.02, "learning_rate": 4.896048646357387e-05, "loss": 4.9208, "step": 63488 },
    { "epoch": 0.02, "learning_rate": 4.895210051606335e-05, "loss": 4.9077, "step": 64000 },
    { "epoch": 0.02, "learning_rate": 4.894371456855283e-05, "loss": 4.9024, "step": 64512 },
    { "epoch": 0.02, "learning_rate": 4.8935361378649774e-05, "loss": 4.8932, "step": 65024 },
    { "epoch": 0.02, "learning_rate": 4.8926975431139254e-05, "loss": 4.8968, "step": 65536 },
    { "epoch": 0.02, "learning_rate": 4.8918589483628734e-05, "loss": 4.887, "step": 66048 },
    { "epoch": 0.02, "learning_rate": 4.8910203536118214e-05, "loss": 4.9026, "step": 66560 },
    { "epoch": 0.02, "learning_rate": 4.890181758860769e-05, "loss": 4.8919, "step": 67072 },
    { "epoch": 0.02, "learning_rate": 4.889343164109717e-05, "loss": 4.8734, "step": 67584 },
    { "epoch": 0.02, "learning_rate": 4.888504569358665e-05, "loss": 4.88, "step": 68096 },
    { "epoch": 0.02, "learning_rate": 4.8876659746076134e-05, "loss": 4.8784, "step": 68608 },
    { "epoch": 0.02, "learning_rate": 4.8868273798565614e-05, "loss": 4.874, "step": 69120 },
    { "epoch": 0.02, "learning_rate": 4.8859887851055094e-05, "loss": 4.8784, "step": 69632 },
    { "epoch": 0.02, "learning_rate": 4.88515182823483e-05, "loss": 4.8631, "step": 70144 },
    { "epoch": 0.02, "learning_rate": 4.884313233483778e-05, "loss": 4.8654, "step": 70656 },
    { "epoch": 0.02, "learning_rate": 4.883474638732726e-05, "loss": 4.8722, "step": 71168 },
    { "epoch": 0.02, "learning_rate": 4.882636043981674e-05, "loss": 4.8551, "step": 71680 },
    { "epoch": 0.02, "learning_rate": 4.881797449230622e-05, "loss": 4.8518, "step": 72192 },
    { "epoch": 0.02, "learning_rate": 4.88095885447957e-05, "loss": 4.8525, "step": 72704 },
    { "epoch": 0.02, "learning_rate": 4.880120259728518e-05, "loss": 4.8396, "step": 73216 },
    { "epoch": 0.02, "learning_rate": 4.879281664977466e-05, "loss": 4.8387, "step": 73728 },
    { "epoch": 0.02, "learning_rate": 4.878443070226414e-05, "loss": 4.8447, "step": 74240 },
    { "epoch": 0.02, "learning_rate": 4.877604475475362e-05, "loss": 4.8284, "step": 74752 },
    { "epoch": 0.02, "learning_rate": 4.87676588072431e-05, "loss": 4.8317, "step": 75264 },
    { "epoch": 0.02, "learning_rate": 4.875928923853632e-05, "loss": 4.8373, "step": 75776 },
    { "epoch": 0.02, "learning_rate": 4.87509032910258e-05, "loss": 4.8327, "step": 76288 },
    { "epoch": 0.03, "eval_loss": 4.789715766906738, "eval_runtime": 296.8384, "eval_samples_per_second": 1285.518, "eval_steps_per_second": 40.173, "step": 76320 },
    { "epoch": 1.0, "learning_rate": 4.874251734351528e-05, "loss": 4.8151, "step": 76800 },
    { "epoch": 1.0, "learning_rate": 4.873413139600475e-05, "loss": 4.8145, "step": 77312 },
    { "epoch": 1.0, "learning_rate": 4.872576182729797e-05, "loss": 4.8291, "step": 77824 },
    { "epoch": 1.0, "learning_rate": 4.871737587978745e-05, "loss": 4.8121, "step": 78336 },
    { "epoch": 1.0, "learning_rate": 4.870898993227693e-05, "loss": 4.8245, "step": 78848 },
    { "epoch": 1.0, "learning_rate": 4.87006039847664e-05, "loss": 4.7966, "step": 79360 },
    { "epoch": 1.0, "learning_rate": 4.8692234416059616e-05, "loss": 4.8152, "step": 79872 },
    { "epoch": 1.0, "learning_rate": 4.8683848468549096e-05, "loss": 4.7978, "step": 80384 },
    { "epoch": 1.0, "learning_rate": 4.8675462521038576e-05, "loss": 4.7922, "step": 80896 },
    { "epoch": 1.0, "learning_rate": 4.8667076573528056e-05, "loss": 4.7973, "step": 81408 },
    { "epoch": 1.0, "learning_rate": 4.8658690626017536e-05, "loss": 4.8009, "step": 81920 },
    { "epoch": 1.0, "learning_rate": 4.8650304678507016e-05, "loss": 4.8013, "step": 82432 },
    { "epoch": 1.0, "learning_rate": 4.8641935109800225e-05, "loss": 4.7841, "step": 82944 },
    { "epoch": 1.0, "learning_rate": 4.8633549162289705e-05, "loss": 4.7819, "step": 83456 },
    { "epoch": 1.0, "learning_rate": 4.8625163214779185e-05, "loss": 4.7754, "step": 83968 },
    { "epoch": 1.0, "learning_rate": 4.8616777267268665e-05, "loss": 4.7762, "step": 84480 },
    { "epoch": 1.0, "learning_rate": 4.8608407698561874e-05, "loss": 4.774, "step": 84992 },
    { "epoch": 1.0, "learning_rate": 4.8600021751051354e-05, "loss": 4.7742, "step": 85504 },
    { "epoch": 1.0, "learning_rate": 4.8591635803540834e-05, "loss": 4.7691, "step": 86016 },
    { "epoch": 1.0, "learning_rate": 4.8583249856030314e-05, "loss": 4.7882, "step": 86528 },
    { "epoch": 1.0, "learning_rate": 4.8574863908519794e-05, "loss": 4.7655, "step": 87040 },
    { "epoch": 1.0, "learning_rate": 4.8566477961009274e-05, "loss": 4.7741, "step": 87552 },
    { "epoch": 1.0, "learning_rate": 4.8558092013498754e-05, "loss": 4.761, "step": 88064 },
    { "epoch": 1.0, "learning_rate": 4.854972244479197e-05, "loss": 4.7688, "step": 88576 },
    { "epoch": 1.0, "learning_rate": 4.854133649728145e-05, "loss": 4.7565, "step": 89088 },
    { "epoch": 1.0, "learning_rate": 4.853295054977093e-05, "loss": 4.7563, "step": 89600 },
    { "epoch": 1.0, "learning_rate": 4.852456460226041e-05, "loss": 4.751, "step": 90112 },
    { "epoch": 1.0, "learning_rate": 4.851617865474989e-05, "loss": 4.7469, "step": 90624 },
    { "epoch": 1.0, "learning_rate": 4.850779270723937e-05, "loss": 4.7304, "step": 91136 },
    { "epoch": 1.01, "learning_rate": 4.849940675972885e-05, "loss": 4.7463, "step": 91648 },
    { "epoch": 1.01, "learning_rate": 4.849102081221833e-05, "loss": 4.7427, "step": 92160 },
    { "epoch": 1.01, "learning_rate": 4.848265124351154e-05, "loss": 4.7461, "step": 92672 },
    { "epoch": 1.01, "learning_rate": 4.847426529600102e-05, "loss": 4.7357, "step": 93184 },
    { "epoch": 1.01, "learning_rate": 4.84658793484905e-05, "loss": 4.7385, "step": 93696 },
    { "epoch": 1.01, "learning_rate": 4.845750977978371e-05, "loss": 4.7318, "step": 94208 },
    { "epoch": 1.01, "learning_rate": 4.8449123832273194e-05, "loss": 4.7271, "step": 94720 },
    { "epoch": 1.01, "learning_rate": 4.8440737884762674e-05, "loss": 4.7123, "step": 95232 },
    { "epoch": 1.01, "learning_rate": 4.8432351937252154e-05, "loss": 4.7198, "step": 95744 },
    { "epoch": 1.01, "learning_rate": 4.8423965989741634e-05, "loss": 4.7173, "step": 96256 },
    { "epoch": 1.01, "learning_rate": 4.8415580042231114e-05, "loss": 4.7149, "step": 96768 },
    { "epoch": 1.01, "learning_rate": 4.840719409472059e-05, "loss": 4.7182, "step": 97280 },
    { "epoch": 1.01, "learning_rate": 4.839880814721007e-05, "loss": 4.7172, "step": 97792 },
    { "epoch": 1.01, "learning_rate": 4.839043857850328e-05, "loss": 4.7068, "step": 98304 },
    { "epoch": 1.01, "learning_rate": 4.838205263099276e-05, "loss": 4.712, "step": 98816 },
    { "epoch": 1.01, "learning_rate": 4.8373666683482236e-05, "loss": 4.7121, "step": 99328 },
    { "epoch": 1.01, "learning_rate": 4.8365280735971716e-05, "loss": 4.6985, "step": 99840 },
    { "epoch": 1.01, "learning_rate": 4.8356894788461196e-05, "loss": 4.7099, "step": 100352 },
    { "epoch": 1.01, "learning_rate": 4.834852521975441e-05, "loss": 4.6964, "step": 100864 },
    { "epoch": 1.01, "learning_rate": 4.834015565104763e-05, "loss": 4.6847, "step": 101376 },
    { "epoch": 1.01, "learning_rate": 4.833176970353711e-05, "loss": 4.7038, "step": 101888 },
    { "epoch": 1.01, "learning_rate": 4.832338375602659e-05, "loss": 4.6868, "step": 102400 },
    { "epoch": 1.01, "learning_rate": 4.831499780851606e-05, "loss": 4.6861, "step": 102912 },
    { "epoch": 1.01, "learning_rate": 4.830661186100554e-05, "loss": 4.6745, "step": 103424 },
    { "epoch": 1.01, "learning_rate": 4.829822591349502e-05, "loss": 4.6848, "step": 103936 },
    { "epoch": 1.01, "learning_rate": 4.82898399659845e-05, "loss": 4.6708, "step": 104448 },
    { "epoch": 1.01, "learning_rate": 4.828145401847398e-05, "loss": 4.7047, "step": 104960 },
    { "epoch": 1.01, "learning_rate": 4.827306807096346e-05, "loss": 4.6729, "step": 105472 },
    { "epoch": 1.01, "learning_rate": 4.826468212345294e-05, "loss": 4.6864, "step": 105984 },
    { "epoch": 1.01, "learning_rate": 4.825631255474615e-05, "loss": 4.6799, "step": 106496 },
    { "epoch": 1.01, "learning_rate": 4.8247942986039366e-05, "loss": 4.6672, "step": 107008 },
    { "epoch": 1.01, "learning_rate": 4.8239557038528846e-05, "loss": 4.6574, "step": 107520 },
    { "epoch": 1.01, "learning_rate": 4.8231171091018326e-05, "loss": 4.6714, "step": 108032 },
    { "epoch": 1.01, "learning_rate": 4.8222785143507806e-05, "loss": 4.6572, "step": 108544 },
    { "epoch": 1.01, "learning_rate": 4.8214399195997286e-05, "loss": 4.6562, "step": 109056 },
    { "epoch": 1.01, "learning_rate": 4.8206013248486766e-05, "loss": 4.6737, "step": 109568 },
    { "epoch": 1.01, "learning_rate": 4.8197627300976246e-05, "loss": 4.6682, "step": 110080 },
    { "epoch": 1.01, "learning_rate": 4.8189241353465726e-05, "loss": 4.6522, "step": 110592 },
    { "epoch": 1.01, "learning_rate": 4.8180871784758935e-05, "loss": 4.6456, "step": 111104 },
    { "epoch": 1.01, "learning_rate": 4.8172485837248415e-05, "loss": 4.646, "step": 111616 },
    { "epoch": 1.01, "learning_rate": 4.8164099889737895e-05, "loss": 4.6578, "step": 112128 },
    { "epoch": 1.01, "learning_rate": 4.8155713942227375e-05, "loss": 4.6571, "step": 112640 },
    { "epoch": 1.01, "learning_rate": 4.8147327994716854e-05, "loss": 4.6546, "step": 113152 },
    { "epoch": 1.01, "learning_rate": 4.8138958426010064e-05, "loss": 4.6547, "step": 113664 },
    { "epoch": 1.01, "learning_rate": 4.813057247849955e-05, "loss": 4.6546, "step": 114176 },
    { "epoch": 1.01, "learning_rate": 4.812220290979276e-05, "loss": 4.6548, "step": 114688 },
    { "epoch": 1.01, "learning_rate": 4.811381696228224e-05, "loss": 4.6374, "step": 115200 },
    { "epoch": 1.01, "learning_rate": 4.810543101477172e-05, "loss": 4.6412, "step": 115712 },
    { "epoch": 1.01, "learning_rate": 4.80970450672612e-05, "loss": 4.6403, "step": 116224 },
    { "epoch": 1.01, "learning_rate": 4.808865911975068e-05, "loss": 4.6251, "step": 116736 },
    { "epoch": 1.01, "learning_rate": 4.808028955104389e-05, "loss": 4.6399, "step": 117248 },
    { "epoch": 1.01, "learning_rate": 4.80719199823371e-05, "loss": 4.6376, "step": 117760 },
    { "epoch": 1.01, "learning_rate": 4.806353403482658e-05, "loss": 4.6405, "step": 118272 },
    { "epoch": 1.01, "learning_rate": 4.805514808731606e-05, "loss": 4.6218, "step": 118784 },
    { "epoch": 1.01, "learning_rate": 4.804676213980554e-05, "loss": 4.6123, "step": 119296 },
    { "epoch": 1.01, "learning_rate": 4.803837619229502e-05, "loss": 4.6298, "step": 119808 },
    { "epoch": 1.01, "learning_rate": 4.8029990244784504e-05, "loss": 4.624, "step": 120320 },
    { "epoch": 1.01, "learning_rate": 4.8021604297273984e-05, "loss": 4.6306, "step": 120832 },
    { "epoch": 1.01, "learning_rate": 4.8013218349763464e-05, "loss": 4.6225, "step": 121344 },
    { "epoch": 1.01, "learning_rate": 4.8004832402252944e-05, "loss": 4.6237, "step": 121856 },
    { "epoch": 1.02, "learning_rate": 4.7996446454742424e-05, "loss": 4.6187, "step": 122368 },
    { "epoch": 1.02, "learning_rate": 4.79880605072319e-05, "loss": 4.6113, "step": 122880 },
    { "epoch": 1.02, "learning_rate": 4.797967455972138e-05, "loss": 4.6174, "step": 123392 },
    { "epoch": 1.02, "learning_rate": 4.797128861221086e-05, "loss": 4.613, "step": 123904 },
    { "epoch": 1.02, "learning_rate": 4.796290266470034e-05, "loss": 4.6213, "step": 124416 },
    { "epoch": 1.02, "learning_rate": 4.7954533095993546e-05, "loss": 4.6214, "step": 124928 },
    { "epoch": 1.02, "learning_rate": 4.7946147148483026e-05, "loss": 4.6173, "step": 125440 },
    { "epoch": 1.02, "learning_rate": 4.7937761200972506e-05, "loss": 4.6048, "step": 125952 },
    { "epoch": 1.02, "learning_rate": 4.7929375253461986e-05, "loss": 4.6026, "step": 126464 },
    { "epoch": 1.02, "learning_rate": 4.792098930595147e-05, "loss": 4.6014, "step": 126976 },
    { "epoch": 1.02, "learning_rate": 4.791261973724468e-05, "loss": 4.6118, "step": 127488 },
    { "epoch": 1.02, "learning_rate": 4.790423378973416e-05, "loss": 4.5968, "step": 128000 },
    { "epoch": 1.02, "learning_rate": 4.789584784222364e-05, "loss": 4.5973, "step": 128512 },
    { "epoch": 1.02, "learning_rate": 4.788746189471312e-05, "loss": 4.5909, "step": 129024 },
    { "epoch": 1.02, "learning_rate": 4.78790759472026e-05, "loss": 4.6058, "step": 129536 },
    { "epoch": 1.02, "learning_rate": 4.787070637849581e-05, "loss": 4.5785, "step": 130048 },
    { "epoch": 1.02, "learning_rate": 4.786232043098529e-05, "loss": 4.5917, "step": 130560 },
    { "epoch": 1.02, "learning_rate": 4.78539508622785e-05, "loss": 4.5929, "step": 131072 },
    { "epoch": 1.02, "learning_rate": 4.784556491476798e-05, "loss": 4.5862, "step": 131584 },
    { "epoch": 1.02, "learning_rate": 4.783717896725746e-05, "loss": 4.5834, "step": 132096 },
    { "epoch": 1.02, "learning_rate": 4.782879301974694e-05, "loss": 4.5951, "step": 132608 },
    { "epoch": 1.02, "learning_rate": 4.782040707223642e-05, "loss": 4.5796, "step": 133120 },
    { "epoch": 1.02, "learning_rate": 4.7812021124725906e-05, "loss": 4.5825, "step": 133632 },
    { "epoch": 1.02, "learning_rate": 4.7803635177215386e-05, "loss": 4.5873, "step": 134144 },
    { "epoch": 1.02, "learning_rate": 4.7795249229704866e-05, "loss": 4.5734, "step": 134656 },
    { "epoch": 1.02, "learning_rate": 4.7786879660998075e-05, "loss": 4.5847, "step": 135168 },
    { "epoch": 1.02, "learning_rate": 4.7778510092291284e-05, "loss": 4.5881, "step": 135680 },
    { "epoch": 1.02, "learning_rate": 4.7770124144780764e-05, "loss": 4.5806, "step": 136192 },
    { "epoch": 1.02, "learning_rate": 4.7761738197270244e-05, "loss": 4.5692, "step": 136704 },
    { "epoch": 1.02, "learning_rate": 4.7753352249759724e-05, "loss": 4.5789, "step": 137216 },
    { "epoch": 1.02, "learning_rate": 4.7744966302249204e-05, "loss": 4.5791, "step": 137728 },
    { "epoch": 1.02, "learning_rate": 4.7736580354738684e-05, "loss": 4.5702, "step": 138240 },
    { "epoch": 1.02, "learning_rate": 4.7728194407228164e-05, "loss": 4.5644, "step": 138752 },
    { "epoch": 1.02, "learning_rate": 4.771982483852137e-05, "loss": 4.569, "step": 139264 },
    { "epoch": 1.02, "learning_rate": 4.771143889101086e-05, "loss": 4.5791, "step": 139776 },
    { "epoch": 1.02, "learning_rate": 4.770305294350034e-05, "loss": 4.5664, "step": 140288 },
    { "epoch": 1.02, "learning_rate": 4.769466699598982e-05, "loss": 4.564, "step": 140800 },
    { "epoch": 1.02, "learning_rate": 4.76862810484793e-05, "loss": 4.5531, "step": 141312 },
    { "epoch": 1.02, "learning_rate": 4.767791147977251e-05, "loss": 4.5607, "step": 141824 },
    { "epoch": 1.02, "learning_rate": 4.766952553226199e-05, "loss": 4.5547, "step": 142336 },
    { "epoch": 1.02, "learning_rate": 4.766113958475147e-05, "loss": 4.572, "step": 142848 },
    { "epoch": 1.02, "learning_rate": 4.765275363724095e-05, "loss": 4.5675, "step": 143360 },
    { "epoch": 1.02, "learning_rate": 4.764436768973043e-05, "loss": 4.5468, "step": 143872 },
    { "epoch": 1.02, "learning_rate": 4.76359817422199e-05, "loss": 4.5544, "step": 144384 },
    { "epoch": 1.02, "learning_rate": 4.762759579470938e-05, "loss": 4.5588, "step": 144896 },
    { "epoch": 1.02, "learning_rate": 4.761920984719886e-05, "loss": 4.5555, "step": 145408 },
    { "epoch": 1.02, "learning_rate": 4.761082389968834e-05, "loss": 4.5632, "step": 145920 },
    { "epoch": 1.02, "learning_rate": 4.760245433098156e-05, "loss": 4.5486, "step": 146432 },
    { "epoch": 1.02, "learning_rate": 4.7594084762274773e-05, "loss": 4.551, "step": 146944 },
    { "epoch": 1.02, "learning_rate": 4.758569881476425e-05, "loss": 4.5585, "step": 147456 },
    { "epoch": 1.02, "learning_rate": 4.757731286725373e-05, "loss": 4.5456, "step": 147968 },
    { "epoch": 1.02, "learning_rate": 4.756894329854694e-05, "loss": 4.5474, "step": 148480 },
    { "epoch": 1.02, "learning_rate": 4.756055735103642e-05, "loss": 4.5528, "step": 148992 },
    { "epoch": 1.02, "learning_rate": 4.75521714035259e-05, "loss": 4.5369, "step": 149504 },
    { "epoch": 1.02, "learning_rate": 4.7543785456015375e-05, "loss": 4.5411, "step": 150016 },
    { "epoch": 1.02, "learning_rate": 4.7535399508504855e-05, "loss": 4.548, "step": 150528 },
    { "epoch": 1.02, "learning_rate": 4.7527013560994335e-05, "loss": 4.5321, "step": 151040 },
    { "epoch": 1.02, "learning_rate": 4.7518627613483815e-05, "loss": 4.5397, "step": 151552 },
    { "epoch": 1.02, "learning_rate": 4.7510241665973295e-05, "loss": 4.5431, "step": 152064 },
    { "epoch": 1.02, "learning_rate": 4.750185571846278e-05, "loss": 4.5393, "step": 152576 },
    { "epoch": 1.03, "eval_loss": 4.501766681671143, "eval_runtime": 297.3924, "eval_samples_per_second": 1283.123, "eval_steps_per_second": 40.099, "step": 152640 },
    { "epoch": 0.0, "learning_rate": 4.749346977095226e-05, "loss": 4.5264, "step": 153088 },
    { "epoch": 0.0, "learning_rate": 4.748508382344174e-05, "loss": 4.528, "step": 153600 },
    { "epoch": 0.0, "learning_rate": 4.747669787593122e-05, "loss": 4.5475, "step": 154112 },
    { "epoch": 0.0, "learning_rate": 4.74683119284207e-05, "loss": 4.5259, "step": 154624 },
    { "epoch": 0.0, "learning_rate": 4.745992598091018e-05, "loss": 4.5412, "step": 155136 },
    { "epoch": 0.0, "learning_rate": 4.745154003339966e-05, "loss": 4.521, "step": 155648 },
    { "epoch": 0.0, "learning_rate": 4.744315408588914e-05, "loss": 4.5358, "step": 156160 },
    { "epoch": 0.0, "learning_rate": 4.743476813837862e-05, "loss": 4.5181, "step": 156672 },
    { "epoch": 0.0, "learning_rate": 4.7426382190868095e-05, "loss": 4.5296, "step": 157184 },
    { "epoch": 0.0, "learning_rate": 4.7417996243357575e-05, "loss": 4.525, "step": 157696 },
    { "epoch": 0.0, "learning_rate": 4.7409610295847055e-05, "loss": 4.5276, "step": 158208 },
    { "epoch": 0.0, "learning_rate": 4.7401224348336535e-05, "loss": 4.5342, "step": 158720 },
    { "epoch": 0.0, "learning_rate": 4.739285477962975e-05, "loss": 4.5152, "step": 159232 },
    { "epoch": 0.0, "learning_rate": 4.738446883211923e-05, "loss": 4.5156, "step": 159744 },
    { "epoch": 0.0, "learning_rate": 4.737608288460871e-05, "loss": 4.5121, "step": 160256 },
    { "epoch": 0.0, "learning_rate": 4.736769693709819e-05, "loss": 4.5088, "step": 160768 },
    { "epoch": 0.0, "learning_rate": 4.735931098958767e-05, "loss": 4.5131, "step": 161280 },
    { "epoch": 0.0, "learning_rate": 4.735092504207715e-05, "loss": 4.5168, "step": 161792 },
    { "epoch": 0.0, "learning_rate": 4.734253909456663e-05, "loss": 4.5102, "step": 162304 },
    { "epoch": 0.0, "learning_rate": 4.733415314705611e-05, "loss": 4.5325, "step": 162816 },
    { "epoch": 0.0, "learning_rate": 4.732576719954559e-05, "loss": 4.5123, "step": 163328 },
    { "epoch": 0.0, "learning_rate": 4.73173976308388e-05, "loss": 4.5189, "step": 163840 },
    { "epoch": 0.0, "learning_rate": 4.730901168332828e-05, "loss": 4.5092, "step": 164352 },
    { "epoch": 0.0, "learning_rate": 4.730062573581776e-05, "loss": 4.5184, "step": 164864 },
    { "epoch": 0.0, "learning_rate": 4.729223978830724e-05, "loss": 4.5099, "step": 165376 },
    { "epoch": 0.0, "learning_rate": 4.728387021960045e-05, "loss": 4.5035, "step": 165888 },
    { "epoch": 0.0, "learning_rate": 4.727548427208993e-05, "loss": 4.507, "step": 166400 },
    { "epoch": 0.0, "learning_rate": 4.7267098324579415e-05, "loss": 4.5009, "step": 166912 },
    { "epoch": 0.0, "learning_rate": 4.7258712377068895e-05, "loss": 4.4857, "step": 167424 },
    { "epoch": 0.01, "learning_rate": 4.7250326429558375e-05, "loss": 4.5015, "step": 167936 },
    { "epoch": 0.01, "learning_rate": 4.7241940482047855e-05, "loss": 4.5069, "step": 168448 },
    { "epoch": 0.01, "learning_rate": 4.7233554534537335e-05, "loss": 4.5024, "step": 168960 },
    { "epoch": 0.01, "learning_rate": 4.722516858702681e-05, "loss": 4.4991, "step": 169472 },
    { "epoch": 0.01, "learning_rate": 4.7216799018320024e-05, "loss": 4.5006, "step": 169984 },
    { "epoch": 0.01, "learning_rate": 4.720842944961323e-05, "loss": 4.4944, "step": 170496 },
    { "epoch": 0.01, "learning_rate": 4.720004350210271e-05, "loss": 4.4899, "step": 171008 },
    { "epoch": 0.01, "learning_rate": 4.719165755459219e-05, "loss": 4.4854, "step": 171520 },
    { "epoch": 0.01, "learning_rate": 4.718327160708167e-05, "loss": 4.4838, "step": 172032 },
    { "epoch": 0.01, "learning_rate": 4.717488565957115e-05, "loss": 4.485, "step": 172544 },
    { "epoch": 0.01, "learning_rate": 4.716651609086437e-05, "loss": 4.4847, "step": 173056 },
    { "epoch": 0.01, "learning_rate": 4.715813014335385e-05, "loss": 4.4932, "step": 173568 },
    { "epoch": 0.01, "learning_rate": 4.714974419584333e-05, "loss": 4.4926, "step": 174080 },
    { "epoch": 0.01, "learning_rate": 4.714135824833281e-05, "loss": 4.4808, "step": 174592 },
    { "epoch": 0.01, "learning_rate": 4.713297230082228e-05, "loss": 4.4881, "step": 175104 },
    { "epoch": 0.01, "learning_rate": 4.71246027321155e-05, "loss": 4.4906, "step": 175616 },
    { "epoch": 0.01, "learning_rate": 4.711621678460498e-05, "loss": 4.474, "step": 176128 },
    { "epoch": 0.01, "learning_rate": 4.710783083709446e-05, "loss": 4.4876, "step": 176640 },
    { "epoch": 0.01, "learning_rate": 4.709944488958393e-05, "loss": 4.4804, "step": 177152 },
    { "epoch": 0.01, "learning_rate": 4.7091075320877147e-05, "loss": 4.4622, "step": 177664 },
    { "epoch": 0.01, "learning_rate": 4.7082689373366627e-05, "loss": 4.4824, "step": 178176 },
    { "epoch": 0.01, "learning_rate": 4.7074303425856106e-05, "loss": 4.4747, "step": 178688 },
    { "epoch": 0.01, "learning_rate": 4.7065917478345586e-05, "loss": 4.4698, "step": 179200 },
    { "epoch": 0.01, "learning_rate": 4.7057531530835066e-05, "loss": 4.4653, "step": 179712 },
    { "epoch": 0.01, "learning_rate": 4.704916196212828e-05, "loss": 4.4685, "step": 180224 },
    { "epoch": 0.01, "learning_rate": 4.704079239342149e-05, "loss": 4.4563, "step": 180736 },
    { "epoch": 0.01, "learning_rate": 4.703240644591097e-05, "loss": 4.4859, "step": 181248 },
    { "epoch": 0.01, "learning_rate": 4.702402049840045e-05, "loss": 4.4659, "step": 181760 },
    { "epoch": 0.01, "learning_rate": 4.7015634550889924e-05, "loss": 4.4741, "step": 182272 },
    { "epoch": 0.01, "learning_rate": 4.7007248603379404e-05, "loss": 4.4743, "step": 182784 },
    { "epoch": 0.01, "learning_rate": 4.6998862655868884e-05, "loss": 4.4579, "step": 183296 },
    { "epoch": 0.01, "learning_rate": 4.6990476708358364e-05, "loss": 4.4505, "step": 183808 },
    { "epoch": 0.01, "learning_rate": 4.6982090760847844e-05, "loss": 4.4685, "step": 184320 },
    { "epoch": 0.01, "learning_rate": 4.6973704813337324e-05, "loss": 4.4539, "step": 184832 },
    { "epoch": 0.01, "learning_rate": 4.6965318865826804e-05, "loss": 4.4503, "step": 185344 },
    { "epoch": 0.01, "learning_rate": 4.695693291831629e-05, "loss": 4.4696, "step": 185856 },
    { "epoch": 0.01, "learning_rate": 4.694854697080577e-05, "loss": 4.4663, "step": 186368 },
    { "epoch": 0.01, "learning_rate": 4.694019378090271e-05, "loss": 4.4508, "step": 186880 },
    { "epoch": 0.01, "learning_rate": 4.693180783339219e-05, "loss": 4.4449, "step": 187392 },
    { "epoch": 0.01, "learning_rate": 4.692342188588167e-05, "loss": 4.4467, "step": 187904 },
    { "epoch": 0.01, "learning_rate": 4.691503593837115e-05, "loss": 4.4565, "step": 188416 },
    { "epoch": 0.01, "learning_rate": 4.690664999086063e-05, "loss": 4.4604, "step": 188928 },
    { "epoch": 0.01, "learning_rate": 4.689826404335011e-05, "loss": 4.4573, "step": 189440 },
    { "epoch": 0.01, "learning_rate": 4.688987809583959e-05, "loss": 4.4598, "step": 189952 },
    { "epoch": 0.01, "learning_rate": 4.68815085271328e-05, "loss": 4.4573, "step": 190464 },
    { "epoch": 0.01, "learning_rate": 4.687312257962228e-05, "loss": 4.46, "step": 190976 },
    { "epoch": 0.01, "learning_rate": 4.686473663211176e-05, "loss": 4.4458, "step": 191488 },
    { "epoch": 0.01, "learning_rate": 4.6856350684601245e-05, "loss": 4.4494, "step": 192000 },
    { "epoch": 0.01, "learning_rate": 4.6847981115894454e-05, "loss": 4.4526, "step": 192512 },
    { "epoch": 0.01, "learning_rate": 4.683961154718766e-05, "loss": 4.4333, "step": 193024 },
    { "epoch": 0.01, "learning_rate": 4.683122559967714e-05, "loss": 4.4479, "step": 193536 },
    { "epoch": 0.01, "learning_rate": 4.682283965216662e-05, "loss": 4.4532, "step": 194048 },
    { "epoch": 0.01, "learning_rate": 4.68144537046561e-05, "loss": 4.4513, "step": 194560 },
    { "epoch": 0.01, "learning_rate": 4.680606775714558e-05, "loss": 4.4378, "step": 195072 },
    { "epoch": 0.01, "learning_rate": 4.679768180963506e-05, "loss": 4.4252, "step": 195584 },
    { "epoch": 0.01, "learning_rate": 4.678929586212454e-05, "loss": 4.4457, "step": 196096 },
    { "epoch": 0.01, "learning_rate": 4.678090991461402e-05, "loss": 4.4354, "step": 196608 },
    { "epoch": 0.01, "learning_rate": 4.677254034590723e-05, "loss": 4.4502, "step": 197120 },
    { "epoch": 0.01, "learning_rate": 4.676415439839671e-05, "loss": 4.4401, "step": 197632 },
    { "epoch": 0.01, "learning_rate": 4.675578482968993e-05, "loss": 4.4433, "step": 198144 },
    { "epoch": 0.02, "learning_rate": 4.674739888217941e-05, "loss": 4.437, "step": 198656 },
    { "epoch": 0.02, "learning_rate": 4.673901293466889e-05, "loss": 4.4285, "step": 199168 },
    { "epoch": 0.02, "learning_rate": 4.673062698715837e-05, "loss": 4.4346, "step": 199680 },
    { "epoch": 0.02, "learning_rate": 4.672224103964785e-05, "loss": 4.4323, "step": 200192 },
    { "epoch": 0.02, "learning_rate": 4.671385509213733e-05, "loss": 4.4458, "step": 200704 },
    { "epoch": 0.02, "learning_rate": 4.670546914462681e-05, "loss": 4.4479, "step": 201216 },
    { "epoch": 0.02, "learning_rate": 4.669708319711629e-05, "loss": 4.4384, "step": 201728 },
    { "epoch": 0.02, "learning_rate": 4.668869724960576e-05, "loss": 4.4256, "step": 202240 },
    { "epoch": 0.02, "learning_rate": 4.6680327680898976e-05, "loss": 4.4291, "step": 202752 },
    { "epoch": 0.02, "learning_rate": 4.6671941733388456e-05, "loss": 4.4285, "step": 203264 },
    { "epoch": 0.02, "learning_rate": 4.6663572164681665e-05, "loss": 4.435, "step": 203776 },
    { "epoch": 0.02, "learning_rate": 4.6655186217171145e-05, "loss": 4.4238, "step": 204288 },
    { "epoch": 0.02, "learning_rate": 4.664680026966063e-05, "loss": 4.4269, "step": 204800 },
    { "epoch": 0.02, "learning_rate": 4.663841432215011e-05, "loss": 4.417, "step": 205312 },
    { "epoch": 0.02, "learning_rate": 4.663002837463959e-05, "loss": 4.436, "step": 205824 },
    { "epoch": 0.02, "learning_rate": 4.6621642427129065e-05, "loss": 4.4078, "step": 206336 },
    { "epoch": 0.02, "learning_rate": 4.6613256479618545e-05, "loss": 4.4238, "step": 206848 },
    { "epoch": 0.02, "learning_rate": 4.6604870532108025e-05, "loss": 4.4166, "step": 207360 },
    { "epoch": 0.02, "learning_rate": 4.6596484584597505e-05, "loss": 4.4222, "step": 207872 },
    { "epoch": 0.02, "learning_rate": 4.6588115015890714e-05, "loss": 4.416, "step": 208384 },
    { "epoch": 0.02, "learning_rate": 4.6579729068380194e-05, "loss": 4.4258, "step": 208896 },
    { "epoch": 0.02, "learning_rate": 4.6571343120869674e-05, "loss": 4.4189, "step": 209408 },
    { "epoch": 0.02, "learning_rate": 4.6562957173359154e-05, "loss": 4.4147, "step": 209920 },
    { "epoch": 0.02, "learning_rate": 4.655458760465237e-05, "loss": 4.4213, "step": 210432 },
    { "epoch": 0.02, "learning_rate": 4.654620165714185e-05, "loss": 4.4071, "step": 210944 },
    { "epoch": 0.02, "learning_rate": 4.6537832088435065e-05, "loss": 4.4257, "step": 211456 },
    { "epoch": 0.02, "learning_rate": 4.652944614092454e-05, "loss": 4.4288, "step": 211968 },
    { "epoch": 0.02, "learning_rate": 4.652106019341402e-05, "loss": 4.414, "step": 212480 },
    { "epoch": 0.02, "learning_rate": 4.65126742459035e-05, "loss": 4.4103, "step": 212992 },
    { "epoch": 0.02, "learning_rate": 4.650428829839298e-05, "loss": 4.4152, "step": 213504 },
    { "epoch": 0.02, "learning_rate": 4.649591872968619e-05, "loss": 4.4153, "step": 214016 },
    { "epoch": 0.02, "learning_rate": 4.648753278217567e-05, "loss": 4.4106, "step": 214528 },
    { "epoch": 0.02, "learning_rate": 4.647914683466515e-05, "loss": 4.408, "step": 215040 },
    { "epoch": 0.02, "learning_rate": 4.647076088715463e-05, "loss": 4.4067, "step": 215552 },
    { "epoch": 0.02, "learning_rate": 4.646237493964411e-05, "loss": 4.4224, "step": 216064 },
    { "epoch": 0.02, "learning_rate": 4.645398899213359e-05, "loss": 4.4115, "step": 216576 },
    { "epoch": 0.02, "learning_rate": 4.644560304462307e-05, "loss": 4.4061, "step": 217088 },
    { "epoch": 0.02, "learning_rate": 4.6437217097112554e-05, "loss": 4.3952, "step": 217600 },
    { "epoch": 0.02, "learning_rate": 4.6428831149602034e-05, "loss": 4.4075, "step": 218112 },
    { "epoch": 0.02, "learning_rate": 4.642046158089524e-05, "loss": 4.3977, "step": 218624 },
    { "epoch": 0.02, "learning_rate": 4.641207563338472e-05, "loss": 4.4171, "step": 219136 },
    { "epoch": 0.02, "learning_rate": 4.64036896858742e-05, "loss": 4.4126, "step": 219648 },
    { "epoch": 0.02, "learning_rate": 4.639530373836368e-05, "loss": 4.3982, "step": 220160 },
    { "epoch": 0.02, "learning_rate": 4.638693416965689e-05, "loss": 4.3967, "step": 220672 },
    { "epoch": 0.02, "learning_rate": 4.637854822214637e-05, "loss": 4.4046, "step": 221184 },
    { "epoch": 0.02, "learning_rate": 4.637016227463585e-05, "loss": 4.4016, "step": 221696 },
    { "epoch": 0.02, "learning_rate": 4.636177632712533e-05, "loss": 4.4127, "step": 222208 },
    { "epoch": 0.02, "learning_rate": 4.635339037961481e-05, "loss": 4.395, "step": 222720 },
    { "epoch": 0.02, "learning_rate": 4.634502081090802e-05, "loss": 4.4049, "step": 223232 },
    { "epoch": 0.02, "learning_rate": 4.633663486339751e-05, "loss": 4.4033, "step": 223744 },
    { "epoch": 0.02, "learning_rate": 4.632824891588699e-05, "loss": 4.4012, "step": 224256 },
    { "epoch": 0.02, "learning_rate": 4.631986296837647e-05, "loss": 4.4005, "step": 224768 },
    { "epoch": 0.02, "learning_rate": 4.631147702086595e-05, "loss": 4.4022, "step": 225280 },
    { "epoch": 0.02, "learning_rate": 4.630309107335542e-05, "loss": 4.3908, "step": 225792 },
    { "epoch": 0.02, "learning_rate": 4.629472150464864e-05, "loss": 4.3963, "step": 226304 },
    { "epoch": 0.02, "learning_rate": 4.628633555713812e-05, "loss": 4.4006, "step": 226816 },
    { "epoch": 0.02, "learning_rate": 4.6277949609627597e-05, "loss": 4.3913, "step": 227328 },
    { "epoch": 0.02, "learning_rate": 4.626956366211707e-05, "loss": 4.3889, "step": 227840 },
    { "epoch": 0.02, "learning_rate": 4.626117771460655e-05, "loss": 4.397, "step": 228352 },
    { "epoch": 0.02, "learning_rate": 4.625279176709603e-05, "loss": 4.394, "step": 228864 },
    { "epoch": 0.03, "eval_loss": 4.364997386932373, "eval_runtime": 291.5075, "eval_samples_per_second": 1309.026, "eval_steps_per_second": 40.908, "step": 228960 },
    { "epoch": 1.0, "learning_rate": 4.6244422198389246e-05, "loss": 4.3874, "step": 229376 },
    { "epoch": 1.0, "learning_rate": 4.6236036250878726e-05, "loss": 4.3795, "step": 229888 },
    { "epoch": 1.0, "learning_rate": 4.6227650303368205e-05, "loss": 4.404, "step": 230400 },
    { "epoch": 1.0, "learning_rate": 4.6219264355857685e-05, "loss": 4.3843, "step": 230912 },
    { "epoch": 1.0, "learning_rate": 4.6210878408347165e-05, "loss": 4.4022, "step": 231424 },
    { "epoch": 1.0, "learning_rate": 4.6202492460836645e-05, "loss": 4.3758, "step": 231936 },
    { "epoch": 1.0, "learning_rate": 4.6194106513326125e-05, "loss": 4.3963, "step": 232448 },
    { "epoch": 1.0, "learning_rate": 4.6185736944619334e-05, "loss": 4.3803, "step": 232960 },
    { "epoch": 1.0, "learning_rate": 4.6177350997108814e-05, "loss": 4.3851, "step": 233472 },
    { "epoch": 1.0, "learning_rate": 4.6168965049598294e-05, "loss": 4.3879, "step": 233984 },
    { "epoch": 1.0, "learning_rate": 4.6160579102087774e-05, "loss": 4.3862, "step": 234496 },
    { "epoch": 1.0, "learning_rate": 4.6152193154577254e-05, "loss": 4.3949, "step": 235008 },
    { "epoch": 1.0, "learning_rate": 4.614382358587046e-05, "loss": 4.3797, "step": 235520 },
    { "epoch": 1.0, "learning_rate": 4.613543763835994e-05, "loss": 4.3769, "step": 236032 },
    { "epoch": 1.0, "learning_rate": 4.612706806965316e-05, "loss": 4.3799, "step": 236544 },
    { "epoch": 1.0, "learning_rate": 4.611868212214264e-05, "loss": 4.3659, "step": 237056 },
    { "epoch": 1.0, "learning_rate": 4.611029617463212e-05, "loss": 4.3784, "step": 237568 },
    { "epoch": 1.0, "learning_rate": 4.61019102271216e-05, "loss": 4.382, "step": 238080 },
    { "epoch": 1.0, "learning_rate": 4.609352427961108e-05, "loss": 4.3725, "step": 238592 },
    { "epoch": 1.0, "learning_rate": 4.608513833210056e-05, "loss": 4.3959, "step": 239104 },
    { "epoch": 1.0, "learning_rate": 4.607675238459004e-05, "loss": 4.3805, "step": 239616 },
    { "epoch": 1.0, "learning_rate": 4.606836643707952e-05, "loss": 4.3879,
| "step": 240128 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.6059980489569e-05, | |
| "loss": 4.3762, | |
| "step": 240640 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.605159454205848e-05, | |
| "loss": 4.3832, | |
| "step": 241152 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.604320859454796e-05, | |
| "loss": 4.378, | |
| "step": 241664 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.603482264703743e-05, | |
| "loss": 4.3727, | |
| "step": 242176 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.602645307833065e-05, | |
| "loss": 4.3758, | |
| "step": 242688 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.601806713082013e-05, | |
| "loss": 4.3679, | |
| "step": 243200 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.600968118330961e-05, | |
| "loss": 4.3621, | |
| "step": 243712 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.6001311614602824e-05, | |
| "loss": 4.3665, | |
| "step": 244224 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5992925667092303e-05, | |
| "loss": 4.3766, | |
| "step": 244736 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5984539719581783e-05, | |
| "loss": 4.3753, | |
| "step": 245248 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5976153772071257e-05, | |
| "loss": 4.3718, | |
| "step": 245760 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5967767824560737e-05, | |
| "loss": 4.3745, | |
| "step": 246272 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.595939825585395e-05, | |
| "loss": 4.3591, | |
| "step": 246784 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.595101230834343e-05, | |
| "loss": 4.3635, | |
| "step": 247296 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5942626360832906e-05, | |
| "loss": 4.3631, | |
| "step": 247808 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.593425679212612e-05, | |
| "loss": 4.3539, | |
| "step": 248320 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.59258708446156e-05, | |
| "loss": 4.3613, | |
| "step": 248832 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.591748489710508e-05, | |
| "loss": 4.356, | |
| "step": 249344 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.590909894959456e-05, | |
| "loss": 4.3678, | |
| "step": 249856 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.590072938088778e-05, | |
| "loss": 4.3662, | |
| "step": 250368 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.589234343337726e-05, | |
| "loss": 4.3587, | |
| "step": 250880 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.588395748586673e-05, | |
| "loss": 4.3581, | |
| "step": 251392 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.587557153835621e-05, | |
| "loss": 4.3717, | |
| "step": 251904 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.586718559084569e-05, | |
| "loss": 4.3507, | |
| "step": 252416 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.585879964333517e-05, | |
| "loss": 4.364, | |
| "step": 252928 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.585041369582465e-05, | |
| "loss": 4.3565, | |
| "step": 253440 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.584202774831413e-05, | |
| "loss": 4.3396, | |
| "step": 253952 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.583364180080361e-05, | |
| "loss": 4.3632, | |
| "step": 254464 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.582525585329309e-05, | |
| "loss": 4.3502, | |
| "step": 254976 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.581686990578257e-05, | |
| "loss": 4.3533, | |
| "step": 255488 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.580848395827205e-05, | |
| "loss": 4.3423, | |
| "step": 256000 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5800114389565266e-05, | |
| "loss": 4.3476, | |
| "step": 256512 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5791744820858475e-05, | |
| "loss": 4.3377, | |
| "step": 257024 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5783358873347955e-05, | |
| "loss": 4.362, | |
| "step": 257536 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5774972925837435e-05, | |
| "loss": 4.3484, | |
| "step": 258048 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5766586978326915e-05, | |
| "loss": 4.3541, | |
| "step": 258560 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5758201030816395e-05, | |
| "loss": 4.3559, | |
| "step": 259072 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5749815083305875e-05, | |
| "loss": 4.3424, | |
| "step": 259584 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5741429135795355e-05, | |
| "loss": 4.3323, | |
| "step": 260096 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5733043188284835e-05, | |
| "loss": 4.3522, | |
| "step": 260608 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.572468999838177e-05, | |
| "loss": 4.3375, | |
| "step": 261120 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.571630405087125e-05, | |
| "loss": 4.3337, | |
| "step": 261632 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.570791810336074e-05, | |
| "loss": 4.3513, | |
| "step": 262144 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.569953215585022e-05, | |
| "loss": 4.349, | |
| "step": 262656 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.569116258714343e-05, | |
| "loss": 4.3373, | |
| "step": 263168 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.568277663963291e-05, | |
| "loss": 4.3334, | |
| "step": 263680 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.567439069212239e-05, | |
| "loss": 4.3278, | |
| "step": 264192 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.566600474461187e-05, | |
| "loss": 4.3383, | |
| "step": 264704 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.565761879710135e-05, | |
| "loss": 4.3481, | |
| "step": 265216 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.564923284959083e-05, | |
| "loss": 4.3463, | |
| "step": 265728 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.564084690208031e-05, | |
| "loss": 4.3461, | |
| "step": 266240 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.563246095456979e-05, | |
| "loss": 4.342, | |
| "step": 266752 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5624091385863e-05, | |
| "loss": 4.3538, | |
| "step": 267264 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5615721817156206e-05, | |
| "loss": 4.3301, | |
| "step": 267776 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.560733586964569e-05, | |
| "loss": 4.3401, | |
| "step": 268288 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.559894992213517e-05, | |
| "loss": 4.3369, | |
| "step": 268800 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.559056397462465e-05, | |
| "loss": 4.3199, | |
| "step": 269312 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.558219440591786e-05, | |
| "loss": 4.3425, | |
| "step": 269824 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.557380845840734e-05, | |
| "loss": 4.341, | |
| "step": 270336 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.556542251089682e-05, | |
| "loss": 4.3365, | |
| "step": 270848 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.55570365633863e-05, | |
| "loss": 4.3291, | |
| "step": 271360 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.554865061587578e-05, | |
| "loss": 4.3183, | |
| "step": 271872 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.554028104716899e-05, | |
| "loss": 4.3331, | |
| "step": 272384 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.553189509965847e-05, | |
| "loss": 4.3325, | |
| "step": 272896 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.552350915214795e-05, | |
| "loss": 4.3383, | |
| "step": 273408 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.551512320463743e-05, | |
| "loss": 4.3277, | |
| "step": 273920 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5506770014734376e-05, | |
| "loss": 4.3368, | |
| "step": 274432 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5498384067223856e-05, | |
| "loss": 4.3248, | |
| "step": 274944 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5489998119713336e-05, | |
| "loss": 4.3247, | |
| "step": 275456 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5481612172202816e-05, | |
| "loss": 4.3251, | |
| "step": 275968 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5473226224692296e-05, | |
| "loss": 4.3225, | |
| "step": 276480 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5464840277181776e-05, | |
| "loss": 4.3408, | |
| "step": 276992 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5456454329671256e-05, | |
| "loss": 4.3378, | |
| "step": 277504 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5448084760964465e-05, | |
| "loss": 4.3376, | |
| "step": 278016 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5439698813453945e-05, | |
| "loss": 4.3179, | |
| "step": 278528 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5431312865943425e-05, | |
| "loss": 4.3251, | |
| "step": 279040 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5422926918432905e-05, | |
| "loss": 4.325, | |
| "step": 279552 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5414540970922385e-05, | |
| "loss": 4.3287, | |
| "step": 280064 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5406155023411865e-05, | |
| "loss": 4.3202, | |
| "step": 280576 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5397769075901345e-05, | |
| "loss": 4.3208, | |
| "step": 281088 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5389383128390824e-05, | |
| "loss": 4.3117, | |
| "step": 281600 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.538101355968404e-05, | |
| "loss": 4.3324, | |
| "step": 282112 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5372627612173514e-05, | |
| "loss": 4.3058, | |
| "step": 282624 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5364241664662994e-05, | |
| "loss": 4.3198, | |
| "step": 283136 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5355855717152473e-05, | |
| "loss": 4.3164, | |
| "step": 283648 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5347469769641953e-05, | |
| "loss": 4.3191, | |
| "step": 284160 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.533908382213143e-05, | |
| "loss": 4.3143, | |
| "step": 284672 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.533069787462091e-05, | |
| "loss": 4.3265, | |
| "step": 285184 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.532231192711039e-05, | |
| "loss": 4.3169, | |
| "step": 285696 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.531392597959987e-05, | |
| "loss": 4.3101, | |
| "step": 286208 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.530557278969682e-05, | |
| "loss": 4.3242, | |
| "step": 286720 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.52971868421863e-05, | |
| "loss": 4.3069, | |
| "step": 287232 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.528880089467578e-05, | |
| "loss": 4.3227, | |
| "step": 287744 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.528041494716526e-05, | |
| "loss": 4.3295, | |
| "step": 288256 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.527202899965474e-05, | |
| "loss": 4.3181, | |
| "step": 288768 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.526364305214422e-05, | |
| "loss": 4.311, | |
| "step": 289280 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.525527348343743e-05, | |
| "loss": 4.3126, | |
| "step": 289792 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.524688753592691e-05, | |
| "loss": 4.3135, | |
| "step": 290304 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.523850158841639e-05, | |
| "loss": 4.3132, | |
| "step": 290816 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.523011564090587e-05, | |
| "loss": 4.3148, | |
| "step": 291328 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5221746072199076e-05, | |
| "loss": 4.3032, | |
| "step": 291840 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5213360124688556e-05, | |
| "loss": 4.3227, | |
| "step": 292352 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5204974177178036e-05, | |
| "loss": 4.3159, | |
| "step": 292864 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5196588229667516e-05, | |
| "loss": 4.3112, | |
| "step": 293376 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5188202282157e-05, | |
| "loss": 4.2951, | |
| "step": 293888 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.517983271345021e-05, | |
| "loss": 4.311, | |
| "step": 294400 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.517144676593969e-05, | |
| "loss": 4.3021, | |
| "step": 294912 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.516306081842917e-05, | |
| "loss": 4.3139, | |
| "step": 295424 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.515467487091865e-05, | |
| "loss": 4.3189, | |
| "step": 295936 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.514628892340813e-05, | |
| "loss": 4.3071, | |
| "step": 296448 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.513790297589761e-05, | |
| "loss": 4.294, | |
| "step": 296960 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.512951702838709e-05, | |
| "loss": 4.3091, | |
| "step": 297472 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.512113108087657e-05, | |
| "loss": 4.3053, | |
| "step": 297984 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.511277789097351e-05, | |
| "loss": 4.3176, | |
| "step": 298496 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5104408322266726e-05, | |
| "loss": 4.2971, | |
| "step": 299008 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5096022374756206e-05, | |
| "loss": 4.3086, | |
| "step": 299520 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5087636427245685e-05, | |
| "loss": 4.3088, | |
| "step": 300032 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5079250479735165e-05, | |
| "loss": 4.311, | |
| "step": 300544 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5070864532224645e-05, | |
| "loss": 4.3053, | |
| "step": 301056 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5062478584714125e-05, | |
| "loss": 4.3061, | |
| "step": 301568 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5054092637203605e-05, | |
| "loss": 4.2961, | |
| "step": 302080 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5045706689693085e-05, | |
| "loss": 4.3045, | |
| "step": 302592 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5037320742182565e-05, | |
| "loss": 4.3046, | |
| "step": 303104 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5028951173475774e-05, | |
| "loss": 4.302, | |
| "step": 303616 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5020565225965254e-05, | |
| "loss": 4.2936, | |
| "step": 304128 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5012179278454734e-05, | |
| "loss": 4.3059, | |
| "step": 304640 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5003793330944214e-05, | |
| "loss": 4.2992, | |
| "step": 305152 | |
| }, | |
| { | |
| "epoch": 1.03, | |
| "eval_loss": 4.280091285705566, | |
| "eval_runtime": 291.7383, | |
| "eval_samples_per_second": 1307.991, | |
| "eval_steps_per_second": 40.876, | |
| "step": 305280 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.4995407383433694e-05, | |
| "loss": 4.2914, | |
| "step": 305664 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.4987021435923174e-05, | |
| "loss": 4.2903, | |
| "step": 306176 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.4978635488412654e-05, | |
| "loss": 4.3098, | |
| "step": 306688 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.4970249540902134e-05, | |
| "loss": 4.2949, | |
| "step": 307200 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.4961863593391614e-05, | |
| "loss": 4.3056, | |
| "step": 307712 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.4953477645881094e-05, | |
| "loss": 4.293, | |
| "step": 308224 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.4945091698370574e-05, | |
| "loss": 4.2998, | |
| "step": 308736 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.4936705750860054e-05, | |
| "loss": 4.2937, | |
| "step": 309248 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.4928319803349534e-05, | |
| "loss": 4.2937, | |
| "step": 309760 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.4919933855839014e-05, | |
| "loss": 4.2961, | |
| "step": 310272 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.4911547908328494e-05, | |
| "loss": 4.2971, | |
| "step": 310784 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.4903161960817974e-05, | |
| "loss": 4.3047, | |
| "step": 311296 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.489479239211118e-05, | |
| "loss": 4.2893, | |
| "step": 311808 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.488642282340439e-05, | |
| "loss": 4.2885, | |
| "step": 312320 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.487803687589387e-05, | |
| "loss": 4.2894, | |
| "step": 312832 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.486965092838336e-05, | |
| "loss": 4.2791, | |
| "step": 313344 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.486126498087284e-05, | |
| "loss": 4.2889, | |
| "step": 313856 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.485287903336232e-05, | |
| "loss": 4.2912, | |
| "step": 314368 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.48444930858518e-05, | |
| "loss": 4.2863, | |
| "step": 314880 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.483610713834128e-05, | |
| "loss": 4.3072, | |
| "step": 315392 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.482772119083076e-05, | |
| "loss": 4.2942, | |
| "step": 315904 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.481933524332024e-05, | |
| "loss": 4.3011, | |
| "step": 316416 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.481094929580971e-05, | |
| "loss": 4.2907, | |
| "step": 316928 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.480256334829919e-05, | |
| "loss": 4.2957, | |
| "step": 317440 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.479417740078867e-05, | |
| "loss": 4.2894, | |
| "step": 317952 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.478579145327815e-05, | |
| "loss": 4.2911, | |
| "step": 318464 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.477740550576763e-05, | |
| "loss": 4.2876, | |
| "step": 318976 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.476903593706084e-05, | |
| "loss": 4.2826, | |
| "step": 319488 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.476064998955033e-05, | |
| "loss": 4.2799, | |
| "step": 320000 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.4752280420843536e-05, | |
| "loss": 4.2802, | |
| "step": 320512 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4743894473333016e-05, | |
| "loss": 4.2878, | |
| "step": 321024 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4735508525822496e-05, | |
| "loss": 4.2911, | |
| "step": 321536 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4727122578311976e-05, | |
| "loss": 4.2863, | |
| "step": 322048 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4718736630801456e-05, | |
| "loss": 4.288, | |
| "step": 322560 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4710367062094665e-05, | |
| "loss": 4.2758, | |
| "step": 323072 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4701981114584145e-05, | |
| "loss": 4.2807, | |
| "step": 323584 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4693595167073625e-05, | |
| "loss": 4.2808, | |
| "step": 324096 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4685209219563105e-05, | |
| "loss": 4.267, | |
| "step": 324608 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4676823272052585e-05, | |
| "loss": 4.28, | |
| "step": 325120 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4668437324542065e-05, | |
| "loss": 4.271, | |
| "step": 325632 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.466006775583528e-05, | |
| "loss": 4.2857, | |
| "step": 326144 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.465169818712849e-05, | |
| "loss": 4.2845, | |
| "step": 326656 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.464331223961797e-05, | |
| "loss": 4.2792, | |
| "step": 327168 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.463492629210745e-05, | |
| "loss": 4.2729, | |
| "step": 327680 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.462654034459693e-05, | |
| "loss": 4.2897, | |
| "step": 328192 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.461815439708641e-05, | |
| "loss": 4.2672, | |
| "step": 328704 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.460976844957589e-05, | |
| "loss": 4.2828, | |
| "step": 329216 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.460138250206537e-05, | |
| "loss": 4.273, | |
| "step": 329728 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.459299655455485e-05, | |
| "loss": 4.2579, | |
| "step": 330240 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.458461060704433e-05, | |
| "loss": 4.2838, | |
| "step": 330752 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.457624103833754e-05, | |
| "loss": 4.2696, | |
| "step": 331264 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.456787146963075e-05, | |
| "loss": 4.2699, | |
| "step": 331776 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4559485522120234e-05, | |
| "loss": 4.2647, | |
| "step": 332288 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4551099574609714e-05, | |
| "loss": 4.2618, | |
| "step": 332800 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4542713627099194e-05, | |
| "loss": 4.2626, | |
| "step": 333312 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4534327679588674e-05, | |
| "loss": 4.2736, | |
| "step": 333824 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4525941732078154e-05, | |
| "loss": 4.2718, | |
| "step": 334336 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4517555784567634e-05, | |
| "loss": 4.2741, | |
| "step": 334848 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4509169837057114e-05, | |
| "loss": 4.2763, | |
| "step": 335360 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.450080026835032e-05, | |
| "loss": 4.2652, | |
| "step": 335872 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.44924143208398e-05, | |
| "loss": 4.2525, | |
| "step": 336384 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.448402837332928e-05, | |
| "loss": 4.2748, | |
| "step": 336896 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.447564242581876e-05, | |
| "loss": 4.2584, | |
| "step": 337408 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.446725647830824e-05, | |
| "loss": 4.2535, | |
| "step": 337920 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4458870530797716e-05, | |
| "loss": 4.2751, | |
| "step": 338432 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.44504845832872e-05, | |
| "loss": 4.2692, | |
| "step": 338944 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.444209863577668e-05, | |
| "loss": 4.2597, | |
| "step": 339456 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.443371268826616e-05, | |
| "loss": 4.2583, | |
| "step": 339968 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.442534311955937e-05, | |
| "loss": 4.2466, | |
| "step": 340480 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.441695717204885e-05, | |
| "loss": 4.26, | |
| "step": 340992 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.440857122453833e-05, | |
| "loss": 4.2733, | |
| "step": 341504 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.440018527702781e-05, | |
| "loss": 4.268, | |
| "step": 342016 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.439179932951729e-05, | |
| "loss": 4.2668, | |
| "step": 342528 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.43834297608105e-05, | |
| "loss": 4.2633, | |
| "step": 343040 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.437504381329998e-05, | |
| "loss": 4.281, | |
| "step": 343552 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.436667424459319e-05, | |
| "loss": 4.2543, | |
| "step": 344064 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.435828829708267e-05, | |
| "loss": 4.2656, | |
| "step": 344576 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.434990234957216e-05, | |
| "loss": 4.2649, | |
| "step": 345088 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.434151640206164e-05, | |
| "loss": 4.2387, | |
| "step": 345600 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4333130454551117e-05, | |
| "loss": 4.2715, | |
| "step": 346112 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4324744507040597e-05, | |
| "loss": 4.264, | |
| "step": 346624 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4316358559530076e-05, | |
| "loss": 4.2631, | |
| "step": 347136 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4307972612019556e-05, | |
| "loss": 4.2568, | |
| "step": 347648 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4299603043312766e-05, | |
| "loss": 4.2401, | |
| "step": 348160 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4291217095802245e-05, | |
| "loss": 4.2586, | |
| "step": 348672 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4282831148291725e-05, | |
| "loss": 4.2567, | |
| "step": 349184 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4274445200781205e-05, | |
| "loss": 4.2654, | |
| "step": 349696 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4266059253270685e-05, | |
| "loss": 4.2533, | |
| "step": 350208 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4257706063367624e-05, | |
| "loss": 4.2624, | |
| "step": 350720 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.4249320115857104e-05, | |
| "loss": 4.25, | |
| "step": 351232 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.424093416834659e-05, | |
| "loss": 4.2542, | |
| "step": 351744 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.423254822083607e-05, | |
| "loss": 4.249, | |
| "step": 352256 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.422416227332555e-05, | |
| "loss": 4.2492, | |
| "step": 352768 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.421577632581503e-05, | |
| "loss": 4.2661, | |
| "step": 353280 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.420739037830451e-05, | |
| "loss": 4.2631, | |
| "step": 353792 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.419900443079399e-05, | |
| "loss": 4.2683, | |
| "step": 354304 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.41906348620872e-05, | |
| "loss": 4.2423, | |
| "step": 354816 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.418224891457668e-05, | |
| "loss": 4.2533, | |
| "step": 355328 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.417386296706616e-05, | |
| "loss": 4.2532, | |
| "step": 355840 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.416547701955564e-05, | |
| "loss": 4.2546, | |
| "step": 356352 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.415709107204512e-05, | |
| "loss": 4.2514, | |
| "step": 356864 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.414872150333833e-05, | |
| "loss": 4.2441, | |
| "step": 357376 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.414033555582781e-05, | |
| "loss": 4.2452, | |
| "step": 357888 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.413194960831729e-05, | |
| "loss": 4.2605, | |
| "step": 358400 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.4123580039610504e-05, | |
| "loss": 4.2331, | |
| "step": 358912 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.4115194092099984e-05, | |
| "loss": 4.2459, | |
| "step": 359424 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.410682452339319e-05, | |
| "loss": 4.2399, | |
| "step": 359936 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.409843857588267e-05, | |
| "loss": 4.2535, | |
| "step": 360448 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.409005262837215e-05, | |
| "loss": 4.2426, | |
| "step": 360960 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.408166668086163e-05, | |
| "loss": 4.2536, | |
| "step": 361472 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.407328073335111e-05, | |
| "loss": 4.2455, | |
| "step": 361984 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.406489478584059e-05, | |
| "loss": 4.2395, | |
| "step": 362496 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.405650883833007e-05, | |
| "loss": 4.2529, | |
| "step": 363008 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.404812289081955e-05, | |
| "loss": 4.239, | |
| "step": 363520 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.4039736943309026e-05, | |
| "loss": 4.2535, | |
| "step": 364032 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.403135099579851e-05, | |
| "loss": 4.2569, | |
| "step": 364544 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.402296504828799e-05, | |
| "loss": 4.2456, | |
| "step": 365056 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.401457910077747e-05, | |
| "loss": 4.2398, | |
| "step": 365568 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.400619315326695e-05, | |
| "loss": 4.2449, | |
| "step": 366080 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.399780720575643e-05, | |
| "loss": 4.2412, | |
| "step": 366592 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.398942125824591e-05, | |
| "loss": 4.2453, | |
| "step": 367104 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.398103531073539e-05, | |
| "loss": 4.245, | |
| "step": 367616 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.39726657420286e-05, | |
| "loss": 4.2356, | |
| "step": 368128 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.396427979451808e-05, | |
| "loss": 4.2529, | |
| "step": 368640 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.395591022581129e-05, | |
| "loss": 4.2452, | |
| "step": 369152 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.394752427830077e-05, | |
| "loss": 4.2468, | |
| "step": 369664 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.393913833079025e-05, | |
| "loss": 4.2273, | |
| "step": 370176 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.393075238327973e-05, | |
| "loss": 4.2415, | |
| "step": 370688 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.3922382814572946e-05, | |
| "loss": 4.2332, | |
| "step": 371200 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.3913996867062426e-05, | |
| "loss": 4.2446, | |
| "step": 371712 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.3905610919551906e-05, | |
| "loss": 4.2477, | |
| "step": 372224 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.3897224972041386e-05, | |
| "loss": 4.2418, | |
| "step": 372736 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.3888855403334595e-05, | |
| "loss": 4.2252, | |
| "step": 373248 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.3880469455824075e-05, | |
| "loss": 4.2442, | |
| "step": 373760 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.3872083508313555e-05, | |
| "loss": 4.2376, | |
| "step": 374272 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.3863697560803035e-05, | |
| "loss": 4.2503, | |
| "step": 374784 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.3855311613292515e-05, | |
| "loss": 4.2293, | |
| "step": 375296 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.3846942044585724e-05, | |
| "loss": 4.2453, | |
| "step": 375808 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.3838556097075204e-05, | |
| "loss": 4.2414, | |
| "step": 376320 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.3830170149564684e-05, | |
| "loss": 4.2452, | |
| "step": 376832 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.3821784202054164e-05, | |
| "loss": 4.236, | |
| "step": 377344 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.3813398254543644e-05, | |
| "loss": 4.2395, | |
| "step": 377856 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.380501230703313e-05, | |
| "loss": 4.2281, | |
| "step": 378368 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.379662635952261e-05, | |
| "loss": 4.2398, | |
| "step": 378880 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.378824041201209e-05, | |
| "loss": 4.2374, | |
| "step": 379392 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.37798708433053e-05, | |
| "loss": 4.2346, | |
| "step": 379904 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.377150127459851e-05, | |
| "loss": 4.2285, | |
| "step": 380416 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.376311532708799e-05, | |
| "loss": 4.2372, | |
| "step": 380928 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.375472937957747e-05, | |
| "loss": 4.2379, | |
| "step": 381440 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "eval_loss": 4.222952365875244, | |
| "eval_runtime": 293.7245, | |
| "eval_samples_per_second": 1299.146, | |
| "eval_steps_per_second": 40.599, | |
| "step": 381600 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.374634343206695e-05, | |
| "loss": 4.2327, | |
| "step": 381952 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.373795748455643e-05, | |
| "loss": 4.2243, | |
| "step": 382464 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.372957153704591e-05, | |
| "loss": 4.2436, | |
| "step": 382976 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.372120196833912e-05, | |
| "loss": 4.2291, | |
| "step": 383488 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.3712832399632333e-05, | |
| "loss": 4.2384, | |
| "step": 384000 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.3704446452121813e-05, | |
| "loss": 4.2301, | |
| "step": 384512 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.369606050461129e-05, | |
| "loss": 4.2308, | |
| "step": 385024 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.368767455710077e-05, | |
| "loss": 4.2321, | |
| "step": 385536 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.367928860959025e-05, | |
| "loss": 4.2272, | |
| "step": 386048 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.367091904088346e-05, | |
| "loss": 4.2331, | |
| "step": 386560 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.366253309337294e-05, | |
| "loss": 4.2384, | |
| "step": 387072 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.365414714586242e-05, | |
| "loss": 4.236, | |
| "step": 387584 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.364577757715563e-05, | |
| "loss": 4.2278, | |
| "step": 388096 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.363739162964511e-05, | |
| "loss": 4.2243, | |
| "step": 388608 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.362900568213459e-05, | |
| "loss": 4.2282, | |
| "step": 389120 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.362061973462407e-05, | |
| "loss": 4.2135, | |
| "step": 389632 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.361223378711355e-05, | |
| "loss": 4.2254, | |
| "step": 390144 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.360384783960304e-05, | |
| "loss": 4.2254, | |
| "step": 390656 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.359546189209251e-05, | |
| "loss": 4.2274, | |
| "step": 391168 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.358707594458199e-05, | |
| "loss": 4.2438, | |
| "step": 391680 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.357868999707147e-05, | |
| "loss": 4.2336, | |
| "step": 392192 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.357030404956095e-05, | |
| "loss": 4.2376, | |
| "step": 392704 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.356191810205043e-05, | |
| "loss": 4.2296, | |
| "step": 393216 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.355353215453991e-05, | |
| "loss": 4.2308, | |
| "step": 393728 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.354516258583312e-05, | |
| "loss": 4.2281, | |
| "step": 394240 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.35367766383226e-05, | |
| "loss": 4.2287, | |
| "step": 394752 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.352839069081208e-05, | |
| "loss": 4.2227, | |
| "step": 395264 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.352000474330156e-05, | |
| "loss": 4.2218, | |
| "step": 395776 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.351161879579104e-05, | |
| "loss": 4.2191, | |
| "step": 396288 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.350323284828052e-05, | |
| "loss": 4.2194, | |
| "step": 396800 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3494863279573736e-05, | |
| "loss": 4.2243, | |
| "step": 397312 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3486477332063216e-05, | |
| "loss": 4.2279, | |
| "step": 397824 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3478091384552696e-05, | |
| "loss": 4.2245, | |
| "step": 398336 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3469705437042175e-05, | |
| "loss": 4.2305, | |
| "step": 398848 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3461319489531655e-05, | |
| "loss": 4.2134, | |
| "step": 399360 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3452949920824865e-05, | |
| "loss": 4.2216, | |
| "step": 399872 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3444563973314344e-05, | |
| "loss": 4.2177, | |
| "step": 400384 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3436178025803824e-05, | |
| "loss": 4.2085, | |
| "step": 400896 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3427792078293304e-05, | |
| "loss": 4.2211, | |
| "step": 401408 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3419422509586514e-05, | |
| "loss": 4.204, | |
| "step": 401920 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3411036562075993e-05, | |
| "loss": 4.2227, | |
| "step": 402432 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3402650614565473e-05, | |
| "loss": 4.2217, | |
| "step": 402944 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.339426466705496e-05, | |
| "loss": 4.2231, | |
| "step": 403456 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.338587871954444e-05, | |
| "loss": 4.2142, | |
| "step": 403968 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.337749277203392e-05, | |
| "loss": 4.2301, | |
| "step": 404480 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.33691068245234e-05, | |
| "loss": 4.2063, | |
| "step": 404992 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.336073725581661e-05, | |
| "loss": 4.2243, | |
| "step": 405504 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.335235130830609e-05, | |
| "loss": 4.2133, | |
| "step": 406016 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.334396536079557e-05, | |
| "loss": 4.1928, | |
| "step": 406528 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.333557941328505e-05, | |
| "loss": 4.2276, | |
| "step": 407040 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.332720984457826e-05, | |
| "loss": 4.209, | |
| "step": 407552 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.331882389706774e-05, | |
| "loss": 4.216, | |
| "step": 408064 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.331043794955722e-05, | |
| "loss": 4.2031, | |
| "step": 408576 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.330206838085043e-05, | |
| "loss": 4.2026, | |
| "step": 409088 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3293682433339914e-05, | |
| "loss": 4.2087, | |
| "step": 409600 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3285296485829394e-05, | |
| "loss": 4.2103, | |
| "step": 410112 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3276910538318874e-05, | |
| "loss": 4.2154, | |
| "step": 410624 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.326852459080835e-05, | |
| "loss": 4.2125, | |
| "step": 411136 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.326013864329783e-05, | |
| "loss": 4.2234, | |
| "step": 411648 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.325175269578731e-05, | |
| "loss": 4.2058, | |
| "step": 412160 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.324336674827679e-05, | |
| "loss": 4.1964, | |
| "step": 412672 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.323498080076627e-05, | |
| "loss": 4.2151, | |
| "step": 413184 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.322659485325575e-05, | |
| "loss": 4.1958, | |
| "step": 413696 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.321820890574523e-05, | |
| "loss": 4.1977, | |
| "step": 414208 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3209822958234707e-05, | |
| "loss": 4.2164, | |
| "step": 414720 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3201453389527916e-05, | |
| "loss": 4.2141, | |
| "step": 415232 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3193067442017396e-05, | |
| "loss": 4.1997, | |
| "step": 415744 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3184681494506876e-05, | |
| "loss": 4.2032, | |
| "step": 416256 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.317629554699636e-05, | |
| "loss": 4.192, | |
| "step": 416768 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.316792597828957e-05, | |
| "loss": 4.2023, | |
| "step": 417280 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.315955640958278e-05, | |
| "loss": 4.2145, | |
| "step": 417792 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.315117046207226e-05, | |
| "loss": 4.2104, | |
| "step": 418304 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.314278451456174e-05, | |
| "loss": 4.2096, | |
| "step": 418816 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.313439856705122e-05, | |
| "loss": 4.2076, | |
| "step": 419328 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.31260126195407e-05, | |
| "loss": 4.2227, | |
| "step": 419840 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.311762667203018e-05, | |
| "loss": 4.1957, | |
| "step": 420352 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.310924072451966e-05, | |
| "loss": 4.2094, | |
| "step": 420864 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.310087115581287e-05, | |
| "loss": 4.2064, | |
| "step": 421376 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.309248520830235e-05, | |
| "loss": 4.184, | |
| "step": 421888 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.308409926079183e-05, | |
| "loss": 4.2126, | |
| "step": 422400 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3075729692085045e-05, | |
| "loss": 4.2092, | |
| "step": 422912 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3067343744574525e-05, | |
| "loss": 4.2061, | |
| "step": 423424 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3058957797064005e-05, | |
| "loss": 4.204, | |
| "step": 423936 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3050571849553485e-05, | |
| "loss": 4.1892, | |
| "step": 424448 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3042185902042965e-05, | |
| "loss": 4.1957, | |
| "step": 424960 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3033799954532445e-05, | |
| "loss": 4.2018, | |
| "step": 425472 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3025430385825654e-05, | |
| "loss": 4.2091, | |
| "step": 425984 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3017044438315134e-05, | |
| "loss": 4.1959, | |
| "step": 426496 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3008658490804614e-05, | |
| "loss": 4.2082, | |
| "step": 427008 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.3000272543294094e-05, | |
| "loss": 4.1948, | |
| "step": 427520 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.2991886595783574e-05, | |
| "loss": 4.1983, | |
| "step": 428032 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.2983500648273054e-05, | |
| "loss": 4.1928, | |
| "step": 428544 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.2975114700762534e-05, | |
| "loss": 4.1959, | |
| "step": 429056 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.296674513205575e-05, | |
| "loss": 4.2108, | |
| "step": 429568 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.295835918454523e-05, | |
| "loss": 4.2094, | |
| "step": 430080 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.294997323703471e-05, | |
| "loss": 4.2134, | |
| "step": 430592 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.294158728952418e-05, | |
| "loss": 4.1864, | |
| "step": 431104 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.293320134201366e-05, | |
| "loss": 4.2002, | |
| "step": 431616 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.292481539450314e-05, | |
| "loss": 4.1941, | |
| "step": 432128 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.291642944699262e-05, | |
| "loss": 4.2047, | |
| "step": 432640 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.29080434994821e-05, | |
| "loss": 4.1956, | |
| "step": 433152 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.289965755197158e-05, | |
| "loss": 4.1949, | |
| "step": 433664 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.289127160446106e-05, | |
| "loss": 4.1878, | |
| "step": 434176 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.288288565695054e-05, | |
| "loss": 4.2035, | |
| "step": 434688 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.287449970944002e-05, | |
| "loss": 4.1817, | |
| "step": 435200 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.286613014073324e-05, | |
| "loss": 4.198, | |
| "step": 435712 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.285776057202645e-05, | |
| "loss": 4.181, | |
| "step": 436224 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.284937462451593e-05, | |
| "loss": 4.2019, | |
| "step": 436736 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.284098867700541e-05, | |
| "loss": 4.1924, | |
| "step": 437248 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.283260272949489e-05, | |
| "loss": 4.1943, | |
| "step": 437760 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.282421678198437e-05, | |
| "loss": 4.1921, | |
| "step": 438272 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.281583083447385e-05, | |
| "loss": 4.1936, | |
| "step": 438784 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.280744488696333e-05, | |
| "loss": 4.1951, | |
| "step": 439296 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.279905893945281e-05, | |
| "loss": 4.1896, | |
| "step": 439808 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.2790689370746016e-05, | |
| "loss": 4.1962, | |
| "step": 440320 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.2782303423235496e-05, | |
| "loss": 4.2088, | |
| "step": 440832 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.2773933854528705e-05, | |
| "loss": 4.1933, | |
| "step": 441344 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.276554790701819e-05, | |
| "loss": 4.1884, | |
| "step": 441856 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.275716195950767e-05, | |
| "loss": 4.1939, | |
| "step": 442368 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.274877601199715e-05, | |
| "loss": 4.193, | |
| "step": 442880 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.274039006448663e-05, | |
| "loss": 4.1871, | |
| "step": 443392 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.273202049577984e-05, | |
| "loss": 4.1953, | |
| "step": 443904 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.272363454826932e-05, | |
| "loss": 4.182, | |
| "step": 444416 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.27152486007588e-05, | |
| "loss": 4.1984, | |
| "step": 444928 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.270686265324828e-05, | |
| "loss": 4.1975, | |
| "step": 445440 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.269847670573776e-05, | |
| "loss": 4.1902, | |
| "step": 445952 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.269009075822724e-05, | |
| "loss": 4.1805, | |
| "step": 446464 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.2681704810716714e-05, | |
| "loss": 4.1869, | |
| "step": 446976 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.2673318863206194e-05, | |
| "loss": 4.1863, | |
| "step": 447488 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.266494929449941e-05, | |
| "loss": 4.1958, | |
| "step": 448000 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.265656334698889e-05, | |
| "loss": 4.1912, | |
| "step": 448512 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.264817739947837e-05, | |
| "loss": 4.1934, | |
| "step": 449024 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.263979145196785e-05, | |
| "loss": 4.1721, | |
| "step": 449536 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.2631421883261065e-05, | |
| "loss": 4.194, | |
| "step": 450048 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.2623052314554274e-05, | |
| "loss": 4.1872, | |
| "step": 450560 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.2614666367043754e-05, | |
| "loss": 4.1966, | |
| "step": 451072 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.2606280419533234e-05, | |
| "loss": 4.1793, | |
| "step": 451584 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.2597894472022714e-05, | |
| "loss": 4.1986, | |
| "step": 452096 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.258950852451219e-05, | |
| "loss": 4.1882, | |
| "step": 452608 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.258112257700167e-05, | |
| "loss": 4.1943, | |
| "step": 453120 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.257273662949115e-05, | |
| "loss": 4.182, | |
| "step": 453632 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.256435068198063e-05, | |
| "loss": 4.1965, | |
| "step": 454144 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.255598111327384e-05, | |
| "loss": 4.1738, | |
| "step": 454656 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.254759516576332e-05, | |
| "loss": 4.189, | |
| "step": 455168 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.253922559705654e-05, | |
| "loss": 4.1896, | |
| "step": 455680 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.253083964954602e-05, | |
| "loss": 4.1809, | |
| "step": 456192 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.252245370203549e-05, | |
| "loss": 4.1844, | |
| "step": 456704 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.251408413332871e-05, | |
| "loss": 4.1808, | |
| "step": 457216 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.250569818581819e-05, | |
| "loss": 4.1886, | |
| "step": 457728 | |
| }, | |
| { | |
| "epoch": 1.03, | |
| "eval_loss": 4.180971145629883, | |
| "eval_runtime": 292.488, | |
| "eval_samples_per_second": 1304.638, | |
| "eval_steps_per_second": 40.771, | |
| "step": 457920 | |
| } | |
| ], | |
| "logging_steps": 512, | |
| "max_steps": 3052726, | |
| "num_train_epochs": 9223372036854775807, | |
| "save_steps": 10, | |
| "total_flos": 1.881581403560177e+17, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |