{
  "best_metric": 4.345666408538818,
  "best_model_checkpoint": "/mmfs1/gscratch/stf/abhinavp/corpus-filtering/outputs/binding-c-command/lstm/1/checkpoints/checkpoint-228960",
  "epoch": 0.025000606015738065,
  "eval_steps": 10,
  "global_step": 228960,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0,
      "learning_rate": 4.999998362119627e-05,
      "loss": 10.8194,
      "step": 1
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.999161405248948e-05,
      "loss": 7.5501,
      "step": 512
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.998322810497896e-05,
      "loss": 7.0562,
      "step": 1024
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.997484215746844e-05,
      "loss": 6.9924,
      "step": 1536
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.996645620995792e-05,
      "loss": 6.9445,
      "step": 2048
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.99580702624474e-05,
      "loss": 6.8914,
      "step": 2560
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.994968431493688e-05,
      "loss": 6.7191,
      "step": 3072
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.994129836742636e-05,
      "loss": 6.6279,
      "step": 3584
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.993291241991584e-05,
      "loss": 6.5288,
      "step": 4096
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.992452647240532e-05,
      "loss": 6.462,
      "step": 4608
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.99161405248948e-05,
      "loss": 6.3894,
      "step": 5120
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.990775457738428e-05,
      "loss": 6.3229,
      "step": 5632
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.989936862987376e-05,
      "loss": 6.2581,
      "step": 6144
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.989098268236324e-05,
      "loss": 6.1826,
      "step": 6656
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.988259673485272e-05,
      "loss": 6.1381,
      "step": 7168
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.987422716614593e-05,
      "loss": 6.0784,
      "step": 7680
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.986584121863541e-05,
      "loss": 6.0419,
      "step": 8192
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.985745527112489e-05,
      "loss": 5.9957,
      "step": 8704
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.984906932361437e-05,
      "loss": 5.9488,
      "step": 9216
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.984068337610385e-05,
      "loss": 5.912,
      "step": 9728
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.983229742859333e-05,
      "loss": 5.874,
      "step": 10240
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.982391148108281e-05,
      "loss": 5.837,
      "step": 10752
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.981552553357229e-05,
      "loss": 5.8066,
      "step": 11264
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.9807155964865506e-05,
      "loss": 5.7679,
      "step": 11776
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.9798770017354986e-05,
      "loss": 5.7509,
      "step": 12288
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.9790384069844466e-05,
      "loss": 5.7201,
      "step": 12800
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.9781998122333946e-05,
      "loss": 5.6907,
      "step": 13312
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.9773612174823426e-05,
      "loss": 5.6686,
      "step": 13824
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.97652262273129e-05,
      "loss": 5.6417,
      "step": 14336
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.975684027980238e-05,
      "loss": 5.6247,
      "step": 14848
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.974845433229186e-05,
      "loss": 5.6047,
      "step": 15360
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.974008476358507e-05,
      "loss": 5.5844,
      "step": 15872
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9731715194878284e-05,
      "loss": 5.5627,
      "step": 16384
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9723329247367764e-05,
      "loss": 5.5411,
      "step": 16896
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9714943299857244e-05,
      "loss": 5.5372,
      "step": 17408
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9706557352346724e-05,
      "loss": 5.5275,
      "step": 17920
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9698171404836204e-05,
      "loss": 5.4998,
      "step": 18432
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9689785457325684e-05,
      "loss": 5.4604,
      "step": 18944
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.96814158886189e-05,
      "loss": 5.4654,
      "step": 19456
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.967302994110837e-05,
      "loss": 5.4356,
      "step": 19968
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.966464399359785e-05,
      "loss": 5.4355,
      "step": 20480
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.965625804608733e-05,
      "loss": 5.4196,
      "step": 20992
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.964788847738054e-05,
      "loss": 5.4084,
      "step": 21504
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.963950252987002e-05,
      "loss": 5.384,
      "step": 22016
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.96311165823595e-05,
      "loss": 5.3881,
      "step": 22528
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.962273063484898e-05,
      "loss": 5.3761,
      "step": 23040
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.961434468733847e-05,
      "loss": 5.3727,
      "step": 23552
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.960595873982795e-05,
      "loss": 5.357,
      "step": 24064
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.959757279231743e-05,
      "loss": 5.3364,
      "step": 24576
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.958920322361064e-05,
      "loss": 5.3297,
      "step": 25088
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.958081727610012e-05,
      "loss": 5.3319,
      "step": 25600
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.95724313285896e-05,
      "loss": 5.3092,
      "step": 26112
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.956404538107908e-05,
      "loss": 5.3015,
      "step": 26624
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.955565943356856e-05,
      "loss": 5.2876,
      "step": 27136
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.954728986486177e-05,
      "loss": 5.284,
      "step": 27648
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.953890391735125e-05,
      "loss": 5.2673,
      "step": 28160
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9530517969840727e-05,
      "loss": 5.2818,
      "step": 28672
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9522132022330207e-05,
      "loss": 5.2458,
      "step": 29184
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9513746074819686e-05,
      "loss": 5.2509,
      "step": 29696
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9505360127309166e-05,
      "loss": 5.2428,
      "step": 30208
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9496974179798646e-05,
      "loss": 5.2222,
      "step": 30720
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.948858823228813e-05,
      "loss": 5.2247,
      "step": 31232
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.948021866358134e-05,
      "loss": 5.2126,
      "step": 31744
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.947183271607082e-05,
      "loss": 5.1955,
      "step": 32256
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.946346314736403e-05,
      "loss": 5.1887,
      "step": 32768
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.945507719985351e-05,
      "loss": 5.2059,
      "step": 33280
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.944669125234299e-05,
      "loss": 5.1805,
      "step": 33792
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.943830530483247e-05,
      "loss": 5.1722,
      "step": 34304
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.942991935732195e-05,
      "loss": 5.1541,
      "step": 34816
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.942154978861516e-05,
      "loss": 5.1626,
      "step": 35328
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.941316384110464e-05,
      "loss": 5.1683,
      "step": 35840
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.940477789359412e-05,
      "loss": 5.1518,
      "step": 36352
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.93963919460836e-05,
      "loss": 5.1462,
      "step": 36864
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9388022377376816e-05,
      "loss": 5.1518,
      "step": 37376
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9379636429866296e-05,
      "loss": 5.1414,
      "step": 37888
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9371250482355776e-05,
      "loss": 5.1299,
      "step": 38400
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9362880913648985e-05,
      "loss": 5.1089,
      "step": 38912
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9354494966138465e-05,
      "loss": 5.116,
      "step": 39424
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9346109018627945e-05,
      "loss": 5.098,
      "step": 39936
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9337723071117425e-05,
      "loss": 5.0973,
      "step": 40448
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9329337123606905e-05,
      "loss": 5.0911,
      "step": 40960
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.932095117609638e-05,
      "loss": 5.0873,
      "step": 41472
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.931256522858586e-05,
      "loss": 5.0955,
      "step": 41984
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.930417928107534e-05,
      "loss": 5.0856,
      "step": 42496
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9295793333564825e-05,
      "loss": 5.0583,
      "step": 43008
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9287423764858034e-05,
      "loss": 5.0598,
      "step": 43520
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9279037817347514e-05,
      "loss": 5.0627,
      "step": 44032
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9270651869836994e-05,
      "loss": 5.0587,
      "step": 44544
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9262265922326474e-05,
      "loss": 5.0486,
      "step": 45056
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.925389635361968e-05,
      "loss": 5.0433,
      "step": 45568
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.924551040610916e-05,
      "loss": 5.0367,
      "step": 46080
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.923712445859864e-05,
      "loss": 5.0228,
      "step": 46592
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.922873851108812e-05,
      "loss": 5.0289,
      "step": 47104
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.922036894238133e-05,
      "loss": 5.0223,
      "step": 47616
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.921198299487081e-05,
      "loss": 5.012,
      "step": 48128
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.920359704736029e-05,
      "loss": 5.0188,
      "step": 48640
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.919521109984978e-05,
      "loss": 4.9993,
      "step": 49152
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.918682515233926e-05,
      "loss": 4.9971,
      "step": 49664
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.917843920482874e-05,
      "loss": 4.9859,
      "step": 50176
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.917006963612195e-05,
      "loss": 4.983,
      "step": 50688
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.916168368861143e-05,
      "loss": 4.9908,
      "step": 51200
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.915329774110091e-05,
      "loss": 4.9747,
      "step": 51712
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.914491179359039e-05,
      "loss": 4.9708,
      "step": 52224
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.913652584607987e-05,
      "loss": 4.9656,
      "step": 52736
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.912813989856935e-05,
      "loss": 4.964,
      "step": 53248
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.911975395105883e-05,
      "loss": 4.9471,
      "step": 53760
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.911136800354831e-05,
      "loss": 4.9365,
      "step": 54272
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.9102998434841516e-05,
      "loss": 4.9436,
      "step": 54784
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.9094612487330996e-05,
      "loss": 4.9494,
      "step": 55296
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.908625929742794e-05,
      "loss": 4.9396,
      "step": 55808
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.907787334991742e-05,
      "loss": 4.9366,
      "step": 56320
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.90694874024069e-05,
      "loss": 4.9222,
      "step": 56832
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.906110145489638e-05,
      "loss": 4.9287,
      "step": 57344
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.905271550738586e-05,
      "loss": 4.9155,
      "step": 57856
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.904432955987534e-05,
      "loss": 4.9188,
      "step": 58368
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.903594361236482e-05,
      "loss": 4.914,
      "step": 58880
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.90275576648543e-05,
      "loss": 4.9113,
      "step": 59392
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.901917171734378e-05,
      "loss": 4.9025,
      "step": 59904
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.901078576983326e-05,
      "loss": 4.899,
      "step": 60416
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.900239982232274e-05,
      "loss": 4.9028,
      "step": 60928
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.8994013874812214e-05,
      "loss": 4.901,
      "step": 61440
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.898564430610543e-05,
      "loss": 4.889,
      "step": 61952
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.897725835859491e-05,
      "loss": 4.8911,
      "step": 62464
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.8968872411084396e-05,
      "loss": 4.8816,
      "step": 62976
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.896048646357387e-05,
      "loss": 4.8833,
      "step": 63488
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.8952133273670815e-05,
      "loss": 4.8635,
      "step": 64000
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.8943747326160294e-05,
      "loss": 4.8586,
      "step": 64512
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.8935361378649774e-05,
      "loss": 4.8633,
      "step": 65024
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.8926975431139254e-05,
      "loss": 4.8646,
      "step": 65536
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.8918589483628734e-05,
      "loss": 4.8555,
      "step": 66048
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.8910219914921943e-05,
      "loss": 4.8669,
      "step": 66560
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.8901833967411423e-05,
      "loss": 4.8577,
      "step": 67072
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.88934480199009e-05,
      "loss": 4.8415,
      "step": 67584
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.888506207239038e-05,
      "loss": 4.8467,
      "step": 68096
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.887667612487986e-05,
      "loss": 4.8514,
      "step": 68608
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.886829017736934e-05,
      "loss": 4.8423,
      "step": 69120
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.885992060866256e-05,
      "loss": 4.839,
      "step": 69632
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.885153466115204e-05,
      "loss": 4.8298,
      "step": 70144
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.884314871364151e-05,
      "loss": 4.8331,
      "step": 70656
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.883476276613099e-05,
      "loss": 4.8361,
      "step": 71168
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.882637681862047e-05,
      "loss": 4.8186,
      "step": 71680
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.881799087110995e-05,
      "loss": 4.8113,
      "step": 72192
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.880960492359943e-05,
      "loss": 4.8184,
      "step": 72704
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.880123535489264e-05,
      "loss": 4.8102,
      "step": 73216
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.879284940738212e-05,
      "loss": 4.8063,
      "step": 73728
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.878447983867534e-05,
      "loss": 4.8098,
      "step": 74240
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.877609389116482e-05,
      "loss": 4.8018,
      "step": 74752
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.87677079436543e-05,
      "loss": 4.7994,
      "step": 75264
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.875932199614378e-05,
      "loss": 4.7982,
      "step": 75776
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.875093604863326e-05,
      "loss": 4.7924,
      "step": 76288
    },
    {
      "epoch": 0.03,
      "eval_loss": 4.759008407592773,
      "eval_runtime": 288.2778,
      "eval_samples_per_second": 1323.692,
      "eval_steps_per_second": 41.366,
      "step": 76320
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.874255010112274e-05,
      "loss": 4.7851,
      "step": 76800
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.873416415361222e-05,
      "loss": 4.7888,
      "step": 77312
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.87257782061017e-05,
      "loss": 4.7948,
      "step": 77824
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.871739225859118e-05,
      "loss": 4.7835,
      "step": 78336
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.8709006311080657e-05,
      "loss": 4.7825,
      "step": 78848
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.8700620363570137e-05,
      "loss": 4.7765,
      "step": 79360
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.8692250794863346e-05,
      "loss": 4.7791,
      "step": 79872
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.8683864847352826e-05,
      "loss": 4.7589,
      "step": 80384
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.8675478899842306e-05,
      "loss": 4.7814,
      "step": 80896
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.8667092952331785e-05,
      "loss": 4.7678,
      "step": 81408
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.865870700482127e-05,
      "loss": 4.7744,
      "step": 81920
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.865033743611448e-05,
      "loss": 4.7718,
      "step": 82432
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.864195148860396e-05,
      "loss": 4.7523,
      "step": 82944
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.863356554109344e-05,
      "loss": 4.7555,
      "step": 83456
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.862517959358292e-05,
      "loss": 4.7484,
      "step": 83968
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.86167936460724e-05,
      "loss": 4.7433,
      "step": 84480
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.8608407698561874e-05,
      "loss": 4.7517,
      "step": 84992
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.860003812985509e-05,
      "loss": 4.7372,
      "step": 85504
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.859165218234457e-05,
      "loss": 4.7357,
      "step": 86016
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.858326623483405e-05,
      "loss": 4.7545,
      "step": 86528
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.857488028732352e-05,
      "loss": 4.7369,
      "step": 87040
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.856649433981301e-05,
      "loss": 4.7327,
      "step": 87552
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.8558124771106226e-05,
      "loss": 4.7259,
      "step": 88064
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.85497388235957e-05,
      "loss": 4.7404,
      "step": 88576
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.854135287608518e-05,
      "loss": 4.7188,
      "step": 89088
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.853296692857466e-05,
      "loss": 4.7143,
      "step": 89600
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.852458098106414e-05,
      "loss": 4.7154,
      "step": 90112
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.851619503355362e-05,
      "loss": 4.7184,
      "step": 90624
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.850782546484683e-05,
      "loss": 4.7083,
      "step": 91136
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.849943951733631e-05,
      "loss": 4.7104,
      "step": 91648
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.849105356982579e-05,
      "loss": 4.7109,
      "step": 92160
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.848266762231527e-05,
      "loss": 4.7105,
      "step": 92672
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.847429805360848e-05,
      "loss": 4.6976,
      "step": 93184
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.8465912106097964e-05,
      "loss": 4.7107,
      "step": 93696
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.8457526158587444e-05,
      "loss": 4.7124,
      "step": 94208
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.8449140211076924e-05,
      "loss": 4.7039,
      "step": 94720
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.8440754263566404e-05,
      "loss": 4.6774,
      "step": 95232
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.8432368316055884e-05,
      "loss": 4.6863,
      "step": 95744
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.8423982368545363e-05,
      "loss": 4.6824,
      "step": 96256
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.8415596421034843e-05,
      "loss": 4.6896,
      "step": 96768
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.840722685232805e-05,
      "loss": 4.6799,
      "step": 97280
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.839884090481753e-05,
      "loss": 4.6839,
      "step": 97792
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.839045495730701e-05,
      "loss": 4.6711,
      "step": 98304
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.838206900979649e-05,
      "loss": 4.686,
      "step": 98816
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.83736994410897e-05,
      "loss": 4.6803,
      "step": 99328
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.836532987238292e-05,
      "loss": 4.6805,
      "step": 99840
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.83569439248724e-05,
      "loss": 4.6798,
      "step": 100352
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.834855797736188e-05,
      "loss": 4.6597,
      "step": 100864
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.834017202985136e-05,
      "loss": 4.6686,
      "step": 101376
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.8331802461144566e-05,
      "loss": 4.6727,
      "step": 101888
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.8323416513634046e-05,
      "loss": 4.6669,
      "step": 102400
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.8315030566123526e-05,
      "loss": 4.6642,
      "step": 102912
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.8306644618613006e-05,
      "loss": 4.6513,
      "step": 103424
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.8298258671102486e-05,
      "loss": 4.659,
      "step": 103936
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.8289872723591966e-05,
      "loss": 4.6427,
      "step": 104448
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.8281486776081446e-05,
      "loss": 4.6724,
      "step": 104960
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.8273100828570926e-05,
      "loss": 4.6443,
      "step": 105472
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.826474763866787e-05,
      "loss": 4.6551,
      "step": 105984
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.825637806996108e-05,
      "loss": 4.6521,
      "step": 106496
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.824799212245056e-05,
      "loss": 4.6297,
      "step": 107008
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.823960617494004e-05,
      "loss": 4.6459,
      "step": 107520
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.823122022742952e-05,
      "loss": 4.6388,
      "step": 108032
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.8222834279919e-05,
      "loss": 4.6254,
      "step": 108544
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.821444833240848e-05,
      "loss": 4.6252,
      "step": 109056
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.820606238489796e-05,
      "loss": 4.6435,
      "step": 109568
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.819767643738744e-05,
      "loss": 4.6304,
      "step": 110080
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.818929048987692e-05,
      "loss": 4.6236,
      "step": 110592
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.81809045423664e-05,
      "loss": 4.6174,
      "step": 111104
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.817253497365961e-05,
      "loss": 4.6307,
      "step": 111616
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.816414902614909e-05,
      "loss": 4.6362,
      "step": 112128
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.8155779457442305e-05,
      "loss": 4.6292,
      "step": 112640
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.8147393509931785e-05,
      "loss": 4.6214,
      "step": 113152
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.8139007562421265e-05,
      "loss": 4.6371,
      "step": 113664
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.8130621614910745e-05,
      "loss": 4.6302,
      "step": 114176
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.8122235667400224e-05,
      "loss": 4.6258,
      "step": 114688
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.8113849719889704e-05,
      "loss": 4.6075,
      "step": 115200
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.8105463772379184e-05,
      "loss": 4.6194,
      "step": 115712
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.809707782486866e-05,
      "loss": 4.6119,
      "step": 116224
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.808869187735814e-05,
      "loss": 4.6071,
      "step": 116736
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.808030592984762e-05,
      "loss": 4.609,
      "step": 117248
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.8071936361140827e-05,
      "loss": 4.6083,
      "step": 117760
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.8063550413630307e-05,
      "loss": 4.6216,
      "step": 118272
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.8055164466119786e-05,
      "loss": 4.6164,
      "step": 118784
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.8046794897413e-05,
      "loss": 4.5923,
      "step": 119296
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.803840894990248e-05,
      "loss": 4.597,
      "step": 119808
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.803002300239196e-05,
      "loss": 4.6016,
      "step": 120320
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.802163705488144e-05,
      "loss": 4.6017,
      "step": 120832
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.801325110737092e-05,
      "loss": 4.6011,
      "step": 121344
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.800488153866413e-05,
      "loss": 4.5991,
      "step": 121856
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.799649559115361e-05,
      "loss": 4.594,
      "step": 122368
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.798810964364309e-05,
      "loss": 4.5846,
      "step": 122880
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.797972369613257e-05,
      "loss": 4.5951,
      "step": 123392
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.797133774862205e-05,
      "loss": 4.5879,
      "step": 123904
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.796295180111153e-05,
      "loss": 4.5879,
      "step": 124416
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.795456585360101e-05,
      "loss": 4.5957,
      "step": 124928
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.794617990609049e-05,
      "loss": 4.5843,
      "step": 125440
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.793779395857997e-05,
      "loss": 4.5777,
      "step": 125952
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.792942438987319e-05,
      "loss": 4.5784,
      "step": 126464
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.792103844236267e-05,
      "loss": 4.5807,
      "step": 126976
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.791265249485215e-05,
      "loss": 4.5853,
      "step": 127488
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.790426654734163e-05,
      "loss": 4.5718,
      "step": 128000
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7895880599831107e-05,
      "loss": 4.5705,
      "step": 128512
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7887494652320587e-05,
      "loss": 4.5721,
      "step": 129024
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7879108704810067e-05,
      "loss": 4.572,
      "step": 129536
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7870722757299546e-05,
      "loss": 4.5606,
      "step": 130048
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7862369567396485e-05,
      "loss": 4.5475,
      "step": 130560
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7853983619885965e-05,
      "loss": 4.5643,
      "step": 131072
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7845597672375445e-05,
      "loss": 4.5659,
      "step": 131584
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7837211724864925e-05,
      "loss": 4.5633,
      "step": 132096
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.782882577735441e-05,
      "loss": 4.562,
      "step": 132608
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.782043982984389e-05,
      "loss": 4.5553,
      "step": 133120
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.781205388233337e-05,
      "loss": 4.5612,
      "step": 133632
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.780368431362658e-05,
      "loss": 4.5501,
      "step": 134144
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.779529836611606e-05,
      "loss": 4.5531,
      "step": 134656
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.778691241860554e-05,
      "loss": 4.5574,
      "step": 135168
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.777852647109502e-05,
      "loss": 4.5576,
      "step": 135680
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.777014052358449e-05,
      "loss": 4.5491,
      "step": 136192
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.776175457607397e-05,
      "loss": 4.5496,
      "step": 136704
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.775338500736719e-05,
      "loss": 4.5569,
      "step": 137216
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.774499905985666e-05,
      "loss": 4.5557,
      "step": 137728
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.773661311234615e-05,
      "loss": 4.5407,
      "step": 138240
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.772822716483563e-05,
      "loss": 4.5521,
      "step": 138752
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7719857596128845e-05,
      "loss": 4.5397,
      "step": 139264
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.771147164861832e-05,
      "loss": 4.5524,
      "step": 139776
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.77030857011078e-05,
      "loss": 4.5299,
      "step": 140288
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.769469975359728e-05,
      "loss": 4.5295,
      "step": 140800
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.768631380608676e-05,
      "loss": 4.5341,
      "step": 141312
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.767792785857624e-05,
      "loss": 4.5408,
      "step": 141824
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.766954191106572e-05,
      "loss": 4.5298,
      "step": 142336
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.76611559635552e-05,
      "loss": 4.5479,
      "step": 142848
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.765278639484841e-05,
      "loss": 4.5368,
      "step": 143360
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.764440044733789e-05,
      "loss": 4.5288,
      "step": 143872
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.763601449982737e-05,
      "loss": 4.5309,
      "step": 144384
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.762762855231685e-05,
      "loss": 4.5405,
      "step": 144896
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.761925898361006e-05,
      "loss": 4.532,
      "step": 145408
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.761087303609954e-05,
      "loss": 4.5324,
      "step": 145920
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.760248708858902e-05,
      "loss": 4.5242,
      "step": 146432
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.75941011410785e-05,
      "loss": 4.527,
      "step": 146944
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.758573157237171e-05,
      "loss": 4.5324,
      "step": 147456
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.757734562486119e-05,
      "loss": 4.5202,
      "step": 147968
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.756895967735067e-05,
      "loss": 4.5188,
      "step": 148480
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.756057372984015e-05,
      "loss": 4.5248,
      "step": 148992
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.755220416113336e-05,
      "loss": 4.5151,
      "step": 149504
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.754381821362284e-05,
      "loss": 4.5153,
      "step": 150016
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.753543226611232e-05,
      "loss": 4.5231,
      "step": 150528
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.75270463186018e-05,
      "loss": 4.5133,
      "step": 151040
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7518676749895016e-05,
      "loss": 4.513,
      "step": 151552
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7510307181188225e-05,
      "loss": 4.5124,
      "step": 152064
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7501921233677705e-05,
      "loss": 4.5076,
      "step": 152576
    },
    {
      "epoch": 1.03,
      "eval_loss": 4.479434013366699,
      "eval_runtime": 280.6715,
      "eval_samples_per_second": 1359.564,
      "eval_steps_per_second": 42.487,
      "step": 152640
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.7493535286167185e-05,
      "loss": 4.5016,
      "step": 153088
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.7485149338656665e-05,
      "loss": 4.5081,
      "step": 153600
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.7476763391146145e-05,
      "loss": 4.5194,
      "step": 154112
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.7468377443635625e-05,
      "loss": 4.5095,
      "step": 154624
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.7459991496125105e-05,
      "loss": 4.5078,
      "step": 155136
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.7451605548614585e-05,
      "loss": 4.5018,
      "step": 155648
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.7443219601104065e-05,
      "loss": 4.5093,
      "step": 156160
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.7434833653593545e-05,
      "loss": 4.4899,
      "step": 156672
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.7426447706083025e-05,
      "loss": 4.5133,
      "step": 157184
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.7418061758572505e-05,
      "loss": 4.509,
      "step": 157696
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.7409675811061985e-05,
      "loss": 4.5044,
      "step": 158208
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.7401289863551465e-05,
      "loss": 4.5135,
      "step": 158720
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.739292029484468e-05,
      "loss": 4.4894,
      "step": 159232
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.7384534347334154e-05,
      "loss": 4.495,
      "step": 159744
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.7376148399823634e-05,
      "loss": 4.5011,
      "step": 160256
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.7367762452313114e-05,
      "loss": 4.4827,
      "step": 160768
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.7359376504802594e-05,
      "loss": 4.4934,
      "step": 161280
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.7350990557292074e-05,
      "loss": 4.4848,
      "step": 161792
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.7342604609781554e-05,
      "loss": 4.4851,
      "step": 162304
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.7334218662271034e-05,
      "loss": 4.5079,
      "step": 162816
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.7325832714760514e-05,
      "loss": 4.4867,
      "step": 163328
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.7317446767249994e-05,
      "loss": 4.4907,
      "step": 163840
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.7309060819739474e-05,
      "loss": 4.4781,
      "step": 164352
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.730069125103269e-05,
      "loss": 4.4981,
      "step": 164864
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.729230530352217e-05,
      "loss": 4.4732,
      "step": 165376
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.728391935601165e-05,
      "loss": 4.4749,
      "step": 165888
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.727553340850113e-05,
      "loss": 4.473,
      "step": 166400
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.726716383979434e-05,
      "loss": 4.4831,
      "step": 166912
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.725879427108755e-05,
      "loss": 4.4693,
      "step": 167424
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.725040832357703e-05,
      "loss": 4.4723,
      "step": 167936
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.724202237606651e-05,
      "loss": 4.4793,
      "step": 168448
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.723363642855599e-05,
      "loss": 4.4804,
      "step": 168960
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.722525048104547e-05,
      "loss": 4.4621,
      "step": 169472
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.721686453353495e-05,
      "loss": 4.4798,
      "step": 169984
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.720847858602443e-05,
      "loss": 4.4817,
      "step": 170496
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.720009263851391e-05,
      "loss": 4.4727,
      "step": 171008
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.719172306980712e-05,
      "loss": 4.4514,
      "step": 171520
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.718335350110033e-05,
      "loss": 4.4645,
      "step": 172032
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.717496755358981e-05,
      "loss": 4.4524,
      "step": 172544
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.716658160607929e-05,
      "loss": 4.4693,
      "step": 173056
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.715819565856877e-05,
      "loss": 4.4608,
      "step": 173568
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.714980971105825e-05,
      "loss": 4.4641,
      "step": 174080
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.714142376354773e-05,
      "loss": 4.453,
      "step": 174592
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.713303781603721e-05,
      "loss": 4.4622,
      "step": 175104
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.712466824733042e-05,
      "loss": 4.4634,
      "step": 175616
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.71162822998199e-05,
      "loss": 4.4646,
      "step": 176128
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.710789635230938e-05,
      "loss": 4.4655,
      "step": 176640
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.709951040479886e-05,
      "loss": 4.4437,
      "step": 177152
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.709114083609208e-05,
      "loss": 4.4509,
      "step": 177664
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.708275488858156e-05,
      "loss": 4.4647,
      "step": 178176
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.7074368941071037e-05,
      "loss": 4.4507,
      "step": 178688
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.7065982993560517e-05,
      "loss": 4.4523,
      "step": 179200
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.705759704604999e-05,
      "loss": 4.4443,
      "step": 179712
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.704921109853947e-05,
      "loss": 4.4504,
      "step": 180224
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.704082515102895e-05,
      "loss": 4.4383,
      "step": 180736
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.703245558232216e-05,
      "loss": 4.4591,
      "step": 181248
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.702406963481164e-05,
      "loss": 4.4444,
      "step": 181760
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.701568368730112e-05,
      "loss": 4.4457,
      "step": 182272
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.70072977397906e-05,
      "loss": 4.4554,
      "step": 182784
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.699891179228008e-05,
      "loss": 4.4262,
      "step": 183296
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.699052584476956e-05,
      "loss": 4.4443,
      "step": 183808
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.6982139897259045e-05,
      "loss": 4.4363,
      "step": 184320
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.6973753949748525e-05,
      "loss": 4.4295,
      "step": 184832
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.6965384381041734e-05,
      "loss": 4.4263,
      "step": 185344
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.6956998433531214e-05,
      "loss": 4.4402,
      "step": 185856
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.694862886482442e-05,
      "loss": 4.4372,
      "step": 186368
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.69402429173139e-05,
      "loss": 4.4264,
      "step": 186880
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.693185696980338e-05,
      "loss": 4.4198,
      "step": 187392
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.692347102229286e-05,
      "loss": 4.4383,
      "step": 187904
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.691508507478234e-05,
      "loss": 4.4466,
      "step": 188416
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.690669912727182e-05,
      "loss": 4.4343,
      "step": 188928
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.68983131797613e-05,
      "loss": 4.4339,
      "step": 189440
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.688992723225078e-05,
      "loss": 4.4439,
      "step": 189952
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.6881557663544e-05,
      "loss": 4.4397,
      "step": 190464
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.687317171603348e-05,
      "loss": 4.4397,
      "step": 190976
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.686478576852296e-05,
      "loss": 4.4159,
      "step": 191488
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.685639982101244e-05,
      "loss": 4.4339,
      "step": 192000
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.684803025230565e-05,
      "loss": 4.433,
      "step": 192512
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.683964430479513e-05,
      "loss": 4.4181,
      "step": 193024
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.683127473608834e-05,
      "loss": 4.4268,
      "step": 193536
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.682288878857782e-05,
      "loss": 4.4253,
      "step": 194048
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.68145028410673e-05,
      "loss": 4.4393,
      "step": 194560
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.680611689355678e-05,
      "loss": 4.4308,
      "step": 195072
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.6797747324849986e-05,
      "loss": 4.412,
      "step": 195584
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.6789361377339466e-05,
      "loss": 4.4169,
      "step": 196096
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.678097542982895e-05,
      "loss": 4.42,
      "step": 196608
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.677258948231843e-05,
      "loss": 4.4252,
      "step": 197120
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.676420353480791e-05,
      "loss": 4.4226,
      "step": 197632
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.675581758729739e-05,
      "loss": 4.4194,
      "step": 198144
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.674743163978687e-05,
      "loss": 4.4173,
      "step": 198656
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.6739045692276346e-05,
      "loss": 4.4075,
      "step": 199168
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.673067612356956e-05,
      "loss": 4.4206,
      "step": 199680
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.672229017605904e-05,
      "loss": 4.4104,
      "step": 200192
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.671392060735225e-05,
      "loss": 4.4169,
      "step": 200704
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.670553465984173e-05,
      "loss": 4.4223,
      "step": 201216
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.669714871233121e-05,
      "loss": 4.4149,
      "step": 201728
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.668876276482069e-05,
      "loss": 4.4066,
      "step": 202240
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.668037681731017e-05,
      "loss": 4.4083,
      "step": 202752
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.667199086979965e-05,
      "loss": 4.4098,
      "step": 203264
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.666360492228913e-05,
      "loss": 4.4142,
      "step": 203776
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.665521897477861e-05,
      "loss": 4.405,
      "step": 204288
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.664683302726809e-05,
      "loss": 4.4021,
      "step": 204800
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.6638479837365035e-05,
      "loss": 4.4007,
      "step": 205312
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.6630093889854515e-05,
      "loss": 4.407,
      "step": 205824
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.6621707942343995e-05,
      "loss": 4.396,
      "step": 206336
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.661332199483347e-05,
      "loss": 4.3861,
      "step": 206848
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.660493604732295e-05,
      "loss": 4.397,
      "step": 207360
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.659655009981243e-05,
      "loss": 4.4032,
      "step": 207872
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.658816415230191e-05,
      "loss": 4.395,
      "step": 208384
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.657977820479139e-05,
      "loss": 4.403,
      "step": 208896
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.6571408636084604e-05,
      "loss": 4.3903,
      "step": 209408
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.6563022688574084e-05,
      "loss": 4.4004,
      "step": 209920
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.655465311986729e-05,
      "loss": 4.3903,
      "step": 210432
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.654626717235677e-05,
      "loss": 4.389,
      "step": 210944
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.653788122484625e-05,
      "loss": 4.4013,
      "step": 211456
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.652949527733573e-05,
      "loss": 4.3996,
      "step": 211968
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.652110932982521e-05,
      "loss": 4.3921,
      "step": 212480
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.651272338231469e-05,
      "loss": 4.3935,
      "step": 212992
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.650433743480417e-05,
      "loss": 4.3933,
      "step": 213504
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.649595148729365e-05,
      "loss": 4.3973,
      "step": 214016
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.648758191858686e-05,
      "loss": 4.3861,
      "step": 214528
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.647919597107634e-05,
      "loss": 4.3978,
      "step": 215040
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.647081002356582e-05,
      "loss": 4.3855,
      "step": 215552
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.646242407605531e-05,
      "loss": 4.397,
      "step": 216064
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.645405450734852e-05,
      "loss": 4.3732,
      "step": 216576
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.6445668559838e-05,
      "loss": 4.3828,
      "step": 217088
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.643728261232748e-05,
      "loss": 4.3783,
      "step": 217600
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.642889666481696e-05,
      "loss": 4.3872,
      "step": 218112
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.642051071730644e-05,
      "loss": 4.3794,
      "step": 218624
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.641212476979592e-05,
      "loss": 4.3982,
      "step": 219136
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.6403755201089126e-05,
      "loss": 4.3862,
      "step": 219648
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.6395369253578606e-05,
      "loss": 4.381,
      "step": 220160
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.6386983306068086e-05,
      "loss": 4.377,
      "step": 220672
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.6378613737361295e-05,
      "loss": 4.3885,
      "step": 221184
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.6370227789850775e-05,
      "loss": 4.3863,
      "step": 221696
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.636184184234026e-05,
      "loss": 4.3881,
      "step": 222208
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.635345589482974e-05,
      "loss": 4.3782,
      "step": 222720
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.634506994731922e-05,
      "loss": 4.378,
      "step": 223232
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.63366839998087e-05,
      "loss": 4.3879,
      "step": 223744
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.632829805229818e-05,
      "loss": 4.3757,
      "step": 224256
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.6319912104787655e-05,
      "loss": 4.3737,
      "step": 224768
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.6311526157277135e-05,
      "loss": 4.3813,
      "step": 225280
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.6303140209766615e-05,
      "loss": 4.373,
      "step": 225792
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.6294754262256095e-05,
      "loss": 4.3693,
      "step": 226304
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.6286368314745575e-05,
      "loss": 4.3756,
      "step": 226816
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.6277998746038784e-05,
      "loss": 4.3769,
      "step": 227328
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.6269629177332e-05,
      "loss": 4.368,
      "step": 227840
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.626124322982148e-05,
      "loss": 4.368,
      "step": 228352
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.625285728231096e-05,
      "loss": 4.3635,
      "step": 228864
    },
    {
      "epoch": 0.03,
      "eval_loss": 4.345666408538818,
      "eval_runtime": 303.4225,
      "eval_samples_per_second": 1257.623,
      "eval_steps_per_second": 39.302,
      "step": 228960
    }
  ],
  "logging_steps": 512,
  "max_steps": 3052726,
  "num_train_epochs": 9223372036854775807,
  "save_steps": 10,
  "total_flos": 9.49904359122191e+16,
  "trial_name": null,
  "trial_params": null
}